@florent - Here is the JSON. I am removing the snapshots now and will try again with the "merge synchronously" setting toggled off.
Note that the remote is a Synology NAS mounted over NFS, if that matters.
{
"data": {
"mode": "delta",
"reportWhen": "failure"
},
"id": "1774449668020",
"jobId": "7fc5396a-5383-4dab-91fe-6758eb8b7474",
"jobName": "ADMIN VMS",
"message": "backup",
"scheduleId": "d09acecc-cc98-4cfd-84a4-5bfd1575b20f",
"start": 1774449668020,
"status": "failure",
"infos": [
{
"data": {
"vms": [
"b827a2ad-361d-e44c-19ca-f9d632baacf8",
"afe4bee2-745d-da4a-0016-c74751856556"
]
},
"message": "vms"
}
],
"tasks": [
{
"data": {
"type": "VM",
"id": "b827a2ad-361d-e44c-19ca-f9d632baacf8",
"name_label": "ADMIN-VM01"
},
"id": "1774449670085",
"message": "backup VM",
"start": 1774449670085,
"status": "success",
"tasks": [
{
"id": "1774449670095",
"message": "clean-vm",
"start": 1774449670095,
"status": "success",
"end": 1774449670170,
"result": {
"merge": false
}
},
{
"id": "1774449670451",
"message": "snapshot",
"start": 1774449670451,
"status": "success",
"end": 1774449672123,
"result": "dad1585e-4094-88aa-4894-d521fae5cb63"
},
{
"data": {
"id": "9f2e49f9-4e87-444a-aa68-4cbf73f28e6d",
"isFull": false,
"type": "remote"
},
"id": "1774449672123:0",
"message": "export",
"start": 1774449672123,
"status": "success",
"tasks": [
{
"id": "1774449673924",
"message": "transfer",
"start": 1774449673924,
"status": "success",
"end": 1774449690670,
"result": {
"size": 283115520
}
},
{
"id": "1774449697186",
"message": "clean-vm",
"start": 1774449697186,
"status": "success",
"tasks": [
{
"id": "1774449698513",
"message": "merge",
"start": 1774449698513,
"status": "success",
"end": 1774449706694
}
],
"end": 1774449706704,
"result": {
"merge": true
}
}
],
"end": 1774449706707
}
],
"end": 1774449706707
},
{
"data": {
"type": "VM",
"id": "afe4bee2-745d-da4a-0016-c74751856556",
"name_label": "ADMIN-VM02"
},
"id": "1774449670088",
"message": "backup VM",
"start": 1774449670088,
"status": "failure",
"tasks": [
{
"id": "1774449670096",
"message": "clean-vm",
"start": 1774449670096,
"status": "success",
"end": 1774449670110,
"result": {
"merge": false
}
},
{
"id": "1774449670452",
"message": "snapshot",
"start": 1774449670452,
"status": "success",
"end": 1774449673024,
"result": "77d9de45-e6b7-d202-9245-7db47b6fd9c9"
},
{
"data": {
"id": "9f2e49f9-4e87-444a-aa68-4cbf73f28e6d",
"isFull": true,
"type": "remote"
},
"id": "1774449673024:0",
"message": "export",
"start": 1774449673024,
"status": "failure",
"tasks": [
{
"id": "1774449674094",
"message": "transfer",
"start": 1774449674094,
"status": "failure",
"end": 1774451157435,
"result": {
"text": "HTTP/1.1 500 Internal Error\r\ncontent-length: 266\r\ncontent-type: text/html\r\nconnection: close\r\ncache-control: no-cache, no-store\r\n\r\n<html><body><h1>HTTP 500 internal server error</h1>An unexpected error occurred; please wait a while and try again. If the problem persists, please contact your support representative.<h1> Additional information </h1>VDI_IO_ERROR: [ Device I/O errors ]</body></html>",
"message": "stream has ended with not enough data (actual: 397, expected: 2097152)",
"name": "Error",
"stack": "Error: stream has ended with not enough data (actual: 397, expected: 2097152)\n at readChunkStrict (/opt/xo/xo-builds/xen-orchestra-202603241416/@vates/read-chunk/index.js:88:19)\n at process.processTicksAndRejections (node:internal/process/task_queues:104:5)\n at async #read (file:///opt/xo/xo-builds/xen-orchestra-202603241416/@xen-orchestra/xapi/disks/XapiVhdStreamSource.mjs:98:65)\n at async generator (file:///opt/xo/xo-builds/xen-orchestra-202603241416/@xen-orchestra/xapi/disks/XapiVhdStreamSource.mjs:199:22)\n at async Timeout.next (file:///opt/xo/xo-builds/xen-orchestra-202603241416/@vates/generator-toolbox/dist/timeout.mjs:14:24)\n at async generatorWithLength (file:///opt/xo/xo-builds/xen-orchestra-202603241416/@xen-orchestra/disk-transform/dist/Throttled.mjs:12:44)\n at async Throttle.createThrottledGenerator (file:///opt/xo/xo-builds/xen-orchestra-202603241416/@vates/generator-toolbox/dist/throttle.mjs:53:30)\n at async ThrottledDisk.diskBlocks (file:///opt/xo/xo-builds/xen-orchestra-202603241416/@xen-orchestra/disk-transform/dist/Disk.mjs:26:30)\n at async Promise.all (index 0)\n at async ForkedDisk.diskBlocks (file:///opt/xo/xo-builds/xen-orchestra-202603241416/@xen-orchestra/disk-transform/dist/SynchronizedDisk.mjs:18:30)"
}
},
{
"id": "1774451158098",
"message": "clean-vm",
"start": 1774451158098,
"status": "success",
"end": 1774451158157,
"result": {
"merge": false
}
}
],
"end": 1774451158216
}
],
"end": 1774451158218,
"result": {
"errno": -2,
"code": "ENOENT",
"syscall": "stat",
"path": "/opt/xo/mounts/9f2e49f9-4e87-444a-aa68-4cbf73f28e6d/xo-vm-backups/afe4bee2-745d-da4a-0016-c74751856556/vdis/7fc5396a-5383-4dab-91fe-6758eb8b7474/530abab7-9ea9-43d4-be6e-acb3fbf67065/20260325T144114Z.alias.vhd",
"message": "ENOENT: no such file or directory, stat '/opt/xo/mounts/9f2e49f9-4e87-444a-aa68-4cbf73f28e6d/xo-vm-backups/afe4bee2-745d-da4a-0016-c74751856556/vdis/7fc5396a-5383-4dab-91fe-6758eb8b7474/530abab7-9ea9-43d4-be6e-acb3fbf67065/20260325T144114Z.alias.vhd'",
"name": "Error",
"stack": "Error: ENOENT: no such file or directory, stat '/opt/xo/mounts/9f2e49f9-4e87-444a-aa68-4cbf73f28e6d/xo-vm-backups/afe4bee2-745d-da4a-0016-c74751856556/vdis/7fc5396a-5383-4dab-91fe-6758eb8b7474/530abab7-9ea9-43d4-be6e-acb3fbf67065/20260325T144114Z.alias.vhd'\nFrom:\n at NfsHandler.addSyncStackTrace (/opt/xo/xo-builds/xen-orchestra-202603241416/@xen-orchestra/fs/dist/local.js:21:26)\n at NfsHandler._getSize (/opt/xo/xo-builds/xen-orchestra-202603241416/@xen-orchestra/fs/dist/local.js:113:48)\n at /opt/xo/xo-builds/xen-orchestra-202603241416/@xen-orchestra/fs/dist/utils.js:29:26\n at new Promise (<anonymous>)\n at NfsHandler.<anonymous> (/opt/xo/xo-builds/xen-orchestra-202603241416/@xen-orchestra/fs/dist/utils.js:24:12)\n at loopResolver (/opt/xo/xo-builds/xen-orchestra-202603241416/node_modules/promise-toolbox/retry.js:83:46)\n at new Promise (<anonymous>)\n at loop (/opt/xo/xo-builds/xen-orchestra-202603241416/node_modules/promise-toolbox/retry.js:85:22)\n at NfsHandler.retry (/opt/xo/xo-builds/xen-orchestra-202603241416/node_modules/promise-toolbox/retry.js:87:10)\n at NfsHandler._getSize (/opt/xo/xo-builds/xen-orchestra-202603241416/node_modules/promise-toolbox/retry.js:103:18)"
}
}
],
"end": 1774451158219
}