Import from VMware fails after upgrade to XOA 5.91
-
@florent
The one I showed above was the starter version. Since I made that post, I also tried running it on the one I built from source, which is version "Master, commit 09247". It has the same errors. I just opened a support tunnel.
-
@florent I just sent you the tunnel ID
-
@archw said in Import from VMware fails after upgrade to XOA 5.91:
@florent I just sent you the tunnel ID
the patch is applied
-
@florent
Cool... I'll give it a shot. It takes about twenty minutes (it always dies in the last minute). -
Hi, I tried after the patch was applied, but it still failed. The task progress didn't seem to hang, so I thought it would complete, but in the end it failed and removed the VM from XOA.
{ "id": "0ls4sww64", "properties": { "name": "importing vms 239", "userId": "2af1207e-7ddd-4ce8-a13e-1c6ede07d0c9", "total": 1, "done": 0, "progress": 0 }, "start": 1706887780060, "status": "failure", "updatedAt": 1706888683479, "tasks": [ { "id": "cyz9dh2453h", "properties": { "name": "importing vm 239", "done": 1, "progress": 100 }, "start": 1706887780061, "status": "failure", "tasks": [ { "id": "gqw5xqq9ke5", "properties": { "name": "connecting to 10.193.240.111" }, "start": 1706887780062, "status": "success", "end": 1706887780170, "result": { "_events": {}, "_eventsCount": 0 } }, { "id": "d07ygv9vyjl", "properties": { "name": "get metadata of 239" }, "start": 1706887780171, "status": "success", "end": 1706887780569, "result": { "name_label": "BusCam", "memory": 4294967296, "nCpus": 2, "guestToolsInstalled": false, "firmware": "bios", "powerState": "poweredOff", "disks": [ { "capacity": 42949672960, "isFull": true, "uid": "698f084e", "fileName": "BusCam_2-flat.vmdk", "parentId": "ffffffff", "vmdFormat": "VMFS", "nameLabel": "BusCam_2-flat.vmdk", "datastore": "TS Storage 2", "path": "BusCam", "descriptionLabel": " from esxi", "node": "scsi0:0" }, { "capacity": 42949672960, "isFull": true, "uid": "1d9d8b5e", "fileName": "BusCam_1-flat.vmdk", "parentId": "ffffffff", "vmdFormat": "VMFS", "nameLabel": "BusCam_1-flat.vmdk", "datastore": "TS Storage 2", "path": "BusCam", "descriptionLabel": " from esxi", "node": "scsi0:1" } ], "networks": [ { "macAddress": "00:50:56:b9:55:dc", "isGenerated": false } ] } }, { "id": "ngimkpdl7e", "properties": { "name": "build disks and snapshots chains for 239" }, "start": 1706887780569, "status": "success", "end": 1706887780569, "result": { "scsi0:0": [ { "capacity": 42949672960, "isFull": true, "uid": "698f084e", "fileName": "BusCam_2-flat.vmdk", "parentId": "ffffffff", "vmdFormat": "VMFS", "nameLabel": "BusCam_2-flat.vmdk", "datastore": "TS Storage 2", "path": "BusCam", "descriptionLabel": " from esxi", "node": "scsi0:0" } ], "scsi0:1": [ { "capacity": 42949672960, "isFull": true, "uid": "1d9d8b5e", "fileName": "BusCam_1-flat.vmdk", "parentId": "ffffffff", "vmdFormat": "VMFS", "nameLabel": "BusCam_1-flat.vmdk", "datastore": "TS Storage 2", "path": "BusCam", "descriptionLabel": " from esxi", "node": "scsi0:1" } ] } }, { "id": "i8w7p4xgicn", "properties": { "name": "creating MV on XCP side" }, "start": 1706887780570, "status": "success", "end": 1706887780692, "result": { "uuid": "09d2d428-4555-f70d-b246-80f62edabde7", "allowed_operations": [ "create_vtpm", "changing_NVRAM", "changing_dynamic_range", "changing_shadow_memory", "changing_static_range", "make_into_template", "migrate_send", "destroy", "export", "start_on", "start", "clone", "copy", "snapshot" ], "current_operations": {}, "name_label": "BusCam", "name_description": "from esxi", "power_state": "Halted", "user_version": 1, "is_a_template": false, "is_default_template": false, "suspend_VDI": "OpaqueRef:NULL", "resident_on": "OpaqueRef:NULL", "scheduled_to_be_resident_on": "OpaqueRef:NULL", "affinity": "OpaqueRef:NULL", "memory_overhead": 37748736, "memory_target": 0, "memory_static_max": 4294967296, "memory_dynamic_max": 4294967296, "memory_dynamic_min": 4294967296, "memory_static_min": 4294967296, "VCPUs_params": {}, "VCPUs_max": 2, "VCPUs_at_startup": 2, "actions_after_softreboot": "soft_reboot", "actions_after_shutdown": "destroy", "actions_after_reboot": "restart", "actions_after_crash": "restart", "consoles": [], "VIFs": [], "VBDs": [], "VUSBs": [], "crash_dumps": [], "VTPMs": [], 
"PV_bootloader": "", "PV_kernel": "", "PV_ramdisk": "", "PV_args": "", "PV_bootloader_args": "", "PV_legacy_args": "", "HVM_boot_policy": "BIOS order", "HVM_boot_params": { "order": "cdn" }, "HVM_shadow_multiplier": 1, "platform": { "timeoffset": "0", "nx": "true", "acpi": "1", "apic": "true", "pae": "true", "hpet": "true", "viridian": "true" }, "PCI_bus": "", "other_config": { "mac_seed": "9e80dc56-0498-2f80-4fb8-545ce0366a26", "vgpu_pci": "", "base_template_name": "Other install media", "install-methods": "cdrom" }, "domid": -1, "domarch": "", "last_boot_CPU_flags": {}, "is_control_domain": false, "metrics": "OpaqueRef:6252fb53-d094-107d-efca-0ba6694e2d87", "guest_metrics": "OpaqueRef:NULL", "last_booted_record": "", "recommendations": "<restrictions><restriction field=\"memory-static-max\" max=\"137438953472\" /><restriction field=\"vcpus-max\" max=\"32\" /><restriction property=\"number-of-vbds\" max=\"255\" /><restriction property=\"number-of-vifs\" max=\"7\" /><restriction field=\"has-vendor-device\" value=\"false\" /></restrictions>", "xenstore_data": {}, "ha_always_run": false, "ha_restart_priority": "", "is_a_snapshot": false, "snapshot_of": "OpaqueRef:NULL", "snapshots": [], "snapshot_time": "19700101T00:00:00Z", "transportable_snapshot_id": "", "blobs": {}, "tags": [], "blocked_operations": {}, "snapshot_info": {}, "snapshot_metadata": "", "parent": "OpaqueRef:NULL", "children": [], "bios_strings": {}, "protection_policy": "OpaqueRef:NULL", "is_snapshot_from_vmpp": false, "snapshot_schedule": "OpaqueRef:NULL", "is_vmss_snapshot": false, "appliance": "OpaqueRef:NULL", "start_delay": 0, "shutdown_delay": 0, "order": 0, "VGPUs": [], "attached_PCIs": [], "suspend_SR": "OpaqueRef:NULL", "version": 0, "generation_id": "0:0", "hardware_platform_version": 0, "has_vendor_device": false, "requires_reboot": false, "reference_label": "", "domain_type": "hvm", "NVRAM": {}, "pending_guidances": [] } }, { "id": "iacdn87dv3", "properties": { "name": "Cold import of disks scsi0:0" }, "start": 1706887780693, "status": "failure", "end": 1706888683036, "result": { "message": "no opaque ref found", "name": "Error", "stack": "Error: no opaque ref found\n at importVm (file:///usr/local/lib/node_modules/xo-server/node_modules/@xen-orchestra/xva/importVm.mjs:28:19)\n at processTicksAndRejections (node:internal/process/task_queues:95:5)\n at importVdi (file:///usr/local/lib/node_modules/xo-server/node_modules/@xen-orchestra/xva/importVdi.mjs:6:17)\n at file:///usr/local/lib/node_modules/xo-server/src/xo-mixins/migrate-vm.mjs:260:21\n at Task.runInside (/usr/local/lib/node_modules/xo-server/node_modules/@vates/task/index.js:158:22)\n at Task.run (/usr/local/lib/node_modules/xo-server/node_modules/@vates/task/index.js:141:20)" } }, { "id": "ku5ckax1wr", "properties": { "name": "Cold import of disks scsi0:1" }, "start": 1706887780696, "status": "success", "end": 1706888650415, "result": { "ref": "Ref:002", "label": "BusCam_1-flat.vmdk" } } ], "end": 1706888683479, "result": { "message": "no opaque ref found", "name": "Error", "stack": "Error: no opaque ref found\n at importVm (file:///usr/local/lib/node_modules/xo-server/node_modules/@xen-orchestra/xva/importVm.mjs:28:19)\n at processTicksAndRejections (node:internal/process/task_queues:95:5)\n at importVdi (file:///usr/local/lib/node_modules/xo-server/node_modules/@xen-orchestra/xva/importVdi.mjs:6:17)\n at file:///usr/local/lib/node_modules/xo-server/src/xo-mixins/migrate-vm.mjs:260:21\n at Task.runInside 
(/usr/local/lib/node_modules/xo-server/node_modules/@vates/task/index.js:158:22)\n at Task.run (/usr/local/lib/node_modules/xo-server/node_modules/@vates/task/index.js:141:20)" } } ], "end": 1706888683479, "result": { "succeeded": {}, "message": "no opaque ref found", "name": "Error", "stack": "Error: no opaque ref found\n at importVm (file:///usr/local/lib/node_modules/xo-server/node_modules/@xen-orchestra/xva/importVm.mjs:28:19)\n at processTicksAndRejections (node:internal/process/task_queues:95:5)\n at importVdi (file:///usr/local/lib/node_modules/xo-server/node_modules/@xen-orchestra/xva/importVdi.mjs:6:17)\n at file:///usr/local/lib/node_modules/xo-server/src/xo-mixins/migrate-vm.mjs:260:21\n at Task.runInside (/usr/local/lib/node_modules/xo-server/node_modules/@vates/task/index.js:158:22)\n at Task.run (/usr/local/lib/node_modules/xo-server/node_modules/@vates/task/index.js:141:20)" } }
-
I tried after you patched it...same error:
Error: no opaque ref found\n at importVm (file:///usr/local/lib/node_modules/xo-server/node_modules/@xen-orchestra/xva/importVm.mjs:28:19)\n at processTicksAndRejections (node:internal/process/task_queues:95:5)\n at importVdi (file:///usr/local/lib/node_modules/xo-server/node_modules/@xen-orchestra/xva/importVdi.mjs:6:17)\n at file:///usr/local/lib/node_modules/xo-server/src/xo-mixins/migrate-vm.mjs:260:21\n at Task.runInside (/usr/local/lib/node_modules/xo-server/node_modules/@vates/task/index.js:158:22)\n at Task.run (/usr/local/lib/node_modules/xo-server/node_modules/@vates/task/index.js:141:20)"
-
@archw said in Import from VMware fails after upgrade to XOA 5.91:
I tried after you patched it...same error:
Error: no opaque ref found\n at importVm (file:///usr/local/lib/node_modules/xo-server/node_modules/@xen-orchestra/xva/importVm.mjs:28:19)\n at processTicksAndRejections (node:internal/process/task_queues:95:5)\n at importVdi (file:///usr/local/lib/node_modules/xo-server/node_modules/@xen-orchestra/xva/importVdi.mjs:6:17)\n at file:///usr/local/lib/node_modules/xo-server/src/xo-mixins/migrate-vm.mjs:260:21\n at Task.runInside (/usr/local/lib/node_modules/xo-server/node_modules/@vates/task/index.js:158:22)\n at Task.run (/usr/local/lib/node_modules/xo-server/node_modules/@vates/task/index.js:141:20)"
You had two pools with the same IP, which seems like a good candidate for XO losing the object refs. I applied the same patches as rmaclachlan.
Can you retry? -
@florent
I'm running it right now! -
@florent tunnel is open!
-
I came across the same error today before seeing this thread. Importing a 3-disk VM (powered off).
The first, smaller disk failed first.
I saw the post about the patch and applied it to my XO-from-sources VM (Ronivay Debian image with the disk extended to 30 GB).
I then tried a live (with snapshot) 10 GB single-disk VM to a local thick LVM SR, and it was successful.
I retried the big VM to an NFS SR and it failed in the same spot.
Feb 03 10:41:42 xo-ce xo-server[2902]: 2024-02-03T10:41:42.888Z xo:xo-server WARN possibly unhandled rejection {
Feb 03 10:41:42 xo-ce xo-server[2902]: error: Error: already finalized or destroyed
Feb 03 10:41:42 xo-ce xo-server[2902]: at Pack.entry (/opt/xo/xo-builds/xen-orchestra-202402030246/node_modules/tar-stream/pack.js:138:51)
Feb 03 10:41:42 xo-ce xo-server[2902]: at Pack.resolver (/opt/xo/xo-builds/xen-orchestra-202402030246/node_modules/promise-toolbox/fromCallback.js:5:6)
Feb 03 10:41:42 xo-ce xo-server[2902]: at Promise._execute (/opt/xo/xo-builds/xen-orchestra-202402030246/node_modules/bluebird/js/release/debuggability.js:384:9)
Feb 03 10:41:42 xo-ce xo-server[2902]: at Promise._resolveFromExecutor (/opt/xo/xo-builds/xen-orchestra-202402030246/node_modules/bluebird/js/release/promise.js:518:18)
Feb 03 10:41:42 xo-ce xo-server[2902]: at new Promise (/opt/xo/xo-builds/xen-orchestra-202402030246/node_modules/bluebird/js/release/promise.js:103:10)
Feb 03 10:41:42 xo-ce xo-server[2902]: at Pack.fromCallback (/opt/xo/xo-builds/xen-orchestra-202402030246/node_modules/promise-toolbox/fromCallback.js:9:10)
Feb 03 10:41:42 xo-ce xo-server[2902]: at writeBlock (file:///opt/xo/xo-builds/xen-orchestra-202402030246/@xen-orchestra/xva/_writeDisk.mjs:9:22)
Feb 03 10:41:42 xo-ce xo-server[2902]: }
Feb 03 10:41:45 xo-ce xo-server[2902]: root@10.1.4.10 Xapi#putResource /import/ XapiError: IMPORT_ERROR(INTERNAL_ERROR: [ Unix.Unix_error(Unix.ENOSPC, "write", "") ])
Feb 03 10:41:45 xo-ce xo-server[2902]: at Function.wrap (file:///opt/xo/xo-builds/xen-orchestra-202402030246/packages/xen-api/_XapiError.mjs:16:12)
Feb 03 10:41:45 xo-ce xo-server[2902]: at default (file:///opt/xo/xo-builds/xen-orchestra-202402030246/packages/xen-api/_getTaskResult.mjs:11:29)
Feb 03 10:41:45 xo-ce xo-server[2902]: at Xapi._addRecordToCache (file:///opt/xo/xo-builds/xen-orchestra-202402030246/packages/xen-api/index.mjs:1006:24)
Feb 03 10:41:45 xo-ce xo-server[2902]: at file:///opt/xo/xo-builds/xen-orchestra-202402030246/packages/xen-api/index.mjs:1040:14
Feb 03 10:41:45 xo-ce xo-server[2902]: at Array.forEach (<anonymous>)
Feb 03 10:41:45 xo-ce xo-server[2902]: at Xapi._processEvents (file:///opt/xo/xo-builds/xen-orchestra-202402030246/packages/xen-api/index.mjs:1030:12)
Feb 03 10:41:45 xo-ce xo-server[2902]: at Xapi._watchEvents (file:///opt/xo/xo-builds/xen-orchestra-202402030246/packages/xen-api/index.mjs:1203:14) {
Feb 03 10:41:45 xo-ce xo-server[2902]: code: 'IMPORT_ERROR',
Feb 03 10:41:45 xo-ce xo-server[2902]: params: [ 'INTERNAL_ERROR: [ Unix.Unix_error(Unix.ENOSPC, "write", "") ]' ],
Feb 03 10:41:45 xo-ce xo-server[2902]: call: undefined,
Feb 03 10:41:45 xo-ce xo-server[2902]: url: undefined,
Feb 03 10:41:45 xo-ce xo-server[2902]: task: task {
Feb 03 10:41:45 xo-ce xo-server[2902]: uuid: 'e1ed657e-165c-0a78-2b72-3096b0550fed',
Feb 03 10:41:45 xo-ce xo-server[2902]: name_label: '[XO] VM import',
Feb 03 10:41:45 xo-ce xo-server[2902]: name_description: '',
Feb 03 10:41:45 xo-ce xo-server[2902]: allowed_operations: [],
Feb 03 10:41:45 xo-ce xo-server[2902]: current_operations: {},
Feb 03 10:41:45 xo-ce xo-server[2902]: created: '20240203T10:32:22Z',
Feb 03 10:41:45 xo-ce xo-server[2902]: finished: '20240203T10:41:45Z',
Feb 03 10:41:45 xo-ce xo-server[2902]: status: 'failure',
Feb 03 10:41:45 xo-ce xo-server[2902]: resident_on: 'OpaqueRef:e44d0112-ac22-4037-91d3-6394943789fd',
Feb 03 10:41:45 xo-ce xo-server[2902]: progress: 1,
Feb 03 10:41:45 xo-ce xo-server[2902]: type: '<none/>',
Feb 03 10:41:45 xo-ce xo-server[2902]: result: '',
Feb 03 10:41:45 xo-ce xo-server[2902]: error_info: [
Feb 03 10:41:45 xo-ce xo-server[2902]: 'IMPORT_ERROR',
Feb 03 10:41:45 xo-ce xo-server[2902]: 'INTERNAL_ERROR: [ Unix.Unix_error(Unix.ENOSPC, "write", "") ]'
Feb 03 10:41:45 xo-ce xo-server[2902]: ],
Feb 03 10:41:45 xo-ce xo-server[2902]: other_config: { object_creation: 'complete' },
Feb 03 10:41:45 xo-ce xo-server[2902]: subtask_of: 'OpaqueRef:NULL',
Feb 03 10:41:45 xo-ce xo-server[2902]: subtasks: [],
Feb 03 10:41:45 xo-ce xo-server[2902]: backtrace: '(((process xapi)(filename lib/backtrace.ml)(line 210))((process xapi)(filename ocaml/xapi/import.ml)(line 2021))((process xapi)(filename ocaml/xapi/server_helpers.ml)(line 92)))'
Feb 03 10:41:45 xo-ce xo-server[2902]: }
Feb 03 10:41:45 xo-ce xo-server[2902]: }
Feb 03 10:41:45 xo-ce xo-server[2902]: 2024-02-03T10:41:45.956Z xo:api WARN admin@admin.net | vm.importMultipleFromEsxi(...) [9m] =!> Error: no opaque ref found
Next, I'm going to try the same 3-disk VM, but powered on with a snapshot, saving to the local LVM SR.
Here is the XO error for the disk that failed:
{ "id": "38jiy3bsy5r", "properties": { "name": "Cold import of disks scsi0:0" }, "start": 1706956341748, "status": "failure", "end": 1706956905772, "result": { "message": "no opaque ref found", "name": "Error", "stack": "Error: no opaque ref found\n at importVm (file:///opt/xo/xo-builds/xen-orchestra-202402030246/@xen-orchestra/xva/importVm.mjs:28:19)\n at processTicksAndRejections (node:internal/process/task_queues:95:5)\n at importVdi (file:///opt/xo/xo-builds/xen-orchestra-202402030246/@xen-orchestra/xva/importVdi.mjs:6:17)\n at file:///opt/xo/xo-builds/xen-orchestra-202402030246/packages/xo-server/src/xo-mixins/migrate-vm.mjs:260:21\n at Task.runInside (/opt/xo/xo-builds/xen-orchestra-202402030246/@vates/task/index.js:158:22)\n at Task.run (/opt/xo/xo-builds/xen-orchestra-202402030246/@vates/task/index.js:141:20)" } },
-
@acomav
Replying to myself. I redid the job with a snapshot from a running VM to a local SR. The same issue occurred at the same point.
-
I ran into this exact same issue with the "already finalized or destroyed" message. I'm running XO from sources, but it seems to have started after 5.91 was released, and it's happened to multiple VMs, all with a single disk.
For those who are...
- in the middle of a migration
- running XO from sources built by the external XenOrchestraInstallerUpdater tool
- need an immediate, short-term workaround
...here's what got me back to a working state.
Only consider doing this if you're in this exact, recently developed situation. It is not an appropriate long-term fix, and it may leave you with a vulnerable instance of Xen Orchestra if enough time has passed since this post.
1. Run ./xo-install.sh and attempt to use the built-in rollback feature.
   a. Check that the previously available option for thin provisioning on VMware import is back. If so, you should be set.
   b. If you're like me and update very frequently, your old installation may have cycled out.
2. If rollback is unsuccessful, edit your xo-install.cfg and change the BRANCH variable from "master" to "fix_xva_import_thin" (EDIT: this replaces the commit "89a4de5b21104fb3fa4a6c301eb3fe98328c90d0" originally suggested here, which was right before the 5.91 update). This will pin your installation to a test branch; see the sketch after this list.
   a. This is probably not a stable release, but hopefully you've already accepted that as someone running XO from sources.
   b. Like before, run ./xo-install.sh again and select the option to install.
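For reference, here's a minimal sketch of what that xo-install.cfg edit could look like. It assumes the third-party installer's shell-style config format and only shows the one relevant variable; treat the surrounding comments as illustration rather than the tool's official documentation.

# xo-install.cfg (excerpt) -- hypothetical sketch, only the BRANCH line matters here
# Default value shipped by the installer:
# BRANCH="master"
# Pin to the test branch mentioned in this thread:
BRANCH="fix_xva_import_thin"
# Or pin to a specific commit instead (the one suggested earlier in this post):
# BRANCH="89a4de5b21104fb3fa4a6c301eb3fe98328c90d0"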
I hope at least one person finds this helpful. Like probably many others, I'm in the middle of a migration from VMware in my homelab. Really digging XCP-ng and Xen Orchestra so far, and it feels like the switch was long overdue. Thanks for all the awesome work!
-
xo-install.sh doesn't exist in our repo, so you can't consider your workaround a solution. I don't know which 3rd party you are using it from.
Anyway, with git, you can indeed decide to switch to an older commit until it works. Note that getting to a commit matching a release is not a guarantee it will work. If you want to assist, try to identify the latest working commit (it could be the one given, it doesn't matter). But I think we know pretty well which commit introduced the issue -
@olivierlambert They likely got their build of Xen Orchestra from this 3rd-party installation script and/or VM image.
-
Yes, but people need to understand it's a 3rd-party tool and not the "de facto" way to install XO. xo-install.sh doesn't give external people a clue as to where this thing exists. It's always better not to assume anything and to provide a "regular" way to move to another commit -
@olivierlambert Indeed, it is that tool that @john-c mentioned. While not official, it's a fairly useful and popular one that significantly lowered the barrier to entry for getting me started with XO from sources.
I mostly made that post for other people in my exact situation, and even mentioned the external tool in the prerequisites header. I didn't want to link the external repo and risk it being received as spam, and I'm sure you guys wouldn't be thrilled to have an external tool like that mistakenly wind up in your support queue. I figured those who already use it would recognize it. I'm just a humble, casual homelab user and thought there might be someone else out there pooping their pants in a similar way.
I'll give some of the other commits mentioned in this thread a try and let you know how it goes!
-
No problem, keep us posted!
-
No luck on the latest master
commit 5d4723e I'm afraid. Going to try fix_xva_import_thin next.{ "id": "0ls6joz2k", "properties": { "name": "importing vms 30", "userId": "f424aa77-c82d-432a-9228-709a614019e6", "total": 1, "done": 0, "progress": 0 }, "start": 1706993226381, "status": "failure", "updatedAt": 1706994410318, "tasks": [ { "id": "8ggmvpx0fno", "properties": { "name": "importing vm 30", "done": 1, "progress": 100 }, "start": 1706993226382, "status": "failure", "tasks": [ { "id": "juhjiy9w6y7", "properties": { "name": "connecting to my-notsoawesome-vmware-server.mydomain.net" }, "start": 1706993226383, "status": "success", "end": 1706993226546, "result": { "_events": {}, "_eventsCount": 0 } }, { "id": "79wfj2zidiu", "properties": { "name": "get metadata of 30" }, "start": 1706993226547, "status": "success", "end": 1706993226851, "result": { "name_label": "my-awesome-vm", "memory": 2147483648, "nCpus": 2, "guestToolsInstalled": false, "firmware": "uefi", "powerState": "poweredOff", "disks": [ { "capacity": 64424509440, "isFull": true, "uid": "94dc7d43", "fileName": "my-awesome-vm-flat.vmdk", "parentId": "ffffffff", "vmdFormat": "VMFS", "nameLabel": "my-awesome-vm-flat.vmdk", "datastore": "my-awesome-vmware-nfs-datastore", "path": "my-awesome-vm", "descriptionLabel": " from esxi", "node": "scsi0:0" } ], "networks": [ { "label": "my-awesome-network", "macAddress": "00:50:56:9f:8a:cc", "isGenerated": false } ] } }, { "id": "soh8ame3hok", "properties": { "name": "build disks and snapshots chains for 30" }, "start": 1706993226852, "status": "success", "end": 1706993226852, "result": { "scsi0:0": [ { "capacity": 64424509440, "isFull": true, "uid": "94dc7d43", "fileName": "my-awesome-vm-flat.vmdk", "parentId": "ffffffff", "vmdFormat": "VMFS", "nameLabel": "my-awesome-vm-flat.vmdk", "datastore": "my-awesome-vmware-nfs-datastore", "path": "my-awesome-vm", "descriptionLabel": " from esxi", "node": "scsi0:0" } ] } }, { "id": "wvybfjogex", "properties": { "name": "creating MV on XCP side" }, "start": 1706993226852, "status": "success", "end": 1706993226954, "result": { "uuid": "a3dd5318-24ec-063b-3f98-9f66d50312e4", "allowed_operations": [ "changing_NVRAM", "changing_dynamic_range", "changing_shadow_memory", "changing_static_range", "make_into_template", "migrate_send", "destroy", "export", "start_on", "start", "clone", "copy", "snapshot" ], "current_operations": {}, "name_label": "my-awesome-vm", "name_description": "from esxi", "power_state": "Halted", "user_version": 1, "is_a_template": false, "is_default_template": false, "suspend_VDI": "OpaqueRef:NULL", "resident_on": "OpaqueRef:NULL", "scheduled_to_be_resident_on": "OpaqueRef:NULL", "affinity": "OpaqueRef:NULL", "memory_overhead": 20971520, "memory_target": 0, "memory_static_max": 2147483648, "memory_dynamic_max": 2147483648, "memory_dynamic_min": 2147483648, "memory_static_min": 2147483648, "VCPUs_params": {}, "VCPUs_max": 2, "VCPUs_at_startup": 2, "actions_after_shutdown": "destroy", "actions_after_reboot": "restart", "actions_after_crash": "restart", "consoles": [], "VIFs": [], "VBDs": [], "VUSBs": [], "crash_dumps": [], "VTPMs": [], "PV_bootloader": "", "PV_kernel": "", "PV_ramdisk": "", "PV_args": "", "PV_bootloader_args": "", "PV_legacy_args": "", "HVM_boot_policy": "BIOS order", "HVM_boot_params": { "order": "cdn" }, "HVM_shadow_multiplier": 1, "platform": { "timeoffset": "0", "nx": "true", "acpi": "1", "apic": "true", "pae": "true", "hpet": "true", "viridian": "true" }, "PCI_bus": "", "other_config": { "mac_seed": "22ad11b7-fb42-517f-91b1-1e834eb184af", 
"vgpu_pci": "", "base_template_name": "Other install media", "install-methods": "cdrom" }, "domid": -1, "domarch": "", "last_boot_CPU_flags": {}, "is_control_domain": false, "metrics": "OpaqueRef:d405d6a0-178b-45b2-8a92-5db36d65a220", "guest_metrics": "OpaqueRef:NULL", "last_booted_record": "", "recommendations": "<restrictions><restriction field=\"memory-static-max\" max=\"137438953472\" /><restriction field=\"vcpus-max\" max=\"32\" /><restriction property=\"number-of-vbds\" max=\"255\" /><restriction property=\"number-of-vifs\" max=\"7\" /><restriction field=\"has-vendor-device\" value=\"false\" /></restrictions>", "xenstore_data": {}, "ha_always_run": false, "ha_restart_priority": "", "is_a_snapshot": false, "snapshot_of": "OpaqueRef:NULL", "snapshots": [], "snapshot_time": "19700101T00:00:00Z", "transportable_snapshot_id": "", "blobs": {}, "tags": [], "blocked_operations": {}, "snapshot_info": {}, "snapshot_metadata": "", "parent": "OpaqueRef:NULL", "children": [], "bios_strings": {}, "protection_policy": "OpaqueRef:NULL", "is_snapshot_from_vmpp": false, "snapshot_schedule": "OpaqueRef:NULL", "is_vmss_snapshot": false, "appliance": "OpaqueRef:NULL", "start_delay": 0, "shutdown_delay": 0, "order": 0, "VGPUs": [], "attached_PCIs": [], "suspend_SR": "OpaqueRef:NULL", "version": 0, "generation_id": "0:0", "hardware_platform_version": 0, "has_vendor_device": false, "requires_reboot": false, "reference_label": "", "domain_type": "hvm", "NVRAM": {} } }, { "id": "fx32r2hmr3w", "properties": { "name": "Cold import of disks scsi0:0" }, "start": 1706993226955, "status": "failure", "end": 1706994410295, "result": { "message": "already finalized or destroyed", "name": "Error", "stack": "Error: already finalized or destroyed\n at Pack.entry (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/tar-stream/pack.js:138:51)\n at Pack.resolver (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/promise-toolbox/fromCallback.js:5:6)\n at Promise._execute (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/bluebird/js/release/debuggability.js:384:9)\n at Promise._resolveFromExecutor (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/bluebird/js/release/promise.js:518:18)\n at new Promise (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/bluebird/js/release/promise.js:103:10)\n at Pack.fromCallback (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/promise-toolbox/fromCallback.js:9:10)\n at writeBlock (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/_writeDisk.mjs:6:22)\n at addDisk (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/_writeDisk.mjs:27:13)\n at importVm (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/importVm.mjs:22:5)\n at importVdi (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/importVdi.mjs:6:17)\n at file:///opt/xo/xo-builds/xen-orchestra-202402031433/packages/xo-server/src/xo-mixins/migrate-vm.mjs:260:21\n at Task.runInside (/opt/xo/xo-builds/xen-orchestra-202402031433/@vates/task/index.js:158:22)\n at Task.run (/opt/xo/xo-builds/xen-orchestra-202402031433/@vates/task/index.js:141:20)" } } ], "end": 1706994410318, "result": { "message": "already finalized or destroyed", "name": "Error", "stack": "Error: already finalized or destroyed\n at Pack.entry (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/tar-stream/pack.js:138:51)\n at Pack.resolver (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/promise-toolbox/fromCallback.js:5:6)\n at 
Promise._execute (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/bluebird/js/release/debuggability.js:384:9)\n at Promise._resolveFromExecutor (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/bluebird/js/release/promise.js:518:18)\n at new Promise (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/bluebird/js/release/promise.js:103:10)\n at Pack.fromCallback (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/promise-toolbox/fromCallback.js:9:10)\n at writeBlock (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/_writeDisk.mjs:6:22)\n at addDisk (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/_writeDisk.mjs:27:13)\n at importVm (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/importVm.mjs:22:5)\n at importVdi (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/importVdi.mjs:6:17)\n at file:///opt/xo/xo-builds/xen-orchestra-202402031433/packages/xo-server/src/xo-mixins/migrate-vm.mjs:260:21\n at Task.runInside (/opt/xo/xo-builds/xen-orchestra-202402031433/@vates/task/index.js:158:22)\n at Task.run (/opt/xo/xo-builds/xen-orchestra-202402031433/@vates/task/index.js:141:20)" } } ], "end": 1706994410318, "result": { "succeeded": {}, "message": "already finalized or destroyed", "name": "Error", "stack": "Error: already finalized or destroyed\n at Pack.entry (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/tar-stream/pack.js:138:51)\n at Pack.resolver (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/promise-toolbox/fromCallback.js:5:6)\n at Promise._execute (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/bluebird/js/release/debuggability.js:384:9)\n at Promise._resolveFromExecutor (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/bluebird/js/release/promise.js:518:18)\n at new Promise (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/bluebird/js/release/promise.js:103:10)\n at Pack.fromCallback (/opt/xo/xo-builds/xen-orchestra-202402031433/node_modules/promise-toolbox/fromCallback.js:9:10)\n at writeBlock (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/_writeDisk.mjs:6:22)\n at addDisk (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/_writeDisk.mjs:27:13)\n at importVm (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/importVm.mjs:22:5)\n at importVdi (file:///opt/xo/xo-builds/xen-orchestra-202402031433/@xen-orchestra/xva/importVdi.mjs:6:17)\n at file:///opt/xo/xo-builds/xen-orchestra-202402031433/packages/xo-server/src/xo-mixins/migrate-vm.mjs:260:21\n at Task.runInside (/opt/xo/xo-builds/xen-orchestra-202402031433/@vates/task/index.js:158:22)\n at Task.run (/opt/xo/xo-builds/xen-orchestra-202402031433/@vates/task/index.js:141:20)" } }
-
Hooray, fix_xva_import_thin works! Huge thanks to @florent for the fix!
FWIW, jumping around between commits was a breeze thanks to that external tool.
-
FYI, jumping between various commits is simply a
git checkout <target commit>
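For completeness, a rough sketch of doing this by hand on an XO-from-sources checkout; the paths, the branch/commit name, and the rebuild/restart steps are assumptions that depend on how you installed XO.

# inside your xen-orchestra git clone
cd xen-orchestra
git fetch origin
# switch to a specific commit or branch, e.g. the test branch from this thread
git checkout fix_xva_import_thin
# rebuild, then restart xo-server the way you normally do
yarn && yarn build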