Congrats XCP-ng team!!!
Best posts made by DodgeThis
-
RE: backblaze b2 / amazon s3 as remote in xoa
@julien-f No problem. Glad to test it.
-
RE: backblaze b2 / amazon s3 as remote in xoa
@shorian Yep, me too, but not always.
Sometimes it says: Error: Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
Latest posts made by DodgeThis
-
RE: Merge process to S3 is slow for one host
@olivierlambert It's the first time it merges. The VM is huge, +/- 700 GB, heheheh.
-
RE: Merge process to S3 is slow for one host
@olivierlambert
I have been waiting for the merge to finish for 3 days. Is there any way to see the percentage of the merge already done?
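I don't see an exact percentage anywhere, but as a rough proxy you can watch how the objects under the remote's prefix change while the merge runs. A minimal sketch using aws-sdk v2 (the same SDK the error reports in this thread come from); the bucket name and prefix below are placeholders, not my actual settings:

// merge-progress-proxy.js - a rough proxy only, not an actual merge percentage.
// Assumes aws-sdk v2 and that BUCKET/PREFIX point at the backup remote's path;
// both values below are placeholders.
const AWS = require('aws-sdk')

const s3 = new AWS.S3({ region: 'eu-central-1' })
const BUCKET = 'my-backup-bucket' // placeholder
const PREFIX = 'xo-vm-backups/'   // placeholder

async function totals () {
  let size = 0
  let count = 0
  let ContinuationToken
  do {
    const page = await s3
      .listObjectsV2({ Bucket: BUCKET, Prefix: PREFIX, ContinuationToken })
      .promise()
    for (const o of page.Contents || []) {
      count += 1
      size += o.Size
    }
    ContinuationToken = page.NextContinuationToken
  } while (ContinuationToken)
  return { count, size }
}

// Print the object count and total size under the prefix every minute.
setInterval(async () => {
  try {
    const { count, size } = await totals()
    console.log(`${count} objects, ${(size / 1e9).toFixed(1)} GB`)
  } catch (err) {
    console.error(err.code, err.message)
  }
}, 60000)
-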
RE: backblaze b2 / amazon s3 as remote in xoa
@nraynaud glad to help somehow
-
RE: backblaze b2 / amazon s3 as remote in xoa
Another one, with a VM of +/- 600 GB:
{ "data": { "mode": "full", "reportWhen": "failure" }, "id": "1608043681979", "jobId": "abfed8aa-b445-4c94-be5c-b6ed723792b8", "jobName": "amazontest", "message": "backup", "scheduleId": "44840d13-2302-45ae-9460-53ed451183ed", "start": 1608043681979, "status": "failure", "infos": [ { "data": { "vms": [ "fbea839a-72ac-ca3d-51e5-d4248e363d72" ] }, "message": "vms" } ], "tasks": [ { "data": { "type": "VM", "id": "fbea839a-72ac-ca3d-51e5-d4248e363d72" }, "id": "1608043681986:0", "message": "Starting backup of DESKTOPGREEN. (abfed8aa-b445-4c94-be5c-b6ed723792b8)", "start": 1608043681986, "status": "failure", "tasks": [ { "id": "1608043681990", "message": "snapshot", "start": 1608043681990, "status": "success", "end": 1608043683927, "result": "42f38a1a-5037-107d-1ceb-921fed6910fa" }, { "id": "1608043683929", "message": "add metadata to snapshot", "start": 1608043683929, "status": "success", "end": 1608043683938 }, { "id": "1608043684127", "message": "waiting for uptodate snapshot record", "start": 1608043684127, "status": "success", "end": 1608043684330 }, { "id": "1608043684332", "message": "start VM export", "start": 1608043684332, "status": "success", "end": 1608043684348 }, { "data": { "id": "7847ac06-d46f-476a-84c1-608f9302cb67", "type": "remote" }, "id": "1608043684352", "message": "export", "start": 1608043684352, "status": "failure", "tasks": [ { "id": "1608043684783", "message": "transfer", "start": 1608043684783, "status": "failure", "end": 1608043987240, "result": { "message": "Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.", "code": "RequestTimeout", "region": null, "time": "2020-12-15T14:53:06.821Z", "requestId": "E47562F2C7A82C45", "extendedRequestId": "5nM1888rfrbLHHgUrlsuF5cISTd2kj6oVpWfz5Yhtpj4GtfW7wkX9HnTq/Tj8uU37Opp8FNjlFU=", "statusCode": 400, "retryable": true, "name": "RequestTimeout", "stack": "RequestTimeout: Your socket connection to the server was not read from or written to within the timeout period. 
Idle connections will be closed.\n at Request.extractError (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/services/s3.js:700:35)\n at Request.callListeners (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:106:20)\n at Request.emit (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:78:10)\n at Request.emit (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:688:14)\n at Request.transition (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:22:10)\n at AcceptorStateMachine.runTo (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/state_machine.js:14:12)\n at /opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/state_machine.js:26:10\n at Request.<anonymous> (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:38:9)\n at Request.<anonymous> (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:690:12)\n at Request.callListeners (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:116:18)\n at Request.emit (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:78:10)\n at Request.emit (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:688:14)\n at Request.transition (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:22:10)\n at AcceptorStateMachine.runTo (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/state_machine.js:14:12)\n at /opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/state_machine.js:26:10\n at Request.<anonymous> (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:38:9)\n at Request.<anonymous> (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:690:12)\n at Request.callListeners (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:116:18)\n at callNextListener (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:96:12)\n at IncomingMessage.onEnd (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/event_listeners.js:313:13)\n at IncomingMessage.apply (events.js:326:22)\n at IncomingMessage.patchedEmit (/opt/xo/xo-builds/xen-orchestra-202012150908/@xen-orchestra/log/src/configure.js:92:17)" } } ], "end": 1608043987241, "result": { "message": "Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.", "code": "RequestTimeout", "region": null, "time": "2020-12-15T14:53:06.821Z", "requestId": "E47562F2C7A82C45", "extendedRequestId": "5nM1888rfrbLHHgUrlsuF5cISTd2kj6oVpWfz5Yhtpj4GtfW7wkX9HnTq/Tj8uU37Opp8FNjlFU=", "statusCode": 400, "retryable": true, "name": "RequestTimeout", "stack": "RequestTimeout: Your socket connection to the server was not read from or written to within the timeout period. 
Idle connections will be closed.\n at Request.extractError (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/services/s3.js:700:35)\n at Request.callListeners (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:106:20)\n at Request.emit (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:78:10)\n at Request.emit (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:688:14)\n at Request.transition (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:22:10)\n at AcceptorStateMachine.runTo (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/state_machine.js:14:12)\n at /opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/state_machine.js:26:10\n at Request.<anonymous> (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:38:9)\n at Request.<anonymous> (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:690:12)\n at Request.callListeners (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:116:18)\n at Request.emit (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:78:10)\n at Request.emit (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:688:14)\n at Request.transition (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:22:10)\n at AcceptorStateMachine.runTo (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/state_machine.js:14:12)\n at /opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/state_machine.js:26:10\n at Request.<anonymous> (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:38:9)\n at Request.<anonymous> (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/request.js:690:12)\n at Request.callListeners (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:116:18)\n at callNextListener (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/sequential_executor.js:96:12)\n at IncomingMessage.onEnd (/opt/xo/xo-builds/xen-orchestra-202012150908/node_modules/aws-sdk/lib/event_listeners.js:313:13)\n at IncomingMessage.apply (events.js:326:22)\n at IncomingMessage.patchedEmit (/opt/xo/xo-builds/xen-orchestra-202012150908/@xen-orchestra/log/src/configure.js:92:17)" } }, { "id": "1608043987670", "message": "set snapshot.other_config[xo:backup:exported]", "start": 1608043987670, "status": "success", "end": 1608043989451 } ], "end": 1608051207208 } ], "end": 1608051207238 }
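For context, a minimal sketch of the client-side knobs involved in that RequestTimeout, using aws-sdk v2 (the SDK in the stack trace). This is not XO's actual code; the bucket, key and file path are placeholders:

// request-timeout-sketch.js - illustrates the knobs involved, not XO's code.
const AWS = require('aws-sdk')
const fs = require('fs')

// S3 closes a connection it has not read from or written to for a while
// (the RequestTimeout above). aws-sdk v2 also has its own per-request socket
// timeout, configurable through httpOptions (the default is two minutes).
const s3 = new AWS.S3({
  region: 'eu-central-1',
  httpOptions: { timeout: 600000 }, // 10 minutes instead of the default
})

// If the stream feeding the upload stalls (e.g. a slow VM export), the
// connection sits idle and S3 eventually answers with a 400 RequestTimeout.
const body = fs.createReadStream('/tmp/export.vhd') // placeholder path

s3.upload({ Bucket: 'my-backup-bucket', Key: 'test/export.vhd', Body: body })
  .promise()
  .then(() => console.log('done'))
  .catch(err => console.error(err.code, err.message))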
-
RE: backblaze b2 / amazon s3 as remote in xoa
@julien-f
Still no success.
The VM that I'm trying to back up is about 290 GB.
Xen Orchestra is on a VM with:
Debian 10
4 GB RAM
85 GB of free disk space
1 CPU
xo-server 5.72.1
xo-web 5.75.0
{ "data": { "mode": "full", "reportWhen": "failure" }, "id": "1608035511007", "jobId": "695f3dfb-783f-4cb9-85ef-6805d75c7d26", "jobName": "AmazonTest", "message": "backup", "scheduleId": "75c619c3-8b6d-4c72-b110-131b01992b3e", "start": 1608035511007, "status": "failure", "tasks": [ { "data": { "type": "VM", "id": "93b88999-8b61-7207-138b-1ef389491e71" }, "id": "1608035511027", "message": "Starting backup of MysqlDB. (695f3dfb-783f-4cb9-85ef-6805d75c7d26)", "start": 1608035511027, "status": "failure", "tasks": [ { "id": "1608035511031", "message": "snapshot", "start": 1608035511031, "status": "success", "end": 1608035512857, "result": "e612bb3b-9141-45ef-a901-ac8e4f1927c2" }, { "id": "1608035512859", "message": "add metadata to snapshot", "start": 1608035512859, "status": "success", "end": 1608035512869 }, { "id": "1608035513055", "message": "waiting for uptodate snapshot record", "start": 1608035513055, "status": "success", "end": 1608035513259 }, { "id": "1608035513261", "message": "start VM export", "start": 1608035513261, "status": "success", "end": 1608035513302 }, { "data": { "id": "72341e61-56db-43f5-8389-78baf364516f", "type": "remote" }, "id": "1608035513304", "message": "export", "start": 1608035513304, "status": "failure", "tasks": [ { "id": "1608035514389", "message": "transfer", "start": 1608035514389, "status": "failure", "end": 1608035839218, "result": { "message": "write EPIPE", "errno": "EPIPE", "code": "NetworkingError", "syscall": "write", "region": "eu-central-1", "hostname": "s3.eu-central-1.amazonaws.com", "retryable": true, "time": "2020-12-15T12:37:18.736Z", "statusCode": 400, "name": "NetworkingError", "stack": "Error: write EPIPE\n at WriteWrap.onWriteComplete [as oncomplete] (internal/stream_base_commons.js:94:16)" } } ], "end": 1608035839357, "result": { "message": "write EPIPE", "errno": "EPIPE", "code": "NetworkingError", "syscall": "write", "region": "eu-central-1", "hostname": "s3.eu-central-1.amazonaws.com", "retryable": true, "time": "2020-12-15T12:37:18.736Z", "statusCode": 400, "name": "NetworkingError", "stack": "Error: write EPIPE\n at WriteWrap.onWriteComplete [as oncomplete] (internal/stream_base_commons.js:94:16)" } }, { "id": "1608035839607", "message": "set snapshot.other_config[xo:backup:exported]", "start": 1608035839607, "status": "success", "end": 1608035839831 } ], "end": 1608039896381 } ], "end": 1608039896383 }
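For reference, this is roughly where multipart upload tuning lives in aws-sdk v2: the managed upload exposes part size and concurrency. This is only a sketch under my own assumptions (placeholder bucket, key and path), not what XO actually does:

// upload-tuning-sketch.js - how the SDK's managed multipart upload exposes
// part size and concurrency; not XO's implementation.
const AWS = require('aws-sdk')
const fs = require('fs')

const s3 = new AWS.S3({ region: 'eu-central-1' })

// With the default 5 MB part size, a ~300 GB stream would need ~60 000 parts,
// well over S3's 10 000-part limit, so a larger partSize is needed anyway.
const upload = s3.upload(
  {
    Bucket: 'my-backup-bucket',        // placeholder
    Key: 'xo-test/mysqldb-export.vhd', // placeholder
    Body: fs.createReadStream('/tmp/mysqldb-export.vhd'), // placeholder
  },
  {
    partSize: 64 * 1024 * 1024, // 64 MB parts
    queueSize: 2,               // parts uploaded in parallel
  }
)

upload.on('httpUploadProgress', p => console.log(`${p.loaded} bytes sent`))

upload
  .promise()
  .then(() => console.log('upload finished'))
  .catch(err => console.error(err.code, err.message))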
-
RE: backblaze b2 / amazon s3 as remote in xoa
@julien-f No problem. Glad to test it.
-
RE: backblaze b2 / amazon s3 as remote in xoa
@shorian Yep, me too, but not always.
Sometimes it says: Error: Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
-
RE: backblaze b2 / amazon s3 as remote in xoa
@akurzawa S3 backups/delta backups are not working great for big VMs,
but I never had this problem! Check the bucket and directory.
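A quick way to do that check outside XO, again just a sketch with aws-sdk v2 and placeholder values:

// check-remote-sketch.js - confirm the bucket and directory in the remote
// settings are reachable with the same credentials; values are placeholders.
const AWS = require('aws-sdk')

const s3 = new AWS.S3({ region: 'eu-central-1' })

s3.headBucket({ Bucket: 'my-backup-bucket' })
  .promise()
  .then(() =>
    s3
      .listObjectsV2({
        Bucket: 'my-backup-bucket',
        Prefix: 'xo-vm-backups/',
        MaxKeys: 10,
      })
      .promise()
  )
  .then(r => (r.Contents || []).forEach(o => console.log(o.Key)))
  .catch(err => console.error(err.code, err.message))
-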
RE: S3 / Wasabi Backup
I'm getting these errors on big VMs:
{ "data": { "mode": "full", "reportWhen": "failure" }, "id": "1605563266089", "jobId": "abfed8aa-b445-4c94-be5c-b6ed723792b8", "jobName": "amazonteste", "message": "backup", "scheduleId": "44840d13-2302-45ae-9460-53ed451183ed", "start": 1605563266089, "status": "failure", "infos": [ { "data": { "vms": [ "68b2bf30-8693-8419-20ab-18162d4042aa", "fbea839a-72ac-ca3d-51e5-d4248e363d72", "b24d8810-b7f5-5576-af97-6a6d5f9a8730" ] }, "message": "vms" } ], "tasks": [ { "data": { "type": "VM", "id": "68b2bf30-8693-8419-20ab-18162d4042aa" }, "id": "1605563266118:0", "message": "Starting backup of TEST. (abfed8aa-b445-4c94-be5c-b6ed723792b8)", "start": 1605563266118, "status": "failure", "tasks": [ { "id": "1605563266136", "message": "snapshot", "start": 1605563266136, "status": "success", "end": 1605563271882, "result": "a700da10-a281-a124-8283-970aa7675d39" }, { "id": "1605563271885", "message": "add metadata to snapshot", "start": 1605563271885, "status": "success", "end": 1605563271899 }, { "id": "1605563272085", "message": "waiting for uptodate snapshot record", "start": 1605563272085, "status": "success", "end": 1605563272287 }, { "id": "1605563272340", "message": "start VM export", "start": 1605563272340, "status": "success", "end": 1605563272362 }, { "data": { "id": "7847ac06-d46f-476a-84c1-608f9302cb67", "type": "remote" }, "id": "1605563272372", "message": "export", "start": 1605563272372, "status": "failure", "tasks": [ { "id": "1605563273029", "message": "transfer", "start": 1605563273029, "status": "failure", "end": 1605563715136, "result": { "message": "write EPIPE", "errno": "EPIPE", "code": "NetworkingError", "syscall": "write", "region": "eu-central-1", "hostname": "s3.eu-central-1.amazonaws.com", "retryable": true, "time": "2020-11-16T21:55:08.319Z", "name": "NetworkingError", "stack": "Error: write EPIPE\n at WriteWrap.onWriteComplete [as oncomplete] (internal/stream_base_commons.js:92:16)" } } ], "end": 1605563715348, "result": { "message": "write EPIPE", "errno": "EPIPE", "code": "NetworkingError", "syscall": "write", "region": "eu-central-1", "hostname": "s3.eu-central-1.amazonaws.com", "retryable": true, "time": "2020-11-16T21:55:08.319Z", "name": "NetworkingError", "stack": "Error: write EPIPE\n at WriteWrap.onWriteComplete [as oncomplete] (internal/stream_base_commons.js:92:16)" } }, { "id": "1605563717917", "message": "set snapshot.other_config[xo:backup:exported]", "start": 1605563717917, "status": "success", "end": 1605563721061 } ], "end": 1605564045507 }, { "data": { "type": "VM", "id": "fbea839a-72ac-ca3d-51e5-d4248e363d72" }, "id": "1605564045510", "message": "Starting backup of DESKTOPGREEN. 
(abfed8aa-b445-4c94-be5c-b6ed723792b8)", "start": 1605564045510, "status": "failure", "tasks": [ { "id": "1605564045533", "message": "snapshot", "start": 1605564045533, "status": "success", "end": 1605564047224, "result": "3e5fb1b5-c93e-f242-95a2-90757b38f727" }, { "id": "1605564047229", "message": "add metadata to snapshot", "start": 1605564047229, "status": "success", "end": 1605564047240 }, { "id": "1605564047429", "message": "waiting for uptodate snapshot record", "start": 1605564047429, "status": "success", "end": 1605564047631 }, { "id": "1605564047634", "message": "start VM export", "start": 1605564047634, "status": "success", "end": 1605564047652 }, { "data": { "id": "7847ac06-d46f-476a-84c1-608f9302cb67", "type": "remote" }, "id": "1605564047657", "message": "export", "start": 1605564047657, "status": "failure", "tasks": [ { "id": "1605564048778", "message": "transfer", "start": 1605564048778, "status": "failure", "end": 1605564534530, "result": { "message": "write EPIPE", "errno": "EPIPE", "code": "NetworkingError", "syscall": "write", "region": "eu-central-1", "hostname": "s3.eu-central-1.amazonaws.com", "retryable": true, "time": "2020-11-16T22:08:35.991Z", "name": "NetworkingError", "stack": "Error: write EPIPE\n at WriteWrap.onWriteComplete [as oncomplete] (internal/stream_base_commons.js:92:16)" } } ], "end": 1605564534693, "result": { "message": "write EPIPE", "errno": "EPIPE", "code": "NetworkingError", "syscall": "write", "region": "eu-central-1", "hostname": "s3.eu-central-1.amazonaws.com", "retryable": true, "time": "2020-11-16T22:08:35.991Z", "name": "NetworkingError", "stack": "Error: write EPIPE\n at WriteWrap.onWriteComplete [as oncomplete] (internal/stream_base_commons.js:92:16)" } }, { "id": "1605564535540", "message": "set snapshot.other_config[xo:backup:exported]", "start": 1605564535540, "status": "success", "end": 1605564538346 } ], "end": 1605571759789 }, { "data": { "type": "VM", "id": "b24d8810-b7f5-5576-af97-6a6d5f9a8730" }, "id": "1605571759883", "message": "Starting backup of grayLog. 
(abfed8aa-b445-4c94-be5c-b6ed723792b8)", "start": 1605571759883, "status": "failure", "tasks": [ { "id": "1605571759912", "message": "snapshot", "start": 1605571759912, "status": "success", "end": 1605571773047, "result": "600bf528-8c1d-5035-1876-615e1dd89fea" }, { "id": "1605571773051", "message": "add metadata to snapshot", "start": 1605571773051, "status": "success", "end": 1605571773081 }, { "id": "1605571773391", "message": "waiting for uptodate snapshot record", "start": 1605571773391, "status": "success", "end": 1605571773601 }, { "id": "1605571773603", "message": "start VM export", "start": 1605571773603, "status": "success", "end": 1605571773687 }, { "data": { "id": "7847ac06-d46f-476a-84c1-608f9302cb67", "type": "remote" }, "id": "1605571773719", "message": "export", "start": 1605571773719, "status": "failure", "tasks": [ { "id": "1605571774218", "message": "transfer", "start": 1605571774218, "status": "failure", "end": 1605572239807, "result": { "message": "write EPIPE", "errno": "EPIPE", "code": "NetworkingError", "syscall": "write", "region": "eu-central-1", "hostname": "s3.eu-central-1.amazonaws.com", "retryable": true, "time": "2020-11-17T00:17:01.143Z", "name": "NetworkingError", "stack": "Error: write EPIPE\n at WriteWrap.onWriteComplete [as oncomplete] (internal/stream_base_commons.js:92:16)" } } ], "end": 1605572239942, "result": { "message": "write EPIPE", "errno": "EPIPE", "code": "NetworkingError", "syscall": "write", "region": "eu-central-1", "hostname": "s3.eu-central-1.amazonaws.com", "retryable": true, "time": "2020-11-17T00:17:01.143Z", "name": "NetworkingError", "stack": "Error: write EPIPE\n at WriteWrap.onWriteComplete [as oncomplete] (internal/stream_base_commons.js:92:16)" } }, { "id": "1605572242195", "message": "set snapshot.other_config[xo:backup:exported]", "start": 1605572242195, "status": "success", "end": 1605572246834 } ], "end": 1605572705448 } ], "end": 1605572705499 }
Delta backups also have the same problem.
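The failures above are all reported with "retryable": true, so for completeness, here is where retry behaviour is configured in aws-sdk v2. Again only a sketch with placeholder values, not what XO is configured with:

// retry-sketch.js - where aws-sdk v2 retry behaviour is configured.
const AWS = require('aws-sdk')

const s3 = new AWS.S3({
  region: 'eu-central-1',
  maxRetries: 10,                   // default is 3 for most services
  retryDelayOptions: { base: 500 }, // base delay (ms) for exponential backoff
})

// Each retryable failure (EPIPE, RequestTimeout, ...) is retried with
// exponential backoff before the error is surfaced to the caller.
s3.headBucket({ Bucket: 'my-backup-bucket' }) // placeholder bucket
  .promise()
  .then(() => console.log('bucket reachable'))
  .catch(err => console.error(err.code, err.message))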