diff --git a/src/backups.js b/src/backups.js index 15b556dc9..f0b404e4c 100644 --- a/src/backups.js +++ b/src/backups.js @@ -252,12 +252,14 @@ function sync(backupConfig, backupId, dataDir, callback) { var retryCount = 0; async.retry({ times: 5, interval: 20000 }, function (retryCallback) { + retryCallback = once(retryCallback); // protect against upload() erroring much later after read stream error + ++retryCount; debug(`${task.operation} ${task.path} try ${retryCount}`); if (task.operation === 'add') { setBackupProgress(`Adding ${task.path}`); var stream = fs.createReadStream(path.join(dataDir, task.path)); - stream.on('error', function () { return retryCallback(); }); // ignore error if file disappears + stream.on('error', function (error) { setBackupProgress(`read stream error for ${task.path}: ${error.message}`); retryCallback(); }); // ignore error if file disappears api(backupConfig.provider).upload(backupConfig, backupFilePath, stream, retryCallback); } }, iteratorCallback); @@ -295,8 +297,6 @@ function upload(backupId, format, dataDir, callback) { assert.strictEqual(typeof dataDir, 'string'); assert.strictEqual(typeof callback, 'function'); - callback = once(callback); - debug('upload: id %s format %s dataDir %s', backupId, format, dataDir); settings.getBackupConfig(function (error, backupConfig) { @@ -304,6 +304,8 @@ if (format === 'tgz') { async.retry({ times: 5, interval: 20000 }, function (retryCallback) { + retryCallback = once(retryCallback); // protect against upload() erroring much later after tar stream error + var tarStream = createTarPackStream(dataDir, backupConfig.key || null); tarStream.on('error', retryCallback); // already returns BackupsError diff --git a/src/storage/interface.js b/src/storage/interface.js index 8e31ae509..377789716 100644 --- a/src/storage/interface.js +++ b/src/storage/interface.js @@ -35,6 +35,7 @@ function upload(apiConfig, backupFilePath, sourceStream, callback) {
assert.strictEqual(typeof callback, 'function'); // Result: none + // sourceStream errors are handled upstream callback(new Error('not implemented')); } diff --git a/src/storage/s3.js b/src/storage/s3.js index 50319fa63..d99538c3b 100644 --- a/src/storage/s3.js +++ b/src/storage/s3.js @@ -139,7 +139,7 @@ function upload(apiConfig, backupFilePath, sourceStream, callback) { // s3.upload automatically does a multi-part upload. we set queueSize to 1 to reduce memory usage // uploader will buffer at most queueSize * partSize bytes into memory at any given time. - return s3.upload(params, { partSize: 10 * 1024 * 1024, queueSize: 1 }, function (error) { + s3.upload(params, { partSize: 10 * 1024 * 1024, queueSize: 1 }, function (error) { if (error) { debug('[%s] upload: s3 upload error.', backupFilePath, error); return callback(new BackupsError(BackupsError.EXTERNAL_ERROR, `Error uploading ${backupFilePath}. Message: ${error.message} HTTP Code: ${error.code}`));