diff --git a/src/backuptask.js b/src/backuptask.js index 1d54a593d..8b74f9361 100644 --- a/src/backuptask.js +++ b/src/backuptask.js @@ -193,17 +193,9 @@ async function copy(backupConfig, srcRemotePath, destRemotePath, progressCallbac const oldFilePath = backupFormat.api(format).getBackupFilePath(backupConfig, srcRemotePath); const newFilePath = backupFormat.api(format).getBackupFilePath(backupConfig, destRemotePath); - return new Promise((resolve, reject) => { - const startTime = new Date(); - - const copyEvents = storage.api(provider).copy(backupConfig, oldFilePath, newFilePath); - copyEvents.on('progress', (message) => progressCallback({ message })); - copyEvents.on('done', function (error) { - if (error) return reject(error); - debug(`copy: copied successfully to ${destRemotePath}. Took ${(new Date() - startTime)/1000} seconds`); - resolve(); - }); - }); + const startTime = new Date(); + await storage.api(provider).copy(backupConfig, oldFilePath, newFilePath, progressCallback); + debug(`copy: copied successfully to ${destRemotePath}. 
Took ${(new Date() - startTime)/1000} seconds`); } async function rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCallback) { diff --git a/src/storage/filesystem.js b/src/storage/filesystem.js index 6d50e8f6d..37c2b14a1 100644 --- a/src/storage/filesystem.js +++ b/src/storage/filesystem.js @@ -35,7 +35,6 @@ const assert = require('assert'), DataLayout = require('../datalayout.js'), debug = require('debug')('box:storage/filesystem'), df = require('@sindresorhus/df'), - EventEmitter = require('events'), fs = require('fs'), mounts = require('../mounts.js'), path = require('path'), @@ -211,29 +210,22 @@ function listDir(apiConfig, dir, batchSize, iteratorCallback, callback) { }); } -function copy(apiConfig, oldFilePath, newFilePath) { +async function copy(apiConfig, oldFilePath, newFilePath, progressCallback) { assert.strictEqual(typeof apiConfig, 'object'); assert.strictEqual(typeof oldFilePath, 'string'); assert.strictEqual(typeof newFilePath, 'string'); + assert.strictEqual(typeof progressCallback, 'function'); - var events = new EventEmitter(); + const [mkdirError] = await safe(fs.promises.mkdir(path.dirname(newFilePath), { recursive: true })); + if (mkdirError) throw new BoxError(BoxError.EXTERNAL_ERROR, mkdirError.message); - fs.mkdir(path.dirname(newFilePath), { recursive: true }, function (error) { - if (error) return events.emit('done', new BoxError(BoxError.EXTERNAL_ERROR, error.message)); + progressCallback({ message: `Copying ${oldFilePath} to ${newFilePath}` }); - events.emit('progress', `Copying ${oldFilePath} to ${newFilePath}`); + let cpOptions = ((apiConfig.provider !== PROVIDER_MOUNTPOINT && apiConfig.provider !== PROVIDER_CIFS) || apiConfig.preserveAttributes) ? '-a' : '-dR'; + cpOptions += apiConfig.noHardlinks ? '' : 'l'; // this will hardlink backups saving space - let cpOptions = ((apiConfig.provider !== PROVIDER_MOUNTPOINT && apiConfig.provider !== PROVIDER_CIFS) || apiConfig.preserveAttributes) ? 
'-a' : '-dR'; - cpOptions += apiConfig.noHardlinks ? '' : 'l'; // this will hardlink backups saving space - - shell.spawn('copy', '/bin/cp', [ cpOptions, oldFilePath, newFilePath ], { }, function (error) { - if (error) return events.emit('done', new BoxError(BoxError.EXTERNAL_ERROR, error.message)); - - events.emit('done', null); - }); - }); - - return events; + const [copyError] = await safe(shell.promises.spawn('copy', '/bin/cp', [ cpOptions, oldFilePath, newFilePath ], { })); + if (copyError) throw new BoxError(BoxError.EXTERNAL_ERROR, copyError.message); } async function remove(apiConfig, filename) { diff --git a/src/storage/gcs.js b/src/storage/gcs.js index fa92e1b1d..54b88e268 100644 --- a/src/storage/gcs.js +++ b/src/storage/gcs.js @@ -31,7 +31,6 @@ const assert = require('assert'), constants = require('../constants.js'), DataLayout = require('../datalayout.js'), debug = require('debug')('box:storage/gcs'), - EventEmitter = require('events'), PassThrough = require('stream').PassThrough, path = require('path'), safe = require('safetydance'), @@ -184,12 +183,11 @@ function listDir(apiConfig, backupFilePath, batchSize, iteratorCallback, callbac }, callback); } -function copy(apiConfig, oldFilePath, newFilePath) { +async function copy(apiConfig, oldFilePath, newFilePath, progressCallback) { assert.strictEqual(typeof apiConfig, 'object'); assert.strictEqual(typeof oldFilePath, 'string'); assert.strictEqual(typeof newFilePath, 'string'); - - var events = new EventEmitter(); + assert.strictEqual(typeof progressCallback, 'function'); function copyFile(entry, iteratorCallback) { var relativePath = path.relative(oldFilePath, entry.fullPath); @@ -206,20 +204,19 @@ function copy(apiConfig, oldFilePath, newFilePath) { const batchSize = 1000; const concurrency = apiConfig.copyConcurrency || 10; - var total = 0; + let total = 0; - listDir(apiConfig, oldFilePath, batchSize, function (entries, done) { + const listDirAsync = util.promisify(listDir); + + const [copyError] = 
await safe(listDirAsync(apiConfig, oldFilePath, batchSize, function (entries, done) { total += entries.length; - events.emit('progress', `Copying ${entries.length} files from ${entries[0].fullPath} to ${entries[entries.length-1].fullPath}. total: ${total}`); + progressCallback({ message: `Copying ${entries.length} files from ${entries[0].fullPath} to ${entries[entries.length-1].fullPath}. total: ${total}` }); async.eachLimit(entries, concurrency, copyFile, done); - }, function (error) { - events.emit('progress', `Copied ${total} files`); - process.nextTick(() => events.emit('done', error)); - }); + })); - return events; + progressCallback({ message: `Copied ${total} files` }); if (copyError) throw copyError; } async function remove(apiConfig, filename) { diff --git a/src/storage/interface.js b/src/storage/interface.js index b59d998a5..9d45661c8 100644 --- a/src/storage/interface.js +++ b/src/storage/interface.js @@ -99,14 +99,13 @@ function downloadDir(apiConfig, backupFilePath, destDir) { return events; } -function copy(apiConfig, oldFilePath, newFilePath) { +async function copy(apiConfig, oldFilePath, newFilePath, progressCallback) { assert.strictEqual(typeof apiConfig, 'object'); assert.strictEqual(typeof oldFilePath, 'string'); assert.strictEqual(typeof newFilePath, 'string'); + assert.strictEqual(typeof progressCallback, 'function'); - var events = new EventEmitter(); - process.nextTick(function () { events.emit('done', null); }); - return events; + throw new BoxError(BoxError.NOT_IMPLEMENTED, 'copy is not implemented'); } function listDir(apiConfig, dir, batchSize, iteratorCallback, callback) { diff --git a/src/storage/noop.js b/src/storage/noop.js index acc08067b..38d9f9f9d 100644 --- a/src/storage/noop.js +++ b/src/storage/noop.js @@ -92,16 +92,13 @@ function downloadDir(apiConfig, backupFilePath, destDir) { return events; } -function copy(apiConfig, oldFilePath, newFilePath) { +async function copy(apiConfig, oldFilePath, newFilePath, progressCallback) { 
assert.strictEqual(typeof apiConfig, 'object'); assert.strictEqual(typeof oldFilePath, 'string'); assert.strictEqual(typeof newFilePath, 'string'); + assert.strictEqual(typeof progressCallback, 'function'); - debug('copy: %s -> %s', oldFilePath, newFilePath); - - var events = new EventEmitter(); - process.nextTick(function () { events.emit('done', null); }); - return events; + debug(`copy: ${oldFilePath} -> ${newFilePath}`); } async function remove(apiConfig, filename) { diff --git a/src/storage/s3.js b/src/storage/s3.js index a9ecb0705..9e044cf3d 100644 --- a/src/storage/s3.js +++ b/src/storage/s3.js @@ -33,7 +33,6 @@ const assert = require('assert'), constants = require('../constants.js'), DataLayout = require('../datalayout.js'), debug = require('debug')('box:storage/s3'), - EventEmitter = require('events'), https = require('https'), path = require('path'), Readable = require('stream').Readable, @@ -323,12 +322,11 @@ function encodeCopySource(bucket, path) { return `/${bucket}/${output}`; } -function copy(apiConfig, oldFilePath, newFilePath) { +async function copy(apiConfig, oldFilePath, newFilePath, progressCallback) { assert.strictEqual(typeof apiConfig, 'object'); assert.strictEqual(typeof oldFilePath, 'string'); assert.strictEqual(typeof newFilePath, 'string'); - - const events = new EventEmitter(); + assert.strictEqual(typeof progressCallback, 'function'); function copyFile(entry, iteratorCallback) { const credentials = getS3Config(apiConfig); @@ -355,11 +353,11 @@ function copy(apiConfig, oldFilePath, newFilePath) { const largeFileLimit = (apiConfig.provider === 'exoscale-sos' || apiConfig.provider === 'backblaze-b2' || apiConfig.provider === 'digitalocean-spaces') ? 
1024 * 1024 * 1024 : 5 * 1024 * 1024 * 1024; if (entry.size < largeFileLimit) { - events.emit('progress', `Copying ${relativePath || oldFilePath}`); + progressCallback({ message: `Copying ${relativePath || oldFilePath}` }); copyParams.CopySource = encodeCopySource(apiConfig.bucket, entry.fullPath); s3.copyObject(copyParams, done).on('retry', function (response) { - events.emit('progress', `Retrying (${response.retryCount+1}) copy of ${relativePath || oldFilePath}. Error: ${response.error} ${response.httpResponse.statusCode}`); + progressCallback({ message: `Retrying (${response.retryCount+1}) copy of ${relativePath || oldFilePath}. Error: ${response.error} ${response.httpResponse.statusCode}` }); // on DO, we get a random 408. these are not retried by the SDK if (response.error) response.error.retryable = true; // https://github.com/aws/aws-sdk-js/issues/412 }); @@ -367,7 +365,7 @@ function copy(apiConfig, oldFilePath, newFilePath) { return; } - events.emit('progress', `Copying (multipart) ${relativePath || oldFilePath}`); + progressCallback({ message: `Copying (multipart) ${relativePath || oldFilePath}` }); s3.createMultipartUpload(copyParams, function (error, multipart) { if (error) return done(error); @@ -394,12 +392,12 @@ function copy(apiConfig, oldFilePath, newFilePath) { UploadId: uploadId }; - events.emit('progress', `Copying part ${partCopyParams.PartNumber} - ${partCopyParams.CopySource} ${partCopyParams.CopySourceRange}`); + progressCallback({ message: `Copying part ${partCopyParams.PartNumber} - ${partCopyParams.CopySource} ${partCopyParams.CopySourceRange}` }); s3.uploadPartCopy(partCopyParams, function (error, part) { if (error) return iteratorDone(error); - events.emit('progress', `Copying part ${partCopyParams.PartNumber} - Etag: ${part.CopyPartResult.ETag}`); + progressCallback({ message: `Copying part ${partCopyParams.PartNumber} - Etag: ${part.CopyPartResult.ETag}` }); if (!part.CopyPartResult.ETag) return iteratorDone(new Error('Multi-part copy 
is broken or not implemented by the S3 storage provider')); @@ -407,7 +405,7 @@ function copy(apiConfig, oldFilePath, newFilePath) { iteratorDone(); }).on('retry', function (response) { - events.emit('progress', `Retrying (${response.retryCount+1}) multipart copy of ${relativePath || oldFilePath}. Error: ${response.error} ${response.httpResponse.statusCode}`); + progressCallback({ message: `Retrying (${response.retryCount+1}) multipart copy of ${relativePath || oldFilePath}. Error: ${response.error} ${response.httpResponse.statusCode}` }); }); }, function chunksCopied(error) { if (error) { // we must still recommend the user to set a AbortIncompleteMultipartUpload lifecycle rule @@ -416,7 +414,7 @@ function copy(apiConfig, oldFilePath, newFilePath) { Key: path.join(newFilePath, relativePath), UploadId: uploadId }; - events.emit('progress', `Aborting multipart copy of ${relativePath || oldFilePath}`); + progressCallback({ message: `Aborting multipart copy of ${relativePath || oldFilePath}` }); return s3.abortMultipartUpload(abortParams, () => done(error)); // ignore any abort errors } @@ -427,7 +425,7 @@ function copy(apiConfig, oldFilePath, newFilePath) { UploadId: uploadId }; - events.emit('progress', `Finishing multipart copy - ${completeMultipartParams.Key}`); + progressCallback({ message: `Finishing multipart copy - ${completeMultipartParams.Key}` }); s3.completeMultipartUpload(completeMultipartParams, done); }); @@ -436,21 +434,19 @@ function copy(apiConfig, oldFilePath, newFilePath) { let total = 0; const concurrency = apiConfig.copyConcurrency || (apiConfig.provider === 's3' ? 
500 : 10); - events.emit('progress', `Copying with concurrency of ${concurrency}`); + progressCallback({ message: `Copying with concurrency of ${concurrency}` }); - listDir(apiConfig, oldFilePath, 1000, function listDirIterator(entries, done) { + const listDirAsync = util.promisify(listDir); + + const [copyError] = await safe(listDirAsync(apiConfig, oldFilePath, 1000, function listDirIterator(entries, done) { total += entries.length; - events.emit('progress', `Copying files from ${total-entries.length}-${total}`); + progressCallback({ message: `Copying files from ${total-entries.length}-${total}` }); async.eachLimit(entries, concurrency, copyFile, done); - }, function (error) { - events.emit('progress', `Copied ${total} files with error: ${error}`); + })); - process.nextTick(() => events.emit('done', error)); - }); - - return events; + progressCallback({ message: `Copied ${total} files with error: ${copyError}` }); if (copyError) throw copyError; } async function remove(apiConfig, filename) { diff --git a/src/test/storage-test.js b/src/test/storage-test.js index 6a46aa0a1..2678667df 100644 --- a/src/test/storage-test.js +++ b/src/test/storage-test.js @@ -139,16 +139,12 @@ describe('Storage', function () { }); }); - it('can copy', function (done) { + it('can copy', async function () { const sourceFile = gTmpFolder + '/uploadtest/test.txt'; // keep the test within save device const destFile = gTmpFolder + '/uploadtest/test-hardlink.txt'; - const events = filesystem.copy(gBackupConfig, sourceFile, destFile); - events.on('done', function (error) { - expect(error).to.be(null); - expect(fs.statSync(destFile).nlink).to.be(2); // created a hardlink - done(); - }); + await filesystem.copy(gBackupConfig, sourceFile, destFile, () => {}); + expect(fs.statSync(destFile).nlink).to.be(2); // created a hardlink }); it('can remove file', async function () { @@ -193,12 +189,8 @@ describe('Storage', function () { }, done); }); - it('can copy', function (done) { - const events = noop.copy(gBackupConfig, 
'sourceFile', 'destFile'); - events.on('done', function (error) { - expect(error).to.be(null); - done(); - }); + it('can copy', async function () { + await noop.copy(gBackupConfig, 'sourceFile', 'destFile', () => {}); }); it('can remove file', async function () { @@ -269,21 +261,15 @@ describe('Storage', function () { }); }); - it('can copy', function (done) { + it('can copy', async function () { fs.writeFileSync(path.join(gS3Folder, 'uploadtest/C++.gitignore'), 'special', 'utf8'); const sourceKey = 'uploadtest'; - const events = s3.copy(gBackupConfig, sourceKey, 'uploadtest-copy'); - events.on('done', function (error) { - const sourceFile = path.join(__dirname, 'storage/data/test.txt'); - expect(error).to.be(null); - expect(fs.statSync(path.join(gS3Folder, 'uploadtest-copy/test.txt')).size).to.be(fs.statSync(sourceFile).size); - - expect(fs.statSync(path.join(gS3Folder, 'uploadtest-copy/C++.gitignore')).size).to.be(7); - - done(); - }); + await s3.copy(gBackupConfig, sourceKey, 'uploadtest-copy', () => {}); + const sourceFile = path.join(__dirname, 'storage/data/test.txt'); + expect(fs.statSync(path.join(gS3Folder, 'uploadtest-copy/test.txt')).size).to.be(fs.statSync(sourceFile).size); + expect(fs.statSync(path.join(gS3Folder, 'uploadtest-copy/C++.gitignore')).size).to.be(7); }); xit('can remove file', async function () {