diff --git a/package-lock.json b/package-lock.json index 704235f7e..dfefcc521 100644 --- a/package-lock.json +++ b/package-lock.json @@ -69,7 +69,7 @@ "hock": "^1.4.1", "js2xmlparser": "^5.0.0", "mocha": "^10.4.0", - "mock-aws-s3": "git+https://github.com/cloudron-io/mock-aws-s3.git", + "mock-aws-s3": "github:cloudron-io/mock-aws-s3#0ad36e5ba", "nock": "^13.5.4", "ssh2": "^1.15.0", "yesno": "^0.4.0" @@ -2552,34 +2552,26 @@ "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" }, "node_modules/fs-extra": { - "version": "0.6.4", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", + "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", "dev": true, "dependencies": { - "jsonfile": "~1.0.1", - "mkdirp": "0.3.x", - "ncp": "~0.4.2", - "rimraf": "~2.2.0" + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + }, + "engines": { + "node": ">=6 <7 || >=8" } }, - "node_modules/fs-extra/node_modules/mkdirp": { - "version": "0.3.5", + "node_modules/fs-extra/node_modules/universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", "dev": true, - "license": "MIT" - }, - "node_modules/fs-extra/node_modules/ncp": { - "version": "0.4.2", - "dev": true, - "license": "MIT", - "bin": { - "ncp": "bin/ncp" - } - }, - "node_modules/fs-extra/node_modules/rimraf": { - "version": "2.2.8", - "dev": true, - "license": "MIT", - "bin": { - "rimraf": "bin.js" + "engines": { + "node": ">= 4.0.0" } }, "node_modules/fs.realpath": { @@ -2745,6 +2737,12 @@ "url": "https://github.com/sindresorhus/got?sponsor=1" } }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + 
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, "node_modules/gtoken": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", @@ -3344,8 +3342,13 @@ "license": "ISC" }, "node_modules/jsonfile": { - "version": "1.0.1", - "dev": true + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==", + "dev": true, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } }, "node_modules/jsonwebtoken": { "version": "9.0.2", @@ -3919,21 +3922,23 @@ } }, "node_modules/mock-aws-s3": { - "version": "2.6.0", - "resolved": "git+ssh://git@github.com/cloudron-io/mock-aws-s3.git#1306f1722b82897382a2339d52a94ded15003d8c", + "version": "4.0.2", + "resolved": "git+ssh://git@github.com/cloudron-io/mock-aws-s3.git#0ad36e5bae8d821921779012dd9f1a70397daca3", "dev": true, "dependencies": { - "fs-extra": "0.6.4", - "underscore": "1.8.3" + "bluebird": "^3.5.1", + "fs-extra": "^7.0.1", + "underscore": "1.12.1" }, "engines": { - "node": ">= 0.8.0" + "node": ">=10.0.0" } }, "node_modules/mock-aws-s3/node_modules/underscore": { - "version": "1.8.3", - "dev": true, - "license": "MIT" + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.12.1.tgz", + "integrity": "sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw==", + "dev": true }, "node_modules/moment": { "version": "2.30.1", diff --git a/package.json b/package.json index 650f4a2e6..e78e942b7 100644 --- a/package.json +++ b/package.json @@ -73,7 +73,7 @@ "hock": "^1.4.1", "js2xmlparser": "^5.0.0", "mocha": "^10.4.0", - "mock-aws-s3": "git+https://github.com/cloudron-io/mock-aws-s3.git", + "mock-aws-s3": "github:cloudron-io/mock-aws-s3#0ad36e5ba", "nock": "^13.5.4", "ssh2": "^1.15.0", "yesno": "^0.4.0" 
diff --git a/src/storage/gcs.js b/src/storage/gcs.js index 9569fa8f5..2bb405e6b 100644 --- a/src/storage/gcs.js +++ b/src/storage/gcs.js @@ -84,7 +84,7 @@ async function upload(apiConfig, backupFilePath) { .createWriteStream({ resumable: false }); return { - uploadStream, + stream: uploadStream, async finish() {} }; } diff --git a/src/test/storage-test.js b/src/test/storage-test.js index 52cbe82d2..d7f5925f3 100644 --- a/src/test/storage-test.js +++ b/src/test/storage-test.js @@ -20,7 +20,8 @@ const backups = require('../backups.js'), path = require('path'), readdirp = require('readdirp'), s3 = require('../storage/s3.js'), - safe = require('safetydance'); + safe = require('safetydance'), + stream = require('stream/promises'); const chunk = s3._chunk; @@ -64,42 +65,39 @@ describe('Storage', function () { expect(fs.existsSync(path.join(gBackupConfig.backupFolder, 'snapshot'))).to.be(true); // auto-created }); - it('can upload', function (done) { + it('can upload', async function () { const sourceFile = path.join(__dirname, 'storage/data/test.txt'); const sourceStream = fs.createReadStream(sourceFile); const destFile = gTmpFolder + '/uploadtest/test.txt'; - filesystem.upload(gBackupConfig, destFile, sourceStream, function (error) { - expect(error).to.be(null); - expect(fs.existsSync(destFile)); - expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size); - done(); - }); + const uploader = await filesystem.upload(gBackupConfig, destFile); + await stream.pipeline(sourceStream, uploader.stream); + await uploader.finish(); + expect(fs.existsSync(destFile)).to.be(true); + expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size); }); - it('upload waits for empty file to be created', function (done) { + it('upload waits for empty file to be created', async function () { const sourceFile = path.join(__dirname, 'storage/data/empty'); const sourceStream = fs.createReadStream(sourceFile); const destFile = gTmpFolder + '/uploadtest/empty'; - 
filesystem.upload(gBackupConfig, destFile, sourceStream, function (error) { - expect(error).to.be(null); - expect(fs.existsSync(destFile)); - expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size); - done(); - }); + const uploader = await filesystem.upload(gBackupConfig, destFile); + await stream.pipeline(sourceStream, uploader.stream); + await uploader.finish(); + expect(fs.existsSync(destFile)).to.be(true); + expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size); }); - it('upload unlinks old file', function (done) { + it('upload unlinks old file', async function () { const sourceFile = path.join(__dirname, 'storage/data/test.txt'); const sourceStream = fs.createReadStream(sourceFile); const destFile = gTmpFolder + '/uploadtest/test.txt'; const oldStat = fs.statSync(destFile); - filesystem.upload(gBackupConfig, destFile, sourceStream, function (error) { - expect(error).to.be(null); - expect(fs.existsSync(destFile)).to.be(true); - expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size); - expect(oldStat.inode).to.not.be(fs.statSync(destFile).size); - done(); - }); + const uploader = await filesystem.upload(gBackupConfig, destFile); + await stream.pipeline(sourceStream, uploader.stream); + await uploader.finish(); + expect(fs.existsSync(destFile)).to.be(true); + expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size); + expect(oldStat.ino).to.not.be(fs.statSync(destFile).ino); }); it('can download file', async function () { @@ -219,16 +217,15 @@ describe('Storage', function () { fs.rmSync(MockS3.config.basePath, { recursive: true, force: true }); }); - it('can upload', function (done) { + it('can upload', async function () { const sourceFile = path.join(__dirname, 'storage/data/test.txt'); const sourceStream = fs.createReadStream(sourceFile); const destKey = 'uploadtest/test.txt'; - s3.upload(gBackupConfig, destKey, sourceStream, function (error) { - expect(error).to.be(null); - 
expect(fs.existsSync(path.join(gS3Folder, destKey))).to.be(true); - expect(fs.statSync(path.join(gS3Folder, destKey)).size).to.be(fs.statSync(sourceFile).size); - done(); - }); + const uploader = await s3.upload(gBackupConfig, destKey); + await stream.pipeline(sourceStream, uploader.stream); + await uploader.finish(); + expect(fs.existsSync(path.join(gS3Folder, destKey))).to.be(true); + expect(fs.statSync(path.join(gS3Folder, destKey)).size).to.be(fs.statSync(sourceFile).size); }); it('can download file', async function () { @@ -261,12 +258,12 @@ describe('Storage', function () { expect(fs.statSync(path.join(gS3Folder, 'uploadtest-copy/C++.gitignore')).size).to.be(7); }); - xit('can remove file', async function () { + it('can remove file', async function () { await s3.remove(gBackupConfig, 'uploadtest-copy/test.txt'); expect(fs.existsSync(path.join(gS3Folder, 'uploadtest-copy/test.txt'))).to.be(false); }); - xit('can remove non-existent dir', async function () { + it('can remove non-existent dir', async function () { await noop.remove(gBackupConfig, 'blah', () => {}); }); }); @@ -304,8 +301,7 @@ describe('Storage', function () { console.log('error createReadStream: '+filename); if (e.code == 'ENOENT') { e.code = 404; } this.emit('error', e); - }) - ; + }); }, createWriteStream: function() { return fs.createWriteStream(ensurePathWritable(filename)); @@ -324,8 +320,7 @@ describe('Storage', function () { .on('error', notFoundHandler) .pipe(fs.createWriteStream(ensurePathWritable(dst))) .on('end', cb) - .on('error', notFoundHandler) - ; + .on('error', notFoundHandler); } }; }; @@ -360,15 +355,13 @@ describe('Storage', function () { done(); }); - it('can backup', function (done) { + it('can backup', async function () { const sourceFile = path.join(__dirname, 'storage/data/test.txt'); const sourceStream = fs.createReadStream(sourceFile); const destKey = 'uploadtest/test.txt'; - gcs.upload(gBackupConfig, destKey, sourceStream, function (error) { - 
expect(error).to.be(null); - - done(); - }); + const uploader = await gcs.upload(gBackupConfig, destKey); + await stream.pipeline(sourceStream, uploader.stream); + await uploader.finish(); }); it('can download file', async function () {