/* global it:false */
/* global describe:false */
/* global before:false */
/* global after:false */
/* global xit:false */

'use strict';

const backupSites = require('../backupsites.js'),
    BoxError = require('../boxerror.js'),
    common = require('./common.js'),
    consumers = require('node:stream/consumers'),
    execSync = require('node:child_process').execSync,
    expect = require('expect.js'),
    filesystem = require('../storage/filesystem.js'),
    fs = require('node:fs'),
    gcs = require('../storage/gcs.js'),
    os = require('node:os'),
    path = require('node:path'),
    s3 = require('../storage/s3.js'),
    safe = require('safetydance'),
    stream = require('stream/promises');

// Exercises the three storage backends (filesystem, s3, gcs). The s3 and gcs
// suites install mock client classes on globalThis which the backend modules
// pick up instead of the real SDK clients.
describe('Storage', function () {
    const { setup, cleanup, getDefaultBackupSite, auditSource } = common;

    before(setup);
    after(cleanup);

    describe('filesystem', function () {
        let gTmpFolder;
        const gBackupConfig = {
            key: 'key',
            backupDir: null, // filled in once the tmp folder exists
            prefix: 'someprefix',
            _provider: 'filesystem' // need this internal variable since we call the backend logic directly for testing
        };
        let defaultBackupSite;

        before(async function () {
            gTmpFolder = fs.mkdtempSync(path.join(os.tmpdir(), 'filesystem-storage-test_'));
            defaultBackupSite = await getDefaultBackupSite();
            gBackupConfig.backupDir = path.join(gTmpFolder, 'backups/');
        });

        after(function (done) {
            fs.rmSync(gTmpFolder, { recursive: true, force: true });
            done();
        });

        it('fails to set backup storage for bad folder', async function () {
            const tmp = Object.assign({}, gBackupConfig, { backupDir: '/root/oof' });
            const [error] = await safe(backupSites.setConfig(defaultBackupSite, tmp, auditSource));
            expect(error.reason).to.equal(BoxError.BAD_FIELD);
        });

        it('succeeds to set backup storage', async function () {
            await backupSites.setConfig(defaultBackupSite, gBackupConfig, auditSource);
            expect(fs.existsSync(path.join(gBackupConfig.backupDir, 'someprefix/snapshot'))).to.be(true); // auto-created
        });

        it('can upload', async function () {
            const sourceFile = path.join(__dirname, 'storage/data/test.txt');
            const sourceStream = fs.createReadStream(sourceFile);
            const destFile = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, '/uploadtest/test.txt');

            const uploader = await filesystem.upload(gBackupConfig, {}, 'uploadtest/test.txt');
            await stream.pipeline(sourceStream, uploader.createStream());
            await uploader.finish();

            expect(fs.existsSync(destFile)).to.be(true); // FIX: bare expect() asserted nothing
            expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size);
        });

        xit('upload waits for empty file to be created', async function () {
            const sourceFile = path.join(__dirname, 'storage/data/empty');
            const sourceStream = fs.createReadStream(sourceFile);
            const destFile = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, '/uploadtest/empty');

            // FIX: upload() takes the relative key like every other test, not the absolute path
            const uploader = await filesystem.upload(gBackupConfig, {}, 'uploadtest/empty');
            await stream.pipeline(sourceStream, uploader.createStream());
            await uploader.finish();

            expect(fs.existsSync(destFile)).to.be(true); // FIX: bare expect() asserted nothing
            expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size);
        });

        it('upload unlinks old file', async function () {
            const sourceFile = path.join(__dirname, 'storage/data/test.txt');
            const sourceStream = fs.createReadStream(sourceFile);
            const destFile = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, '/uploadtest/test.txt');
            const oldStat = fs.statSync(destFile); // exists from the 'can upload' test above

            const uploader = await filesystem.upload(gBackupConfig, {}, 'uploadtest/test.txt');
            await stream.pipeline(sourceStream, uploader.createStream());
            await uploader.finish();

            expect(fs.existsSync(destFile)).to.be(true);
            expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size);
            // FIX: fs.Stats exposes .ino (not .inode) and the old inode must be
            // compared against the new inode — the original compared undefined to a size.
            expect(oldStat.ino).to.not.be(fs.statSync(destFile).ino);
        });

        it('can download file', async function () {
            const sourceFile = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, '/uploadtest/test.txt');
            // renamed from 'stream' to avoid shadowing the stream/promises import
            const [error, downloadStream] = await safe(filesystem.download(gBackupConfig, 'uploadtest/test.txt'));
            expect(error).to.be(null);
            expect(downloadStream).to.be.an('object');
            const data = await consumers.buffer(downloadStream);
            expect(fs.readFileSync(sourceFile)).to.eql(data); // buffer compare
        });

        it('download errors for missing file', async function () {
            const [error] = await safe(filesystem.download(gBackupConfig, 'uploadtest/missing'));
            expect(error.reason).to.be(BoxError.NOT_FOUND);
        });

        it('list dir lists the source dir', async function () {
            const sourceDir = path.join(__dirname, 'storage');
            execSync(`cp -r ${sourceDir} ${gBackupConfig.backupDir}/${gBackupConfig.prefix}`, { encoding: 'utf8' });

            // page through listDir with page size 1 to exercise the marker logic
            let allFiles = [], marker = null;
            while (true) {
                const result = await filesystem.listDir(gBackupConfig, 'storage', 1, marker);
                allFiles = allFiles.concat(result.entries);
                if (!result.marker) break;
                marker = result.marker;
            }

            const expectedFiles = execSync(`find . -type f -printf '%P\n'`, { cwd: sourceDir, encoding: 'utf8' }).trim().split('\n').map(p => `storage/${p}`);
            expect(allFiles.map(function (f) { return f.path; }).sort()).to.eql(expectedFiles.sort());
        });

        it('can copy', async function () {
            // source is uploadtest/test.txt - keep the test within same device so hardlinking works
            const destFile = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, '/uploadtest/test-hardlink.txt');
            await filesystem.copy(gBackupConfig, 'uploadtest/test.txt', 'uploadtest/test-hardlink.txt', () => {});
            expect(fs.statSync(destFile).nlink).to.be(2); // created a hardlink
        });

        it('can remove file', async function () {
            const sourceFile = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, '/uploadtest/test-hardlink.txt');
            await filesystem.remove(gBackupConfig, 'uploadtest/test-hardlink.txt');
            expect(fs.existsSync(sourceFile)).to.be(false);
        });

        it('can remove empty dir', async function () {
            const sourceDir = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, 'emptydir');
            fs.mkdirSync(sourceDir);
            await filesystem.remove(gBackupConfig, 'emptydir', () => {});
            expect(fs.existsSync(sourceDir)).to.be(false);
        });
    });

    describe('s3', function () {
        const basePath = path.join(os.tmpdir(), 's3-backup-test-buckets');

        const backupConfig = {
            provider: 's3',
            key: 'key',
            prefix: 'prefix-test',
            bucket: 'cloudron-storage-test',
            accessKeyId: 'testkeyid',
            secretAccessKey: 'testsecret',
            region: 'eu-central-1',
            format: 'tgz'
        };

        const bucketPath = path.join(basePath, backupConfig.bucket, backupConfig.prefix);
        const bucketPathNoPrefix = path.join(basePath, backupConfig.bucket);

        // Mimics @aws-sdk/lib-storage Upload by piping the body into a local file.
        class S3MockUpload {
            constructor(args) { // { client: s3, params, partSize, queueSize: 3, leavePartsOnError: false }
                const destFilePath = path.join(basePath, args.params.Bucket, args.params.Key);
                fs.mkdirSync(path.dirname(destFilePath), { recursive: true });
                this.pipeline = stream.pipeline(args.params.Body, fs.createWriteStream(destFilePath));
            }
            on() {}
            async done() { await this.pipeline; }
        }

        // Mimics the S3 client by mapping object keys onto the local filesystem.
        class S3Mock {
            constructor(cfg) {
                expect(cfg.credentials).to.eql({ // retryDelayOptions is a function
                    accessKeyId: backupConfig.accessKeyId,
                    secretAccessKey: backupConfig.secretAccessKey
                });
                expect(cfg.region).to.be(backupConfig.region);
            }

            async headObject(params) {
                expect(params.Bucket).to.be(backupConfig.bucket);
                const stat = await fs.promises.stat(path.join(bucketPathNoPrefix, params.Key));
                return { ContentLength: stat.size };
            }

            async listObjectsV2(params) {
                expect(params.Bucket).to.be(backupConfig.bucket);
                return {
                    Contents: [
                        { Key: `${backupConfig.prefix}/uploadtest/test.txt`, Size: 23 },
                        { Key: `${backupConfig.prefix}/uploadtest/C++.gitignore`, Size: 23 }
                    ]
                };
            }

            async copyObject(params) {
                // CopySource already has the bucket path (and is URL-encoded, hence the %2B decode)
                const source = path.join(basePath, params.CopySource.replace(/%2B/g, '+'));
                // Key already has prefix but no bucket path
                const dest = path.join(bucketPathNoPrefix, params.Key);
                await fs.promises.mkdir(path.dirname(dest), { recursive: true });
                await fs.promises.copyFile(source, dest);
            }

            async deleteObject(params) {
                expect(params.Bucket).to.be(backupConfig.bucket);
                fs.rmSync(path.join(bucketPathNoPrefix, params.Key));
            }

            async deleteObjects(params) {
                expect(params.Bucket).to.be(backupConfig.bucket);
                params.Delete.Objects.forEach(o => fs.rmSync(path.join(bucketPathNoPrefix, o.Key)));
            }
        }

        before(function () {
            fs.rmSync(basePath, { recursive: true, force: true });
            globalThis.S3Mock = S3Mock;
            globalThis.S3MockUpload = S3MockUpload;
        });

        after(function () {
            // fs.rmSync(basePath, { recursive: true, force: true });
            delete globalThis.S3Mock;
            delete globalThis.S3MockUpload;
        });

        it('can upload', async function () {
            const sourceFile = path.join(__dirname, 'storage/data/test.txt');
            const sourceStream = fs.createReadStream(sourceFile);
            const destKey = 'uploadtest/test.txt';

            const uploader = await s3.upload(backupConfig, {}, destKey);
            await stream.pipeline(sourceStream, uploader.createStream());
            await uploader.finish();

            expect(fs.existsSync(path.join(bucketPath, destKey))).to.be(true);
            expect(fs.statSync(path.join(bucketPath, destKey)).size).to.be(fs.statSync(sourceFile).size);
        });

        it('can download file', async function () {
            const sourceKey = 'uploadtest/test.txt';
            const [error, outstream] = await safe(s3.download(backupConfig, sourceKey));
            expect(error).to.be(null);
            expect(outstream).to.be.an('object');
        });

        it('list dir lists contents of source dir', async function () {
            let allFiles = [], marker = null;
            while (true) {
                const result = await s3.listDir(backupConfig, '', 1, marker);
                allFiles = allFiles.concat(result.entries);
                if (!result.marker) break;
                marker = result.marker;
            }
            expect(allFiles.map(function (f) { return f.path; })).to.contain('uploadtest/test.txt');
        });

        it('can copy', async function () {
            fs.writeFileSync(path.join(bucketPath, 'uploadtest/C++.gitignore'), 'special', 'utf8');

            await s3.copyDir(backupConfig, {}, 'uploadtest', 'uploadtest-copy', () => {});

            const sourceFile = path.join(__dirname, 'storage/data/test.txt');
            expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/test.txt')).size).to.be(fs.statSync(sourceFile).size);
            expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/C++.gitignore')).size).to.be(7); // 'special'
        });

        it('can remove file', async function () {
            await s3.remove(backupConfig, 'uploadtest/test.txt');
            expect(fs.existsSync(path.join(bucketPath, 'uploadtest/test.txt'))).to.be(false);
        });

        it('cannot remove non-existent file', async function () {
            const [error] = await safe(s3.remove(backupConfig, 'blah'));
            expect(error).to.be.ok();
        });
    });

    describe('gcs', function () {
        const backupConfig = {
            provider: 'gcs',
            key: '',
            prefix: 'unit.test',
            bucket: 'cloudron-storage-test',
            projectId: 'some-project',
            credentials: {
                client_email: 'some-client',
                private_key: 'some-key'
            }
        };

        const basePath = path.join(os.tmpdir(), 'gcs-backup-test-buckets/');
        const bucketPath = path.join(basePath, backupConfig.bucket, backupConfig.prefix);
        const bucketPathNoPrefix = path.join(basePath, backupConfig.bucket);

        // Mimics a @google-cloud/storage Bucket backed by the local filesystem.
        class GCSMockBucket {
            constructor(name) {
                expect(name).to.be(backupConfig.bucket);
            }

            file(key) { // key already has the prefix
                function getFullWritablePath(key) {
                    const fullPath = path.join(bucketPathNoPrefix, key);
                    fs.mkdirSync(path.dirname(fullPath), { recursive: true });
                    return fullPath;
                }

                return {
                    name: key,
                    createReadStream: function () {
                        return fs.createReadStream(getFullWritablePath(key))
                            .on('error', function (e) {
                                if (e.code == 'ENOENT') { e.code = 404; } // gcs reports missing objects with http-style codes
                                this.emit('error', e);
                            });
                    },
                    createWriteStream: function () {
                        return fs.createWriteStream(getFullWritablePath(key));
                    },
                    delete: async function () {
                        await fs.promises.unlink(getFullWritablePath(key));
                    },
                    copy: async function (destKey) {
                        await fs.promises.mkdir(path.dirname(path.join(bucketPathNoPrefix, destKey)), { recursive: true });
                        await fs.promises.copyFile(path.join(bucketPathNoPrefix, key), path.join(bucketPathNoPrefix, destKey));
                    }
                };
            }

            async getFiles(q) {
                expect(q.maxResults).to.be.a('number');
                expect(q.prefix).to.be.a('string');

                const files = [
                    { name: `${backupConfig.prefix}/uploadtest/test.txt` },
                    { name: `${backupConfig.prefix}/uploadtest/C++.gitignore` }
                ];
                return [ files, null ];
            }
        }

        class GCSMock {
            constructor(config) {
                expect(config.projectId).to.be(backupConfig.projectId);
                expect(config.credentials.private_key).to.be(backupConfig.credentials.private_key);
            }

            bucket(name) { return new GCSMockBucket(name); }
        }

        before(function () {
            globalThis.GCSMock = GCSMock;
        });

        after(function () {
            fs.rmSync(basePath, { recursive: true, force: true });
            delete globalThis.GCSMock;
        });

        it('can upload', async function () {
            const sourceFile = path.join(__dirname, 'storage/data/test.txt');
            const sourceStream = fs.createReadStream(sourceFile);
            const destKey = 'uploadtest/test.txt';

            const uploader = await gcs.upload(backupConfig, {}, destKey);
            await stream.pipeline(sourceStream, uploader.createStream());
            await uploader.finish();

            expect(fs.existsSync(path.join(bucketPath, destKey))).to.be(true);
            expect(fs.statSync(path.join(bucketPath, destKey)).size).to.be(fs.statSync(sourceFile).size);
        });

        it('can download file', async function () {
            const sourceKey = 'uploadtest/test.txt';
            // renamed from 'stream' to avoid shadowing the stream/promises import
            const [error, downloadStream] = await safe(gcs.download(backupConfig, sourceKey));
            expect(error).to.be(null);
            expect(downloadStream).to.be.an('object');
        });

        it('list dir lists contents of source dir', async function () {
            let allFiles = [], marker = null;
            while (true) {
                const result = await gcs.listDir(backupConfig, '', 1, marker);
                allFiles = allFiles.concat(result.entries);
                if (!result.marker) break;
                marker = result.marker;
            }
            expect(allFiles.map(function (f) { return f.path; })).to.contain('uploadtest/test.txt');
        });

        it('can copy', async function () {
            fs.writeFileSync(path.join(bucketPath, 'uploadtest/C++.gitignore'), 'special', 'utf8');

            await gcs.copyDir(backupConfig, {}, 'uploadtest', 'uploadtest-copy', () => {});

            const sourceFile = path.join(__dirname, 'storage/data/test.txt');
            expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/test.txt')).size).to.be(fs.statSync(sourceFile).size);
            expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/C++.gitignore')).size).to.be(7); // 'special'
        });

        it('can remove file', async function () {
            await gcs.remove(backupConfig, 'uploadtest-copy/test.txt');
            // FIX: check under bucketPath (bucket + prefix); the original checked
            // basePath directly, which never contains the file, so the assertion was vacuous.
            expect(fs.existsSync(path.join(bucketPath, 'uploadtest-copy/test.txt'))).to.be(false);
        });

        it('can remove non-existent dir', async function () {
            await gcs.remove(backupConfig, 'blah', () => {});
        });
    });
});