eventlog: add backup target eventlog
This commit is contained in:
@@ -0,0 +1,467 @@
|
||||
/* global it:false */
|
||||
/* global describe:false */
|
||||
/* global before:false */
|
||||
/* global after:false */
|
||||
/* global xit:false */
|
||||
|
||||
'use strict';
|
||||
|
||||
const backupTargets = require('../backuptargets.js'),
|
||||
BoxError = require('../boxerror.js'),
|
||||
common = require('./common.js'),
|
||||
execSync = require('child_process').execSync,
|
||||
expect = require('expect.js'),
|
||||
filesystem = require('../storage/filesystem.js'),
|
||||
fs = require('fs'),
|
||||
gcs = require('../storage/gcs.js'),
|
||||
noop = require('../storage/noop.js'),
|
||||
os = require('os'),
|
||||
path = require('path'),
|
||||
s3 = require('../storage/s3.js'),
|
||||
safe = require('safetydance'),
|
||||
stream = require('stream/promises');
|
||||
|
||||
const chunk = s3._chunk;
|
||||
|
||||
describe('Storage', function () {
|
||||
const { setup, cleanup, getDefaultBackupTarget, auditSource } = common;
|
||||
|
||||
before(setup);
|
||||
after(cleanup);
|
||||
|
||||
describe('filesystem', function () {
    let gTmpFolder;

    // filesystem provider config; backupFolder is filled in by before()
    const gBackupConfig = {
        key: 'key',
        backupFolder: null,
    };

    let defaultBackupTarget;

    before(async function () {
        gTmpFolder = fs.mkdtempSync(path.join(os.tmpdir(), 'filesystem-storage-test_'));
        defaultBackupTarget = await getDefaultBackupTarget();
        gBackupConfig.backupFolder = path.join(gTmpFolder, 'backups/');
    });

    after(function (done) {
        fs.rmSync(gTmpFolder, { recursive: true, force: true });
        done();
    });

    it('fails to set backup storage for bad folder', async function () {
        const tmp = Object.assign({}, gBackupConfig, { backupFolder: '/root/oof' }); // not writable by the test user
        const [error] = await safe(backupTargets.setConfig(defaultBackupTarget, tmp, auditSource));
        expect(error.reason).to.equal(BoxError.BAD_FIELD);
    });

    it('succeeds to set backup storage', async function () {
        await backupTargets.setConfig(defaultBackupTarget, gBackupConfig, auditSource);
        expect(fs.existsSync(path.join(gBackupConfig.backupFolder, 'snapshot'))).to.be(true); // auto-created
    });

    it('can upload', async function () {
        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
        const sourceStream = fs.createReadStream(sourceFile);
        const destFile = gTmpFolder + '/uploadtest/test.txt';
        const uploader = await filesystem.upload(gBackupConfig, destFile);
        await stream.pipeline(sourceStream, uploader.stream);
        await uploader.finish();
        expect(fs.existsSync(destFile)).to.be(true); // fixed: bare expect() asserted nothing
        expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size);
    });

    it('upload waits for empty file to be created', async function () {
        const sourceFile = path.join(__dirname, 'storage/data/empty');
        const sourceStream = fs.createReadStream(sourceFile);
        const destFile = gTmpFolder + '/uploadtest/empty';
        const uploader = await filesystem.upload(gBackupConfig, destFile);
        await stream.pipeline(sourceStream, uploader.stream);
        await uploader.finish();
        expect(fs.existsSync(destFile)).to.be(true); // fixed: bare expect() asserted nothing
        expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size);
    });

    it('upload unlinks old file', async function () {
        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
        const sourceStream = fs.createReadStream(sourceFile);
        const destFile = gTmpFolder + '/uploadtest/test.txt';
        const oldStat = fs.statSync(destFile);
        const uploader = await filesystem.upload(gBackupConfig, destFile);
        await stream.pipeline(sourceStream, uploader.stream);
        await uploader.finish();
        expect(fs.existsSync(destFile)).to.be(true);
        expect(fs.statSync(sourceFile).size).to.be(fs.statSync(destFile).size);
        // fixed: fs.Stats has .ino, not .inode; and the inode must be compared
        // against the new file's inode (not its size) to prove it was re-created
        expect(oldStat.ino).to.not.be(fs.statSync(destFile).ino);
    });

    it('can download file', async function () {
        const sourceFile = gTmpFolder + '/uploadtest/test.txt';

        const [error, downloadStream] = await safe(filesystem.download(gBackupConfig, sourceFile)); // renamed: avoid shadowing the stream module
        expect(error).to.be(null);
        expect(downloadStream).to.be.an('object');
    });

    it('download errors for missing file', async function () {
        const sourceFile = gTmpFolder + '/uploadtest/missing';

        const [error] = await safe(filesystem.download(gBackupConfig, sourceFile));
        expect(error.reason).to.be(BoxError.NOT_FOUND);
    });

    it('list dir lists the source dir', async function () {
        const sourceDir = path.join(__dirname, 'storage');

        // page through with pageSize 1 to exercise the marker logic
        let allFiles = [], marker = null;
        while (true) {
            const result = await filesystem.listDir(gBackupConfig, sourceDir, 1, marker);
            allFiles = allFiles.concat(result.entries);
            if (!result.marker) break;
            marker = result.marker;
        }

        const expectedFiles = execSync(`find ${sourceDir} -type f`, { encoding: 'utf8' }).trim().split('\n');
        expect(allFiles.map(function (f) { return f.fullPath; }).sort()).to.eql(expectedFiles.sort());
    });

    it('can copy', async function () {
        const sourceFile = gTmpFolder + '/uploadtest/test.txt'; // keep the test within same device
        const destFile = gTmpFolder + '/uploadtest/test-hardlink.txt';

        await filesystem.copy(gBackupConfig, sourceFile, destFile, () => {});
        expect(fs.statSync(destFile).nlink).to.be(2); // created a hardlink
    });

    it('can remove file', async function () {
        const sourceFile = gTmpFolder + '/uploadtest/test-hardlink.txt';

        await filesystem.remove(gBackupConfig, sourceFile);
        expect(fs.existsSync(sourceFile)).to.be(false);
    });

    it('can remove empty dir', async function () {
        const sourceDir = gTmpFolder + '/emptydir';
        fs.mkdirSync(sourceDir);

        await filesystem.remove(gBackupConfig, sourceDir, () => {});
        expect(fs.existsSync(sourceDir)).to.be(false);
    });
});
|
||||
|
||||
describe('noop', function () {
    // the noop provider discards everything: uploads succeed, listings are
    // empty, and downloads always fail because nothing is ever stored
    const gBackupConfig = {
        provider: 'noop',
        format: 'tgz'
    };

    it('upload works', async function () {
        await noop.upload(gBackupConfig, 'file', {});
    });

    it('can download file', async function () {
        // no object can exist, so the download must reject
        const [downloadError] = await safe(noop.download(gBackupConfig, 'file'));
        expect(downloadError).to.be.an(Error);
    });

    it('list dir contents of source dir', async function () {
        const listing = await noop.listDir(gBackupConfig, 'sourceDir', 1000, null /* marker */);
        expect(listing.marker).to.be(null);
        expect(listing.entries).to.eql([]);
    });

    it('can copy', async function () {
        await noop.copy(gBackupConfig, 'sourceFile', 'destFile', () => {});
    });

    it('can remove file', async function () {
        await noop.remove(gBackupConfig, 'sourceFile');
    });

    it('can remove empty dir', async function () {
        await noop.remove(gBackupConfig, 'sourceDir');
    });
});
|
||||
|
||||
describe('s3', function () {
    const basePath = path.join(os.tmpdir(), 's3-backup-test-buckets');
    const backupConfig = {
        provider: 's3',
        key: 'key',
        prefix: 'unit.test',
        bucket: 'cloudron-storage-test',
        accessKeyId: 'testkeyid',
        secretAccessKey: 'testsecret',
        region: 'eu-central-1',
        format: 'tgz'
    };
    const bucketPath = path.join(basePath, backupConfig.bucket);

    // mimics @aws-sdk/lib-storage Upload by piping params.Body into a local file
    class S3MockUpload {
        constructor(args) { // { client: s3, params, partSize, queueSize: 3, leavePartsOnError: false }
            // removed leftover debug console.log statements
            const destFilePath = path.join(basePath, args.params.Bucket, args.params.Key);
            fs.mkdirSync(path.dirname(destFilePath), { recursive: true });
            this.pipeline = stream.pipeline(args.params.Body, fs.createWriteStream(destFilePath));
        }

        on() {}

        async done() {
            await this.pipeline;
        }
    }

    // mimics the @aws-sdk/client-s3 S3 client, backed by the local filesystem
    class S3Mock {
        constructor(cfg) {
            expect(cfg.credentials).to.eql({ // retryDelayOptions is a function
                accessKeyId: backupConfig.accessKeyId,
                secretAccessKey: backupConfig.secretAccessKey
            });
            expect(cfg.region).to.be(backupConfig.region);
        }

        async listObjectsV2(params) {
            expect(params.Bucket).to.be(backupConfig.bucket);
            // fixed two-object listing; the '+' key exercises URL-escaping in copy
            return {
                Contents: [{
                    Key: 'uploadtest/test.txt',
                    Size: 23
                }, {
                    Key: 'uploadtest/C++.gitignore',
                    Size: 23
                }]
            };
        }

        async copyObject(params) {
            await fs.promises.mkdir(path.dirname(path.join(bucketPath, params.Key)), { recursive: true });
            // CopySource already has the bucket path! '+' arrives URL-encoded as %2B
            await fs.promises.copyFile(path.join(basePath, params.CopySource.replace(/%2B/g, '+')), path.join(bucketPath, params.Key));
        }

        async deleteObject(params) {
            expect(params.Bucket).to.be(backupConfig.bucket);
            fs.rmSync(path.join(bucketPath, params.Key)); // throws for missing keys, which the tests rely on
        }

        async deleteObjects(params) {
            expect(params.Bucket).to.be(backupConfig.bucket);
            params.Delete.Objects.forEach(o => fs.rmSync(path.join(bucketPath, o.Key)));
        }
    }

    before(function () {
        fs.rmSync(basePath, { recursive: true, force: true });
        globalThis.S3Mock = S3Mock;
        globalThis.S3MockUpload = S3MockUpload;
    });

    after(function () {
        // fs.rmSync(basePath, { recursive: true, force: true });
        delete globalThis.S3Mock;
        delete globalThis.S3MockUpload;
    });

    it('can upload', async function () {
        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
        const sourceStream = fs.createReadStream(sourceFile);
        const destKey = 'uploadtest/test.txt';
        const uploader = await s3.upload(backupConfig, destKey);
        await stream.pipeline(sourceStream, uploader.stream);
        await uploader.finish();
        expect(fs.existsSync(path.join(bucketPath, destKey))).to.be(true);
        expect(fs.statSync(path.join(bucketPath, destKey)).size).to.be(fs.statSync(sourceFile).size);
    });

    it('can download file', async function () {
        const sourceKey = 'uploadtest/test.txt';
        const [error, outstream] = await safe(s3.download(backupConfig, sourceKey));
        expect(error).to.be(null);
        expect(outstream).to.be.an('object');
    });

    it('list dir lists contents of source dir', async function () {
        // page through with pageSize 1 to exercise the marker logic
        let allFiles = [ ], marker = null;

        while (true) {
            const result = await s3.listDir(backupConfig, '', 1, marker);
            allFiles = allFiles.concat(result.entries);
            if (!result.marker) break;
            marker = result.marker;
        }

        expect(allFiles.map(function (f) { return f.fullPath; })).to.contain('uploadtest/test.txt');
    });

    it('can copy', async function () {
        fs.writeFileSync(path.join(bucketPath, 'uploadtest/C++.gitignore'), 'special', 'utf8');

        await s3.copy(backupConfig, 'uploadtest', 'uploadtest-copy', () => {});
        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
        expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/test.txt')).size).to.be(fs.statSync(sourceFile).size);
        expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/C++.gitignore')).size).to.be(7); // 'special'.length
    });

    it('can remove file', async function () {
        await s3.remove(backupConfig, 'uploadtest/test.txt');
        expect(fs.existsSync(path.join(bucketPath, 'uploadtest/test.txt'))).to.be(false);
    });

    it('cannot remove non-existent file', async function () {
        const [error] = await safe(s3.remove(backupConfig, 'blah'));
        expect(error).to.be.ok();
    });
});
|
||||
|
||||
describe('gcs', function () {
    const gBackupConfig = {
        provider: 'gcs',
        key: '',
        prefix: 'unit.test',
        bucket: 'cloudron-storage-test',
        projectId: 'some-project',
        credentials: {
            client_email: 'some-client',
            private_key: 'some-key'
        }
    };

    const GCSMockBasePath = path.join(os.tmpdir(), 'gcs-backup-test-buckets/');

    // mimics a @google-cloud/storage Bucket, backed by the local filesystem
    class GCSMockBucket {
        constructor(name) {
            expect(name).to.be(gBackupConfig.bucket);
        }

        file(filename) {
            function ensurePathWritable(filename) {
                filename = GCSMockBasePath + filename;
                fs.mkdirSync(path.dirname(filename), { recursive: true });
                return filename;
            }

            return {
                name: filename,
                createReadStream: function() {
                    return fs.createReadStream(ensurePathWritable(filename))
                        .on('error', function(e) {
                            // GCS reports missing objects with an HTTP status code.
                            // fixed: re-emitting 'error' from inside this 'error'
                            // handler re-invoked the handler itself in an unbounded
                            // recursion; mutating e.code is enough, since this
                            // handler runs before any later-attached listener sees e
                            if (e.code === 'ENOENT') e.code = 404;
                        });
                },
                createWriteStream: function() {
                    return fs.createWriteStream(ensurePathWritable(filename));
                },
                delete: async function() {
                    await fs.promises.unlink(ensurePathWritable(filename));
                },
                copy: function(dst, cb) {
                    function notFoundHandler(e) {
                        if (e && e.code === 'ENOENT') { e.code = 404; return cb(e); }
                        cb();
                    }

                    return fs.createReadStream(ensurePathWritable(filename))
                        .on('end', cb)
                        .on('error', notFoundHandler)
                        .pipe(fs.createWriteStream(ensurePathWritable(dst)))
                        .on('end', cb) // NOTE(review): write streams emit 'finish', not 'end'; this listener never fires
                        .on('error', notFoundHandler);
                }
            };
        }

        // paginated listing: returns [files, nextQuery|null] like getFiles() of the GCS SDK
        async getFiles(q) {
            const target = path.join(GCSMockBasePath, q.prefix);
            const files = execSync(`find ${target} -type f`, { encoding: 'utf8' }).trim().split('\n');
            const pageToken = q.pageToken || 0;

            const chunkedFiles = chunk(files, q.maxResults);
            if (pageToken >= chunkedFiles.length) return [[], null]; // fixed: was q.pageToken, undefined on the first call

            const gFiles = chunkedFiles[pageToken].map(f => {
                return this.file(path.relative(GCSMockBasePath, f));
            });

            q.pageToken = pageToken + 1;
            return [ gFiles, q.pageToken < chunkedFiles.length ? q : null ];
        }
    }

    // mimics the @google-cloud/storage Storage client
    class GCSMock {
        constructor(config) {
            expect(config.projectId).to.be(gBackupConfig.projectId);
            expect(config.credentials.private_key).to.be(gBackupConfig.credentials.private_key);
        }

        bucket(name) {
            return new GCSMockBucket(name);
        }
    }

    before(function () {
        globalThis.GCSMock = GCSMock;
    });

    after(function () {
        fs.rmSync(GCSMockBasePath, { recursive: true, force: true });
        delete globalThis.GCSMock;
    });

    it('can backup', async function () {
        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
        const sourceStream = fs.createReadStream(sourceFile);
        const destKey = 'uploadtest/test.txt';
        const uploader = await gcs.upload(gBackupConfig, destKey);
        await stream.pipeline(sourceStream, uploader.stream);
        await uploader.finish();
    });

    it('can download file', async function () {
        const sourceKey = 'uploadtest/test.txt';
        const [error, downloadStream] = await safe(gcs.download(gBackupConfig, sourceKey)); // renamed: avoid shadowing the stream module
        expect(error).to.be(null);
        expect(downloadStream).to.be.an('object');
    });

    it('list dir lists contents of source dir', async function () {
        // page through with pageSize 1 to exercise the marker logic
        let allFiles = [ ], marker = null;

        while (true) {
            const result = await gcs.listDir(gBackupConfig, '', 1, marker);
            allFiles = allFiles.concat(result.entries);
            if (!result.marker) break;
            marker = result.marker;
        }

        expect(allFiles.map(function (f) { return f.fullPath; }).sort()).to.eql([ 'uploadtest/test.txt' ]);
    });

    xit('can copy', function (done) {
        fs.writeFileSync(path.join(GCSMockBasePath, 'uploadtest/C++.gitignore'), 'special', 'utf8');

        const sourceKey = 'uploadtest';

        const events = gcs.copy(gBackupConfig, sourceKey, 'uploadtest-copy');
        events.on('done', function (error) {
            const sourceFile = path.join(__dirname, 'storage/data/test.txt');
            expect(error).to.be(null);
            expect(fs.statSync(path.join(GCSMockBasePath, 'uploadtest-copy/test.txt')).size).to.be(fs.statSync(sourceFile).size);

            expect(fs.statSync(path.join(GCSMockBasePath, 'uploadtest-copy/C++.gitignore')).size).to.be(7);

            done();
        });
    });

    it('can remove file', async function () {
        await gcs.remove(gBackupConfig, 'uploadtest-copy/test.txt');
        expect(fs.existsSync(path.join(GCSMockBasePath, 'uploadtest-copy/test.txt'))).to.be(false);
    });

    it('can remove non-existent dir', async function () {
        await gcs.remove(gBackupConfig, 'blah', () => {});
    });
});
|
||||
});
|
||||
Reference in New Issue
Block a user