storage: start migration of s3 api

This commit is contained in:
Girish Ramakrishnan
2025-02-12 20:56:46 +01:00
parent 9888aa8c08
commit a138425298
7 changed files with 828 additions and 558 deletions

View File

@@ -14,7 +14,6 @@ const backups = require('../backups.js'),
filesystem = require('../storage/filesystem.js'),
fs = require('fs'),
gcs = require('../storage/gcs.js'),
MockS3 = require('mock-aws-s3'),
noop = require('../storage/noop.js'),
os = require('os'),
path = require('path'),
@@ -183,13 +182,13 @@ describe('Storage', function () {
});
it('can remove empty dir', async function () {
await noop.remove(gBackupConfig, 'sourceDir', () => {});
await noop.remove(gBackupConfig, 'sourceDir');
});
});
describe('s3', function () {
let gS3Folder;
const gBackupConfig = {
const basePath = path.join(os.tmpdir(), 's3-backup-test-buckets');
const backupConfig = {
provider: 's3',
key: 'key',
prefix: 'unit.test',
@@ -199,69 +198,118 @@ describe('Storage', function () {
region: 'eu-central-1',
format: 'tgz'
};
const bucketPath = path.join(basePath, backupConfig.bucket);
// Minimal stand-in for @aws-sdk/lib-storage's Upload class used by s3.js.
// Instead of multipart-uploading to S3, it streams params.Body into a local
// file under basePath/<Bucket>/<Key> so tests can inspect the result on disk.
// Fix: removed two leftover debug console.log calls from the constructor.
class S3MockUpload {
    constructor(args) { // { client: s3, params, partSize, queueSize: 3, leavePartsOnError: false }
        const destFilePath = path.join(basePath, args.params.Bucket, args.params.Key);
        // Key may contain '/' separators; create intermediate directories first.
        fs.mkdirSync(path.dirname(destFilePath), { recursive: true });
        this.pipeline = stream.pipeline(args.params.Body, fs.createWriteStream(destFilePath));
    }
    // The real Upload emits 'httpUploadProgress' events; the mock ignores them.
    on() {}
    // Resolves once the body has been fully written to disk.
    async done() {
        await this.pipeline;
    }
}
// Minimal stand-in for @aws-sdk/client-s3's S3 client, backed by the local
// filesystem under basePath. Each method asserts (via expect) that s3.js
// forwarded the configured credentials/bucket correctly.
// Fix: removed a leftover debug console.log from copyObject.
class S3Mock {
    constructor(cfg) {
        expect(cfg.credentials).to.eql({ // retryDelayOptions is a function
            accessKeyId: backupConfig.accessKeyId,
            secretAccessKey: backupConfig.secretAccessKey
        });
        expect(cfg.region).to.be(backupConfig.region);
    }
    // Returns a fixed, single-page listing; the tests only check that known
    // keys appear in the result. NOTE(review): Prefix/Marker in params are
    // ignored — confirm s3.listDir terminates without IsTruncated/NextMarker.
    async listObjects(params) {
        expect(params.Bucket).to.be(backupConfig.bucket);
        return {
            Contents: [{
                Key: 'uploadtest/test.txt',
                Size: 23
            }, {
                Key: 'uploadtest/C++.gitignore',
                Size: 23
            }]
        };
    }
    async copyObject(params) {
        await fs.promises.mkdir(path.dirname(path.join(bucketPath, params.Key)), { recursive: true });
        // CopySource already includes the bucket segment and is URL-encoded,
        // so '+' arrives as %2B and must be decoded before hitting the disk.
        await fs.promises.copyFile(path.join(basePath, params.CopySource.replace(/%2B/g, '+')), path.join(bucketPath, params.Key));
    }
    // Deletes each listed object from the mock bucket; rmSync throws on a
    // missing file, which the 'cannot remove non-existent file' test relies on.
    async deleteObjects(params) {
        expect(params.Bucket).to.be(backupConfig.bucket);
        params.Delete.Objects.forEach(o => fs.rmSync(path.join(bucketPath, o.Key)));
    }
}
before(function () {
MockS3.config.basePath = path.join(os.tmpdir(), 's3-backup-test-buckets/');
fs.rmSync(MockS3.config.basePath, { recursive: true, force: true });
gS3Folder = path.join(MockS3.config.basePath, gBackupConfig.bucket);
s3._mockInject(MockS3);
fs.rmSync(basePath, { recursive: true, force: true });
globalThis.S3Mock = S3Mock;
globalThis.S3MockUpload = S3MockUpload;
});
after(function () {
s3._mockRestore();
fs.rmSync(MockS3.config.basePath, { recursive: true, force: true });
// fs.rmSync(basePath, { recursive: true, force: true });
delete globalThis.S3Mock;
delete globalThis.S3MockUpload;
});
it('can upload', async function () {
const sourceFile = path.join(__dirname, 'storage/data/test.txt');
const sourceStream = fs.createReadStream(sourceFile);
const destKey = 'uploadtest/test.txt';
const uploader = await s3.upload(gBackupConfig, destKey);
const uploader = await s3.upload(backupConfig, destKey);
await stream.pipeline(sourceStream, uploader.stream);
await uploader.finish();
expect(fs.existsSync(path.join(gS3Folder, destKey))).to.be(true);
expect(fs.statSync(path.join(gS3Folder, destKey)).size).to.be(fs.statSync(sourceFile).size);
expect(fs.existsSync(path.join(bucketPath, destKey))).to.be(true);
expect(fs.statSync(path.join(bucketPath, destKey)).size).to.be(fs.statSync(sourceFile).size);
});
it('can download file', async function () {
const sourceKey = 'uploadtest/test.txt';
const [error, stream] = await safe(s3.download(gBackupConfig, sourceKey));
const [error, outstream] = await safe(s3.download(backupConfig, sourceKey));
expect(error).to.be(null);
expect(stream).to.be.an('object');
expect(outstream).to.be.an('object');
});
it('list dir lists contents of source dir', async function () {
let allFiles = [ ], marker = null;
while (true) {
const result = await s3.listDir(gBackupConfig, '', 1, marker);
const result = await s3.listDir(backupConfig, '', 1, marker);
allFiles = allFiles.concat(result.entries);
if (!result.marker) break;
marker = result.marker;
}
expect(allFiles.map(function (f) { return f.fullPath; }).sort()).to.eql([ 'uploadtest/test.txt' ]);
expect(allFiles.map(function (f) { return f.fullPath; })).to.contain('uploadtest/test.txt');
});
it('can copy', async function () {
fs.writeFileSync(path.join(gS3Folder, 'uploadtest/C++.gitignore'), 'special', 'utf8');
fs.writeFileSync(path.join(bucketPath, 'uploadtest/C++.gitignore'), 'special', 'utf8');
const sourceKey = 'uploadtest';
await s3.copy(gBackupConfig, sourceKey, 'uploadtest-copy', () => {});
await s3.copy(backupConfig, 'uploadtest', 'uploadtest-copy', () => {});
const sourceFile = path.join(__dirname, 'storage/data/test.txt');
expect(fs.statSync(path.join(gS3Folder, 'uploadtest-copy/test.txt')).size).to.be(fs.statSync(sourceFile).size);
expect(fs.statSync(path.join(gS3Folder, 'uploadtest-copy/C++.gitignore')).size).to.be(7);
expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/test.txt')).size).to.be(fs.statSync(sourceFile).size);
expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/C++.gitignore')).size).to.be(7);
});
it('can remove file', async function () {
await s3.remove(gBackupConfig, 'uploadtest-copy/test.txt');
expect(fs.existsSync(path.join(gS3Folder, 'uploadtest-copy/test.txt'))).to.be(false);
await s3.remove(backupConfig, 'uploadtest/test.txt');
expect(fs.existsSync(path.join(bucketPath, 'uploadtest/test.txt'))).to.be(false);
});
it('can remove non-existent dir', async function () {
await noop.remove(gBackupConfig, 'blah', () => {});
it('cannot remove non-existent file', async function () {
const [error] = await safe(s3.remove(backupConfig, 'blah'));
expect(error).to.be.ok();
});
});
@@ -271,85 +319,91 @@ describe('Storage', function () {
key: '',
prefix: 'unit.test',
bucket: 'cloudron-storage-test',
projectId: '',
projectId: 'some-project',
credentials: {
client_email: '',
private_key: ''
client_email: 'some-client',
private_key: 'some-key'
}
};
const GCSMockBasePath = path.join(os.tmpdir(), 'gcs-backup-test-buckets/');
// Mock for a @google-cloud/storage Bucket, backed by files under GCSMockBasePath.
class GCSMockBucket {
constructor(name) {
// gcs.js must always open the configured bucket.
expect(name).to.be(gBackupConfig.bucket);
}
// Returns a mock File object for `filename` (path relative to the mock base dir).
file(filename) {
// Prefix the mock base path and create parent dirs so subsequent writes succeed.
function ensurePathWritable(filename) {
filename = GCSMockBasePath + filename;
fs.mkdirSync(path.dirname(filename), { recursive: true });
return filename;
}
return {
name: filename,
createReadStream: function() {
return fs.createReadStream(ensurePathWritable(filename))
.on('error', function(e){
console.log('error createReadStream: '+filename);
// GCS signals a missing object with a numeric 404 code; translate ENOENT.
if (e.code == 'ENOENT') { e.code = 404; }
this.emit('error', e);
});
},
createWriteStream: function() {
return fs.createWriteStream(ensurePathWritable(filename));
},
delete: async function() {
await fs.promises.unlink(ensurePathWritable(filename));
},
// Copies this file to `dst` and invokes cb(err?) in GCS callback style.
copy: function(dst, cb) {
function notFoundHandler(e) {
// Map a missing source to 404, matching GCS error codes.
if (e && e.code == 'ENOENT') { e.code = 404; return cb(e); }
cb();
}
// NOTE(review): cb is attached to 'end' on both sides, but writable
// streams emit 'finish', not 'end' — cb likely fires only from the read
// side, possibly before the write is flushed. Confirm intended.
return fs.createReadStream(ensurePathWritable(filename))
.on('end', cb)
.on('error', notFoundHandler)
.pipe(fs.createWriteStream(ensurePathWritable(dst)))
.on('end', cb)
.on('error', notFoundHandler);
}
};
}
// Mock of Bucket.getFiles pagination: returns [files, nextQuery|null].
// Pagination state is kept by mutating q.pageToken, mirroring how the real
// API returns the query object to pass into the next call.
async getFiles(q) {
const target = path.join(GCSMockBasePath, q.prefix);
// NOTE(review): `target` is interpolated into a shell command; acceptable
// only because it is a tmpdir path under test control.
const files = execSync(`find ${target} -type f`, { encoding: 'utf8' }).trim().split('\n');
const pageToken = q.pageToken || 0;
// Split the flat listing into pages of q.maxResults entries each.
const chunkedFiles = chunk(files, q.maxResults);
if (q.pageToken >= chunkedFiles.length) return [[], null];
const gFiles = chunkedFiles[pageToken].map(f => {
return this.file(path.relative(GCSMockBasePath, f));
});
q.pageToken = pageToken + 1;
// Return the mutated query as the "next page" token while pages remain.
return [ gFiles, q.pageToken < chunkedFiles.length ? q : null ];
}
};
// Mock for the @google-cloud/storage Storage client consumed by gcs.js.
// Validates that the configured project/credentials were passed through,
// then hands out filesystem-backed bucket mocks.
class GCSMock {
    constructor(cfg) {
        // gcs.js must construct the client from the backup config verbatim.
        expect(cfg.projectId).to.be(gBackupConfig.projectId);
        expect(cfg.credentials.private_key).to.be(gBackupConfig.credentials.private_key);
    }

    // Bucket name validation happens inside GCSMockBucket's constructor.
    bucket(bucketName) {
        return new GCSMockBucket(bucketName);
    }
}
before(function () {
const mockGCS = function() {
return {
bucket: function() {
const file = function (filename) {
function ensurePathWritable(filename) {
filename = GCSMockBasePath + filename;
fs.mkdirSync(path.dirname(filename), { recursive: true });
return filename;
}
return {
name: filename,
createReadStream: function() {
return fs.createReadStream(ensurePathWritable(filename))
.on('error', function(e){
console.log('error createReadStream: '+filename);
if (e.code == 'ENOENT') { e.code = 404; }
this.emit('error', e);
});
},
createWriteStream: function() {
return fs.createWriteStream(ensurePathWritable(filename));
},
delete: async function() {
await fs.promises.unlink(ensurePathWritable(filename));
},
copy: function(dst, cb) {
function notFoundHandler(e) {
if (e && e.code == 'ENOENT') { e.code = 404; return cb(e); }
cb();
}
return fs.createReadStream(ensurePathWritable(filename))
.on('end', cb)
.on('error', notFoundHandler)
.pipe(fs.createWriteStream(ensurePathWritable(dst)))
.on('end', cb)
.on('error', notFoundHandler);
}
};
};
return {
file,
getFiles: async function(q) {
const target = path.join(GCSMockBasePath, q.prefix);
const files = execSync(`find ${target} -type f`, { encoding: 'utf8' }).trim().split('\n');
const pageToken = q.pageToken || 0;
const chunkedFiles = chunk(files, q.maxResults);
if (q.pageToken >= chunkedFiles.length) return [[], null];
const gFiles = chunkedFiles[pageToken].map(function(f) {
return file(path.relative(GCSMockBasePath, f)); //convert to gcs
});
q.pageToken = pageToken + 1;
return [ gFiles, q.pageToken < chunkedFiles.length ? q : null ];
}
};
}};
};
gcs._mockInject(mockGCS);
globalThis.GCSMock = GCSMock;
});
after(function (done) {
gcs._mockRestore();
after(function () {
fs.rmSync(GCSMockBasePath, { recursive: true, force: true });
done();
delete globalThis.GCSMock;
});
it('can backup', async function () {