Files
cloudron-box/src/storage/gcs.js
2025-02-13 11:09:15 +01:00

238 lines
8.7 KiB
JavaScript

'use strict';

// Google Cloud Storage (GCS) backup storage backend.
// Every function takes an apiConfig object carrying the bucket settings:
// projectId, credentials ({ client_email, private_key }), bucket and prefix.
// This module implements the common storage-backend interface exported below.
exports = module.exports = {
    getAvailableSize,
    upload,
    exists,
    download,
    copy,
    listDir,
    remove,
    removeDir,
    cleanup,
    testConfig,
    removePrivateFields,
    injectPrivateFields,
};
const assert = require('assert'),
async = require('async'),
BoxError = require('../boxerror.js'),
constants = require('../constants.js'),
debug = require('debug')('box:storage/gcs'),
GCS = require('@google-cloud/storage').Storage,
path = require('path'),
safe = require('safetydance');
// Build a Bucket handle from the backup configuration.
// Under test (constants.TEST) the globally-installed GCSMock replaces the
// real @google-cloud/storage client.
function getBucket(apiConfig) {
    assert.strictEqual(typeof apiConfig, 'object');

    const clientOptions = {
        projectId: apiConfig.projectId,
        credentials: {
            client_email: apiConfig.credentials.client_email,
            private_key: apiConfig.credentials.private_key
        }
    };

    const client = constants.TEST ? new globalThis.GCSMock(clientOptions) : new GCS(clientOptions);

    return client.bucket(apiConfig.bucket);
}
// GCS buckets have no practical size limit, so report unlimited capacity.
async function getAvailableSize(apiConfig) {
    assert.strictEqual(typeof apiConfig, 'object');

    return Infinity;
}
// Open a non-resumable write stream to backupFilePath in the bucket.
// Returns { stream, finish } where finish() is a no-op for GCS — the
// stream's own lifecycle signals completion.
async function upload(apiConfig, backupFilePath) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof backupFilePath, 'string');

    debug(`Uploading to ${backupFilePath}`);

    const file = getBucket(apiConfig).file(backupFilePath);
    const stream = file.createWriteStream({ resumable: false });

    return { stream, async finish() {} };
}
// Check whether a file exists. A path ending in '/' is treated as a
// directory prefix, which "exists" if at least one object lives under it.
// Returns a boolean; throws BoxError.EXTERNAL_ERROR on API failure.
async function exists(apiConfig, backupFilePath) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof backupFilePath, 'string');

    const bucket = getBucket(apiConfig);

    if (!backupFilePath.endsWith('/')) {
        const file = bucket.file(backupFilePath);
        const [error] = await safe(file.getMetadata());
        if (error && error.code === 404) return false;
        if (error) throw new BoxError(BoxError.EXTERNAL_ERROR, error.message);
        return true;
    } else {
        const query = {
            prefix: backupFilePath,
            maxResults: 1,
            autoPaginate: false // one page with one result is enough; do not walk the whole prefix
        };
        const [error, result] = await safe(bucket.getFiles(query));
        if (error) throw new BoxError(BoxError.EXTERNAL_ERROR, error.message);
        // getFiles() resolves to [files, nextQuery, apiResponse]; destructure
        // the file array out (previously the whole tuple was length-checked,
        // which is always non-empty). Matches the pattern used in listDir().
        const [files] = result;
        return files.length !== 0;
    }
}
// Return a readable stream for backupFilePath. Errors (including 404)
// surface as 'error' events on the returned stream, not as throws here.
async function download(apiConfig, backupFilePath) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof backupFilePath, 'string');

    debug(`Download ${backupFilePath} starting`);

    return getBucket(apiConfig).file(backupFilePath).createReadStream();
}
// List up to batchSize objects under backupFilePath. Pass marker=null for
// the first page and the returned marker (the GCS nextQuery token) for
// subsequent pages. Returns { entries: [{ fullPath }], marker } with
// marker null once the listing is exhausted.
async function listDir(apiConfig, backupFilePath, batchSize, marker) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof backupFilePath, 'string');
    assert.strictEqual(typeof batchSize, 'number');
    assert(typeof marker !== 'undefined');

    const bucket = getBucket(apiConfig);

    // first call starts a fresh prefix listing; later calls resume from the
    // nextQuery token of the previous page
    let query;
    if (marker) {
        query = marker;
    } else {
        query = { prefix: backupFilePath, autoPaginate: false, maxResults: batchSize };
    }

    const [listError, result] = await safe(bucket.getFiles(query));
    if (listError) throw new BoxError(BoxError.EXTERNAL_ERROR, `Failed to get files: ${listError.message}`);

    const [files, nextQuery] = result;
    if (files.length === 0) return { entries: [], marker: null }; // no more

    return {
        entries: files.map((f) => ({ fullPath: f.name })),
        marker: nextQuery || null
    };
}
// Copy everything under oldFilePath to newFilePath, batch by batch, with
// bounded concurrency. progressCallback receives { message } updates.
// Throws BoxError.NOT_FOUND if a source object vanishes mid-copy and
// BoxError.EXTERNAL_ERROR for other API failures.
async function copy(apiConfig, oldFilePath, newFilePath, progressCallback) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof oldFilePath, 'string');
    assert.strictEqual(typeof newFilePath, 'string');
    assert.strictEqual(typeof progressCallback, 'function');

    // copy a single object, preserving its path relative to oldFilePath
    async function copyFile(entry) {
        const relativePath = path.relative(oldFilePath, entry.fullPath);
        const [copyError] = await safe(getBucket(apiConfig).file(entry.fullPath).copy(path.join(newFilePath, relativePath)));
        if (copyError) debug('copyBackup: gcs copy error. %o', copyError);
        if (copyError && copyError.code === 404) throw new BoxError(BoxError.NOT_FOUND, 'Old backup not found');
        if (copyError) throw new BoxError(BoxError.EXTERNAL_ERROR, copyError.message);
    }

    const batchSize = 1000;
    const concurrency = apiConfig.limits?.copyConcurrency || 10;

    let total = 0;
    let marker = null;
    while (true) {
        const batch = await listDir(apiConfig, oldFilePath, batchSize, marker);
        if (batch.entries.length === 0) break; // empty/nonexistent prefix; previously crashed on entries[0]

        total += batch.entries.length;
        const first = batch.entries[0].fullPath;
        const last = batch.entries[batch.entries.length-1].fullPath;
        progressCallback({ message: `Copying ${batch.entries.length} files from ${first} to ${last}. total: ${total}` });

        await async.eachLimit(batch.entries, concurrency, copyFile);

        if (!batch.marker) break;
        marker = batch.marker;
    }
    progressCallback({ message: `Copied ${total} files` });
}
// Best-effort delete of a single object. Failures are logged and swallowed
// on purpose — backup removal is not fatal.
async function remove(apiConfig, filename) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof filename, 'string');

    const file = getBucket(apiConfig).file(filename);
    const [delError] = await safe(file.delete());
    if (delError) debug('removeBackups: Unable to remove %s (%s). Not fatal.', filename, delError.message);
}
// Delete everything under pathPrefix, batch by batch, with bounded
// concurrency. Individual delete failures are non-fatal (see remove()).
// progressCallback receives { message } updates.
async function removeDir(apiConfig, pathPrefix, progressCallback) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof pathPrefix, 'string');
    assert.strictEqual(typeof progressCallback, 'function');

    const batchSize = 1000;
    // https://googleapis.dev/nodejs/storage/latest/Bucket.html#deleteFiles
    const concurrency = apiConfig.limits?.deleteConcurrency || 10;

    let total = 0;
    let marker = null;
    while (true) {
        const batch = await listDir(apiConfig, pathPrefix, batchSize, marker);
        const entries = batch.entries;
        if (entries.length === 0) break; // empty/nonexistent prefix; previously crashed on entries[0]

        total += entries.length;
        progressCallback({ message: `Removing ${entries.length} files from ${entries[0].fullPath} to ${entries[entries.length-1].fullPath}. total: ${total}` });

        await async.eachLimit(entries, concurrency, async (entry) => await remove(apiConfig, entry.fullPath));

        if (!batch.marker) break;
        marker = batch.marker;
    }
    // use the 'message' key like every other progress callback in this file (was 'progress')
    progressCallback({ message: `Deleted ${total} files` });
}
// GCS leaves nothing behind to clean up; this no-op exists only to satisfy
// the common storage-backend interface (see module exports).
async function cleanup(apiConfig, progressCallback) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof progressCallback, 'function');
}
// Validate apiConfig by uploading and then deleting a small test file.
// Throws BoxError.BAD_FIELD for malformed fields or rejected credentials
// (403/404) and BoxError.EXTERNAL_ERROR for other API failures.
async function testConfig(apiConfig) {
    assert.strictEqual(typeof apiConfig, 'object');

    if (typeof apiConfig.projectId !== 'string') throw new BoxError(BoxError.BAD_FIELD, 'projectId must be a string');
    if (!apiConfig.credentials || typeof apiConfig.credentials !== 'object') throw new BoxError(BoxError.BAD_FIELD, 'credentials must be an object');
    if (typeof apiConfig.credentials.client_email !== 'string') throw new BoxError(BoxError.BAD_FIELD, 'credentials.client_email must be a string');
    if (typeof apiConfig.credentials.private_key !== 'string') throw new BoxError(BoxError.BAD_FIELD, 'credentials.private_key must be a string');
    if (typeof apiConfig.bucket !== 'string') throw new BoxError(BoxError.BAD_FIELD, 'bucket must be a string');
    if (typeof apiConfig.prefix !== 'string') throw new BoxError(BoxError.BAD_FIELD, 'prefix must be a string');

    // attempt to upload and delete a file with new credentials
    const bucket = getBucket(apiConfig);
    const testFile = bucket.file(path.join(apiConfig.prefix, 'cloudron-testfile'));

    const uploadStream = testFile.createWriteStream({ resumable: false });
    await new Promise((resolve, reject) => {
        uploadStream.on('error', function (error) {
            debug('testConfig: failed uploading cloudron-testfile. %o', error);
            // 403/404 mean bad credentials or missing bucket => a config problem
            if (error && (error.code === 403 || error.code === 404)) return reject(new BoxError(BoxError.BAD_FIELD, error.message));
            return reject(new BoxError(BoxError.EXTERNAL_ERROR, error.message));
        });
        // attach before writing so the event cannot be missed
        uploadStream.on('finish', resolve);
        uploadStream.write('testfilecontents');
        uploadStream.end();
    });
    debug('testConfig: uploaded cloudron-testfile');

    // reuse the same file handle instead of rebuilding the path
    const [delError] = await safe(testFile.delete());
    if (delError) throw new BoxError(BoxError.EXTERNAL_ERROR, delError.message);
    debug('testConfig: deleted cloudron-testfile');
}
// Mask the private key with a placeholder before the config is exposed
// (e.g. returned over the API). Mutates apiConfig in place and returns it.
function removePrivateFields(apiConfig) {
    apiConfig.credentials.private_key = constants.SECRET_PLACEHOLDER;
    return apiConfig;
}
// Restore the real private key into newConfig when the caller submitted the
// masked placeholder (counterpart of removePrivateFields). Mutates newConfig.
function injectPrivateFields(newConfig, currentConfig) {
    const isMasked = newConfig.credentials.private_key === constants.SECRET_PLACEHOLDER;
    if (isMasked && currentConfig.credentials) {
        newConfig.credentials.private_key = currentConfig.credentials.private_key;
    }
}