backups: move limits and storage into separate keys

This commit is contained in:
Girish Ramakrishnan
2023-08-15 08:14:35 +05:30
parent 630853abb5
commit cd9d49116e
8 changed files with 101 additions and 73 deletions
+32 -23
View File
@@ -32,6 +32,8 @@ exports = module.exports = {
getConfig,
setConfig,
setStorage,
setLimits,
remount,
getMountStatus,
@@ -414,27 +416,37 @@ async function setPolicy(policy) {
}
async function getConfig() {
const value = await settings.getJson(settings.BACKUP_CONFIG_KEY);
return value || {
provider: 'filesystem',
backupFolder: paths.DEFAULT_BACKUP_DIR,
format: 'tgz',
encryption: null,
};
const result = await settings.getJson(settings.BACKUP_STORAGE_KEY) || { provider: 'filesystem', backupFolder: paths.DEFAULT_BACKUP_DIR, format: 'tgz', encryption: null };
const limits = await settings.getJson(settings.BACKUP_LIMITS_KEY);
if (limits) result.limits = limits;
return result;
}
// Persists a combined backup configuration, splitting it across the two
// underlying settings keys: every field except 'limits' is stored under
// BACKUP_STORAGE_KEY, and the optional 'limits' sub-object under
// BACKUP_LIMITS_KEY (null when absent, mirroring what getConfig expects).
async function setConfig(backupConfig) {
    assert.strictEqual(typeof backupConfig, 'object');

    const { limits, ...storage } = backupConfig; // split limits off the storage fields
    await settings.setJson(settings.BACKUP_STORAGE_KEY, storage);
    await settings.setJson(settings.BACKUP_LIMITS_KEY, limits || null);
}
// Persists backup concurrency/size limits under their own settings key.
// NOTE(review): `typeof null === 'object'`, so a null argument passes the
// assert and clears the stored limits — presumably intentional, since
// setConfig stores null when no limits are given; confirm against callers.
async function setLimits(limits) {
    assert.strictEqual(typeof limits, 'object');
    await settings.setJson(settings.BACKUP_LIMITS_KEY, limits);
}
async function setStorage(storageConfig) {
assert.strictEqual(typeof storageConfig, 'object');
const oldConfig = await getConfig();
injectPrivateFields(backupConfig, oldConfig);
injectPrivateFields(storageConfig, oldConfig);
if (mounts.isManagedProvider(backupConfig.provider)) {
let error = mounts.validateMountOptions(backupConfig.provider, backupConfig.mountOptions);
if (mounts.isManagedProvider(storageConfig.provider)) {
let error = mounts.validateMountOptions(storageConfig.provider, storageConfig.mountOptions);
if (error) throw error;
[error] = await safe(mounts.tryAddMount(mountObjectFromBackupConfig(backupConfig), { timeout: 10 })); // 10 seconds
[error] = await safe(mounts.tryAddMount(mountObjectFromBackupConfig(storageConfig), { timeout: 10 })); // 10 seconds
if (error) {
if (mounts.isManagedProvider(oldConfig.provider)) { // put back the old mount configuration
@@ -447,26 +459,23 @@ async function setConfig(backupConfig) {
}
}
const error = await testConfig(backupConfig);
const error = await testConfig(storageConfig);
if (error) throw error;
if ('password' in backupConfig) { // user set password
const error = await validateEncryptionPassword(backupConfig.password);
if ('password' in storageConfig) { // user set password
const error = await validateEncryptionPassword(storageConfig.password);
if (error) throw error;
backupConfig.encryption = generateEncryptionKeysSync(backupConfig.password);
delete backupConfig.password;
storageConfig.encryption = generateEncryptionKeysSync(storageConfig.password);
delete storageConfig.password;
}
// if any of these changes, we have to clear the cache
if (!_.isEqual(_.omit(backupConfig, 'limits'), _.omit(oldConfig, 'limits'))) {
debug('setBackupConfig: clearing backup cache');
cleanupCacheFilesSync();
}
debug('setBackupConfig: clearing backup cache');
cleanupCacheFilesSync();
await settings.setJson(settings.BACKUP_CONFIG_KEY, backupConfig);
await settings.setJson(settings.BACKUP_STORAGE_KEY, storageConfig);
if (mounts.isManagedProvider(oldConfig.provider) && !mounts.isManagedProvider(backupConfig.provider)) {
if (mounts.isManagedProvider(oldConfig.provider) && !mounts.isManagedProvider(storageConfig.provider)) {
debug('setBackupConfig: removing old backup mount point');
await safe(mounts.removeMount(mountObjectFromBackupConfig(oldConfig)));
}
+39 -31
View File
@@ -9,7 +9,9 @@ exports = module.exports = {
getMountStatus,
getConfig,
setConfig,
setStorage,
setLimits,
getPolicy,
setPolicy
};
@@ -83,41 +85,47 @@ async function getConfig(req, res, next) {
next(new HttpSuccess(200, backups.removePrivateFields(backupConfig)));
}
async function setConfig(req, res, next) {
// Limit fields that, when present, must be positive numbers. Shared by the
// validation loop below so each field is checked once instead of via five
// copy-pasted stanzas.
const POSITIVE_LIMIT_FIELDS = ['syncConcurrency', 'copyConcurrency', 'downloadConcurrency', 'deleteConcurrency', 'uploadPartSize'];

// Returns a 400-style error message if limits[field] is present but not a
// positive number, otherwise null. (The message says "integer", matching the
// original wording, though the check — like the original — accepts any
// number >= 1.)
function checkPositiveLimit(limits, field) {
    if (!(field in limits)) return null;
    if (typeof limits[field] !== 'number' || limits[field] < 1) return `${field} must be a positive integer`;
    return null;
}

// PUT/POST handler: validates the request body as a backup limits object and
// stores it via backups.setLimits(). Responds 400 on validation failure,
// 200 with an empty body on success.
async function setLimits(req, res, next) {
    assert.strictEqual(typeof req.body, 'object');

    const limits = req.body;

    for (const field of POSITIVE_LIMIT_FIELDS) {
        const message = checkPositiveLimit(limits, field);
        if (message) return next(new HttpError(400, message));
    }

    // NOTE(review): unlike the fields above, memoryLimit is only type-checked,
    // yet the message claims "positive integer". If 0/negative memory limits
    // are invalid, add the `< 1` check here too — confirm intended behavior.
    if ('memoryLimit' in limits && typeof limits.memoryLimit !== 'number') return next(new HttpError(400, 'memoryLimit must be a positive integer'));

    const [error] = await safe(backups.setLimits(limits));
    if (error) return next(BoxError.toHttpError(error));

    next(new HttpSuccess(200, {}));
}
async function setStorage(req, res, next) {
assert.strictEqual(typeof req.body, 'object');
if (typeof req.body.provider !== 'string') return next(new HttpError(400, 'provider is required'));
if ('password' in req.body && typeof req.body.password !== 'string') return next(new HttpError(400, 'password must be a string'));
if ('encryptedFilenames' in req.body && typeof req.body.encryptedFilenames !== 'boolean') return next(new HttpError(400, 'encryptedFilenames must be a boolean'));
if (req.body.limits) {
if (typeof req.body.limits !== 'object') return next(new HttpError(400, 'limits must be an object'));
const limits = req.body.limits;
if ('syncConcurrency' in limits) {
if (typeof limits.syncConcurrency !== 'number') return next(new HttpError(400, 'syncConcurrency must be a positive integer'));
if (limits.syncConcurrency < 1) return next(new HttpError(400, 'syncConcurrency must be a positive integer'));
}
if ('copyConcurrency' in limits) {
if (typeof limits.copyConcurrency !== 'number') return next(new HttpError(400, 'copyConcurrency must be a positive integer'));
if (limits.copyConcurrency < 1) return next(new HttpError(400, 'copyConcurrency must be a positive integer'));
}
if ('downloadConcurrency' in limits) {
if (typeof limits.downloadConcurrency !== 'number') return next(new HttpError(400, 'downloadConcurrency must be a positive integer'));
if (limits.downloadConcurrency < 1) return next(new HttpError(400, 'downloadConcurrency must be a positive integer'));
}
if ('deleteConcurrency' in limits) {
if (typeof limits.deleteConcurrency !== 'number') return next(new HttpError(400, 'deleteConcurrency must be a positive integer'));
if (limits.deleteConcurrency < 1) return next(new HttpError(400, 'deleteConcurrency must be a positive integer'));
}
if ('uploadPartSize' in limits) {
if (typeof limits.uploadPartSize !== 'number') return next(new HttpError(400, 'uploadPartSize must be a positive integer'));
if (limits.uploadPartSize < 1) return next(new HttpError(400, 'uploadPartSize must be a positive integer'));
}
if ('memoryLimit' in limits && typeof limits.memoryLimit !== 'number') return next(new HttpError(400, 'memoryLimit must be a positive integer'));
}
if (typeof req.body.format !== 'string') return next(new HttpError(400, 'format must be a string'));
if ('acceptSelfSignedCerts' in req.body && typeof req.body.acceptSelfSignedCerts !== 'boolean') return next(new HttpError(400, 'acceptSelfSignedCerts must be a boolean'));
@@ -126,7 +134,7 @@ async function setConfig(req, res, next) {
// testing the backup using put/del takes a bit of time at times
req.clearTimeout();
const [error] = await safe(backups.setConfig(req.body));
const [error] = await safe(backups.setStorage(req.body));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, {}));
+11 -10
View File
@@ -155,16 +155,17 @@ async function initializeExpressSync() {
router.post('/api/v1/notifications/:notificationId', json, token, authorizeAdmin, routes.notifications.load, routes.notifications.update);
// backup routes
router.get ('/api/v1/backups', token, authorizeAdmin, routes.backups.list);
router.get ('/api/v1/backups/mount_status', token, authorizeAdmin, routes.backups.getMountStatus);
router.post('/api/v1/backups/create', token, authorizeAdmin, routes.backups.create);
router.post('/api/v1/backups/cleanup', json, token, authorizeAdmin, routes.backups.cleanup);
router.post('/api/v1/backups/remount', json, token, authorizeAdmin, routes.backups.remount);
router.get ('/api/v1/backups/config', token, authorizeAdmin, routes.backups.getConfig);
router.post('/api/v1/backups/config', json, token, authorizeOwner, routes.backups.setConfig);
router.get ('/api/v1/backups/policy', token, authorizeAdmin, routes.backups.getPolicy);
router.post('/api/v1/backups/policy', json, token, authorizeOwner, routes.backups.setPolicy);
router.post('/api/v1/backups/:backupId', json, token, authorizeAdmin, routes.backups.update);
router.get ('/api/v1/backups', token, authorizeAdmin, routes.backups.list);
router.get ('/api/v1/backups/mount_status', token, authorizeAdmin, routes.backups.getMountStatus);
router.post('/api/v1/backups/create', token, authorizeAdmin, routes.backups.create);
router.post('/api/v1/backups/cleanup', json, token, authorizeAdmin, routes.backups.cleanup);
router.post('/api/v1/backups/remount', json, token, authorizeAdmin, routes.backups.remount);
router.get ('/api/v1/backups/config', token, authorizeAdmin, routes.backups.getConfig);
router.post('/api/v1/backups/config/storage', json, token, authorizeOwner, routes.backups.setStorage);
router.post('/api/v1/backups/config/limits', json, token, authorizeOwner, routes.backups.setLimits);
router.get ('/api/v1/backups/policy', token, authorizeAdmin, routes.backups.getPolicy);
router.post('/api/v1/backups/policy', json, token, authorizeOwner, routes.backups.setPolicy);
router.post('/api/v1/backups/:backupId', json, token, authorizeAdmin, routes.backups.update);
// working off the user behind the provided token
router.get ('/api/v1/profile', token, authorizeUser, routes.profile.get);
+2 -1
View File
@@ -14,7 +14,8 @@ exports = module.exports = {
APPSTORE_WEB_TOKEN_KEY: 'appstore_web_token',
API_SERVER_ORIGIN_KEY: 'api_server_origin',
AUTOUPDATE_PATTERN_KEY: 'autoupdate_pattern',
BACKUP_CONFIG_KEY: 'backup_config',
BACKUP_STORAGE_KEY: 'backup_storage',
BACKUP_LIMITS_KEY: 'backup_limits',
BACKUP_POLICY_KEY: 'backup_policy',
CLOUDRON_AVATAR_KEY: 'cloudron_avatar',
CLOUDRON_ID_KEY: 'cloudron_id',
+1 -1
View File
@@ -210,7 +210,7 @@ describe('backup cleaner', function () {
};
before(async function () {
await settings._set(settings.BACKUP_CONFIG_KEY, JSON.stringify({
await settings._set(settings.BACKUP_STORAGE_KEY, JSON.stringify({
provider: 'filesystem',
password: 'supersecret',
backupFolder: '/tmp/someplace',
+1 -1
View File
@@ -33,7 +33,7 @@ describe('backuptask', function () {
before(async function () {
fs.rmSync(backupConfig.backupFolder, { recursive: true, force: true });
await backups.setConfig(backupConfig);
await backups.setStorage(backupConfig);
});
async function createBackup() {