diff --git a/dashboard/src/js/client.js b/dashboard/src/js/client.js index d2f4db0d7..765c775af 100644 --- a/dashboard/src/js/client.js +++ b/dashboard/src/js/client.js @@ -987,11 +987,19 @@ angular.module('Application').service('Client', ['$http', '$interval', '$timeout }; Client.prototype.setBackupConfig = function (backupConfig, callback) { - post('/api/v1/backups/config', backupConfig, null, function (error, data, status) { + const storageConfig = Object.assign({}, backupConfig); + delete storageConfig.limits; + + post('/api/v1/backups/config/storage', storageConfig, null, function (error, data, status) { if (error) return callback(error); if (status !== 200) return callback(new ClientError(status, data)); - callback(null); + post('/api/v1/backups/config/limits', backupConfig.limits, null, function (error, data, status) { + if (error) return callback(error); + if (status !== 200) return callback(new ClientError(status, data)); + + callback(null); + }); }); }; diff --git a/migrations/20230712042655-settings-split-backup-config-policy-limit.js b/migrations/20230712042655-settings-split-backup-config-policy-limit.js index 7b6570efb..630613706 100644 --- a/migrations/20230712042655-settings-split-backup-config-policy-limit.js +++ b/migrations/20230712042655-settings-split-backup-config-policy-limit.js @@ -7,19 +7,20 @@ exports.up = async function(db) { if (!result.length) return; const backupConfig = JSON.parse(result[0].value); - // split policy from backupConfig and make limits a sub-object + // split policy, limits from backupConfig const backupPolicy = backupConfig.schedulePattern && backupConfig.retentionPolicy ? 
{ schedule: backupConfig.schedulePattern, retention: backupConfig.retentionPolicy } : null; - const newBackupConfig = _.omit(backupConfig, 'copyConcurrency', 'syncConcurrency', 'memoryLimit', 'downloadConcurrency', + const storageConfig = _.omit(backupConfig, 'copyConcurrency', 'syncConcurrency', 'memoryLimit', 'downloadConcurrency', 'deleteConcurrency', 'uploadPartSize', 'schedulePattern', 'retentionPolicy', 'mountStatus'); - newBackupConfig.limits = _.pick(backupConfig, 'copyConcurrency', 'syncConcurrency', 'memoryLimit', 'downloadConcurrency', + const limits = _.pick(backupConfig, 'copyConcurrency', 'syncConcurrency', 'memoryLimit', 'downloadConcurrency', 'deleteConcurrency', 'uploadPartSize'); await db.runSql('START TRANSACTION'); - await db.runSql('UPDATE settings SET value=? WHERE name=?', [ JSON.stringify(newBackupConfig), 'backup_config']); + await db.runSql('UPDATE settings SET value=?,name=? WHERE name=?', [ JSON.stringify(storageConfig), 'backup_storage', 'backup_config']); // rename if (backupPolicy) { await db.runSql('INSERT INTO settings (name, value) VALUES (?, ?)', [ 'backup_policy', JSON.stringify(backupPolicy) ]); } + await db.runSql('INSERT INTO settings (name, value) VALUES (?, ?)', [ 'backup_limits', JSON.stringify(limits) ]); await db.runSql('COMMIT'); }; diff --git a/src/backups.js b/src/backups.js index 090a9cf60..bae2cb331 100644 --- a/src/backups.js +++ b/src/backups.js @@ -32,6 +32,8 @@ exports = module.exports = { getConfig, setConfig, + setStorage, + setLimits, remount, getMountStatus, @@ -414,27 +416,37 @@ async function setPolicy(policy) { } async function getConfig() { - const value = await settings.getJson(settings.BACKUP_CONFIG_KEY); - return value || { - provider: 'filesystem', - backupFolder: paths.DEFAULT_BACKUP_DIR, - format: 'tgz', - encryption: null, - }; + const result = await settings.getJson(settings.BACKUP_STORAGE_KEY) || { provider: 'filesystem', backupFolder: paths.DEFAULT_BACKUP_DIR, format: 'tgz', encryption: null }; 
+ const limits = await settings.getJson(settings.BACKUP_LIMITS_KEY); + if (limits) result.limits = limits; + return result; } async function setConfig(backupConfig) { assert.strictEqual(typeof backupConfig, 'object'); + await settings.setJson(settings.BACKUP_STORAGE_KEY, _.omit(backupConfig, 'limits')); + await settings.setJson(settings.BACKUP_LIMITS_KEY, backupConfig.limits || null); +} + +async function setLimits(limits) { + assert.strictEqual(typeof limits, 'object'); + + await settings.setJson(settings.BACKUP_LIMITS_KEY, limits); +} + +async function setStorage(storageConfig) { + assert.strictEqual(typeof storageConfig, 'object'); + const oldConfig = await getConfig(); - injectPrivateFields(backupConfig, oldConfig); + injectPrivateFields(storageConfig, oldConfig); - if (mounts.isManagedProvider(backupConfig.provider)) { - let error = mounts.validateMountOptions(backupConfig.provider, backupConfig.mountOptions); + if (mounts.isManagedProvider(storageConfig.provider)) { + let error = mounts.validateMountOptions(storageConfig.provider, storageConfig.mountOptions); if (error) throw error; - [error] = await safe(mounts.tryAddMount(mountObjectFromBackupConfig(backupConfig), { timeout: 10 })); // 10 seconds + [error] = await safe(mounts.tryAddMount(mountObjectFromBackupConfig(storageConfig), { timeout: 10 })); // 10 seconds if (error) { if (mounts.isManagedProvider(oldConfig.provider)) { // put back the old mount configuration @@ -447,26 +459,23 @@ async function setConfig(backupConfig) { } } - const error = await testConfig(backupConfig); + const error = await testConfig(storageConfig); if (error) throw error; - if ('password' in backupConfig) { // user set password - const error = await validateEncryptionPassword(backupConfig.password); + if ('password' in storageConfig) { // user set password + const error = await validateEncryptionPassword(storageConfig.password); if (error) throw error; - backupConfig.encryption = 
generateEncryptionKeysSync(backupConfig.password); - delete backupConfig.password; + storageConfig.encryption = generateEncryptionKeysSync(storageConfig.password); + delete storageConfig.password; } - // if any of these changes, we have to clear the cache - if (!_.isEqual(_.omit(backupConfig, 'limits'), _.omit(oldConfig, 'limits'))) { - debug('setBackupConfig: clearing backup cache'); - cleanupCacheFilesSync(); - } + debug('setBackupConfig: clearing backup cache'); + cleanupCacheFilesSync(); - await settings.setJson(settings.BACKUP_CONFIG_KEY, backupConfig); + await settings.setJson(settings.BACKUP_STORAGE_KEY, storageConfig); - if (mounts.isManagedProvider(oldConfig.provider) && !mounts.isManagedProvider(backupConfig.provider)) { + if (mounts.isManagedProvider(oldConfig.provider) && !mounts.isManagedProvider(storageConfig.provider)) { debug('setBackupConfig: removing old backup mount point'); await safe(mounts.removeMount(mountObjectFromBackupConfig(oldConfig))); } diff --git a/src/routes/backups.js b/src/routes/backups.js index ed7c39f61..2769719cf 100644 --- a/src/routes/backups.js +++ b/src/routes/backups.js @@ -9,7 +9,9 @@ exports = module.exports = { getMountStatus, getConfig, - setConfig, + setStorage, + setLimits, + getPolicy, setPolicy }; @@ -83,41 +85,47 @@ async function getConfig(req, res, next) { next(new HttpSuccess(200, backups.removePrivateFields(backupConfig))); } -async function setConfig(req, res, next) { +async function setLimits(req, res, next) { + assert.strictEqual(typeof req.body, 'object'); + + const limits = req.body; + + if ('syncConcurrency' in limits) { + if (typeof limits.syncConcurrency !== 'number') return next(new HttpError(400, 'syncConcurrency must be a positive integer')); + if (limits.syncConcurrency < 1) return next(new HttpError(400, 'syncConcurrency must be a positive integer')); + } + if ('copyConcurrency' in limits) { + if (typeof limits.copyConcurrency !== 'number') return next(new HttpError(400, 'copyConcurrency must be a 
positive integer')); + if (limits.copyConcurrency < 1) return next(new HttpError(400, 'copyConcurrency must be a positive integer')); + } + if ('downloadConcurrency' in limits) { + if (typeof limits.downloadConcurrency !== 'number') return next(new HttpError(400, 'downloadConcurrency must be a positive integer')); + if (limits.downloadConcurrency < 1) return next(new HttpError(400, 'downloadConcurrency must be a positive integer')); + } + if ('deleteConcurrency' in limits) { + if (typeof limits.deleteConcurrency !== 'number') return next(new HttpError(400, 'deleteConcurrency must be a positive integer')); + if (limits.deleteConcurrency < 1) return next(new HttpError(400, 'deleteConcurrency must be a positive integer')); + } + if ('uploadPartSize' in limits) { + if (typeof limits.uploadPartSize !== 'number') return next(new HttpError(400, 'uploadPartSize must be a positive integer')); + if (limits.uploadPartSize < 1) return next(new HttpError(400, 'uploadPartSize must be a positive integer')); + } + + if ('memoryLimit' in limits && typeof limits.memoryLimit !== 'number') return next(new HttpError(400, 'memoryLimit must be a positive integer')); + + const [error] = await safe(backups.setLimits(req.body)); + if (error) return next(BoxError.toHttpError(error)); + + next(new HttpSuccess(200, {})); +} + +async function setStorage(req, res, next) { assert.strictEqual(typeof req.body, 'object'); if (typeof req.body.provider !== 'string') return next(new HttpError(400, 'provider is required')); if ('password' in req.body && typeof req.body.password !== 'string') return next(new HttpError(400, 'password must be a string')); if ('encryptedFilenames' in req.body && typeof req.body.encryptedFilenames !== 'boolean') return next(new HttpError(400, 'encryptedFilenames must be a boolean')); - if (req.body.limits) { - if (typeof req.body.limits !== 'object') return next(new HttpError(400, 'limits must be an object')); - const limits = req.body; - - if ('syncConcurrency' in limits) { 
- if (typeof limits.syncConcurrency !== 'number') return next(new HttpError(400, 'syncConcurrency must be a positive integer')); - if (limits.syncConcurrency < 1) return next(new HttpError(400, 'syncConcurrency must be a positive integer')); - } - if ('copyConcurrency' in limits) { - if (typeof limits.copyConcurrency !== 'number') return next(new HttpError(400, 'copyConcurrency must be a positive integer')); - if (limits.copyConcurrency < 1) return next(new HttpError(400, 'copyConcurrency must be a positive integer')); - } - if ('downloadConcurrency' in limits) { - if (typeof limits.downloadConcurrency !== 'number') return next(new HttpError(400, 'downloadConcurrency must be a positive integer')); - if (limits.downloadConcurrency < 1) return next(new HttpError(400, 'downloadConcurrency must be a positive integer')); - } - if ('deleteConcurrency' in limits) { - if (typeof limits.deleteConcurrency !== 'number') return next(new HttpError(400, 'deleteConcurrency must be a positive integer')); - if (limits.deleteConcurrency < 1) return next(new HttpError(400, 'deleteConcurrency must be a positive integer')); - } - if ('uploadPartSize' in limits) { - if (typeof limits.uploadPartSize !== 'number') return next(new HttpError(400, 'uploadPartSize must be a positive integer')); - if (limits.uploadPartSize < 1) return next(new HttpError(400, 'uploadPartSize must be a positive integer')); - } - - if ('memoryLimit' in limits && typeof limits.memoryLimit !== 'number') return next(new HttpError(400, 'memoryLimit must be a positive integer')); - } - if (typeof req.body.format !== 'string') return next(new HttpError(400, 'format must be a string')); if ('acceptSelfSignedCerts' in req.body && typeof req.body.acceptSelfSignedCerts !== 'boolean') return next(new HttpError(400, 'format must be a boolean')); @@ -126,7 +134,7 @@ async function setConfig(req, res, next) { // testing the backup using put/del takes a bit of time at times req.clearTimeout(); - const [error] = await 
safe(backups.setConfig(req.body)); + const [error] = await safe(backups.setStorage(req.body)); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, {})); diff --git a/src/server.js b/src/server.js index 596497f02..0deadbe6e 100644 --- a/src/server.js +++ b/src/server.js @@ -155,16 +155,17 @@ async function initializeExpressSync() { router.post('/api/v1/notifications/:notificationId', json, token, authorizeAdmin, routes.notifications.load, routes.notifications.update); // backup routes - router.get ('/api/v1/backups', token, authorizeAdmin, routes.backups.list); - router.get ('/api/v1/backups/mount_status', token, authorizeAdmin, routes.backups.getMountStatus); - router.post('/api/v1/backups/create', token, authorizeAdmin, routes.backups.create); - router.post('/api/v1/backups/cleanup', json, token, authorizeAdmin, routes.backups.cleanup); - router.post('/api/v1/backups/remount', json, token, authorizeAdmin, routes.backups.remount); - router.get ('/api/v1/backups/config', token, authorizeAdmin, routes.backups.getConfig); - router.post('/api/v1/backups/config', json, token, authorizeOwner, routes.backups.setConfig); - router.get ('/api/v1/backups/policy', token, authorizeAdmin, routes.backups.getPolicy); - router.post('/api/v1/backups/policy', json, token, authorizeOwner, routes.backups.setPolicy); - router.post('/api/v1/backups/:backupId', json, token, authorizeAdmin, routes.backups.update); + router.get ('/api/v1/backups', token, authorizeAdmin, routes.backups.list); + router.get ('/api/v1/backups/mount_status', token, authorizeAdmin, routes.backups.getMountStatus); + router.post('/api/v1/backups/create', token, authorizeAdmin, routes.backups.create); + router.post('/api/v1/backups/cleanup', json, token, authorizeAdmin, routes.backups.cleanup); + router.post('/api/v1/backups/remount', json, token, authorizeAdmin, routes.backups.remount); + router.get ('/api/v1/backups/config', token, authorizeAdmin, routes.backups.getConfig); + 
router.post('/api/v1/backups/config/storage', json, token, authorizeOwner, routes.backups.setStorage); + router.post('/api/v1/backups/config/limits', json, token, authorizeOwner, routes.backups.setLimits); + router.get ('/api/v1/backups/policy', token, authorizeAdmin, routes.backups.getPolicy); + router.post('/api/v1/backups/policy', json, token, authorizeOwner, routes.backups.setPolicy); + router.post('/api/v1/backups/:backupId', json, token, authorizeAdmin, routes.backups.update); // working off the user behind the provided token router.get ('/api/v1/profile', token, authorizeUser, routes.profile.get); diff --git a/src/settings.js b/src/settings.js index b95cdf38b..92fbc74f2 100644 --- a/src/settings.js +++ b/src/settings.js @@ -14,7 +14,8 @@ exports = module.exports = { APPSTORE_WEB_TOKEN_KEY: 'appstore_web_token', API_SERVER_ORIGIN_KEY: 'api_server_origin', AUTOUPDATE_PATTERN_KEY: 'autoupdate_pattern', - BACKUP_CONFIG_KEY: 'backup_config', + BACKUP_STORAGE_KEY: 'backup_storage', + BACKUP_LIMITS_KEY: 'backup_limits', BACKUP_POLICY_KEY: 'backup_policy', CLOUDRON_AVATAR_KEY: 'cloudron_avatar', CLOUDRON_ID_KEY: 'cloudron_id', diff --git a/src/test/backupcleaner-test.js b/src/test/backupcleaner-test.js index 1563dcc53..407b359fe 100644 --- a/src/test/backupcleaner-test.js +++ b/src/test/backupcleaner-test.js @@ -210,7 +210,7 @@ describe('backup cleaner', function () { }; before(async function () { - await settings._set(settings.BACKUP_CONFIG_KEY, JSON.stringify({ + await settings._set(settings.BACKUP_STORAGE_KEY, JSON.stringify({ provider: 'filesystem', password: 'supersecret', backupFolder: '/tmp/someplace', diff --git a/src/test/backuptask-test.js b/src/test/backuptask-test.js index b011729a1..c753f0c46 100644 --- a/src/test/backuptask-test.js +++ b/src/test/backuptask-test.js @@ -33,7 +33,7 @@ describe('backuptask', function () { before(async function () { fs.rmSync(backupConfig.backupFolder, { recursive: true, force: true }); - await 
backups.setConfig(backupConfig); + await backups.setStorage(backupConfig); }); async function createBackup() {