diff --git a/dashboard/src/js/client.js b/dashboard/src/js/client.js index 6aa0c7540..44c861377 100644 --- a/dashboard/src/js/client.js +++ b/dashboard/src/js/client.js @@ -978,7 +978,7 @@ angular.module('Application').service('Client', ['$http', '$interval', '$timeout }; Client.prototype.setBackupConfig = function (backupConfig, callback) { - post('/api/v1/settings/backup_config', backupConfig, null, function (error, data, status) { + post('/api/v1/backups/config', backupConfig, null, function (error, data, status) { if (error) return callback(error); if (status !== 200) return callback(new ClientError(status, data)); @@ -987,7 +987,7 @@ angular.module('Application').service('Client', ['$http', '$interval', '$timeout }; Client.prototype.getBackupConfig = function (callback) { - get('/api/v1/settings/backup_config', null, function (error, data, status) { + get('/api/v1/backups/config', null, function (error, data, status) { if (error) return callback(error); if (status !== 200) return callback(new ClientError(status, data)); @@ -996,7 +996,7 @@ angular.module('Application').service('Client', ['$http', '$interval', '$timeout }; Client.prototype.setBackupPolicy = function (backupPolicy, callback) { - post('/api/v1/settings/backup_policy', backupPolicy, null, function (error, data, status) { + post('/api/v1/backups/policy', backupPolicy, null, function (error, data, status) { if (error) return callback(error); if (status !== 200) return callback(new ClientError(status, data)); @@ -1005,7 +1005,7 @@ angular.module('Application').service('Client', ['$http', '$interval', '$timeout }; Client.prototype.getBackupPolicy = function (callback) { - get('/api/v1/settings/backup_policy', null, function (error, data, status) { + get('/api/v1/backups/policy', null, function (error, data, status) { if (error) return callback(error); if (status !== 200) return callback(new ClientError(status, data)); diff --git a/src/apps.js b/src/apps.js index e1b53c5c1..648b654bc 100644 --- 
a/src/apps.js +++ b/src/apps.js @@ -1208,7 +1208,7 @@ async function scheduleTask(appId, installationState, taskId, auditSource) { assert.strictEqual(typeof taskId, 'string'); assert.strictEqual(typeof auditSource, 'object'); - const backupConfig = await settings.getBackupConfig(); + const backupConfig = await backups.getConfig(); let memoryLimit = 400; if (installationState === exports.ISTATE_PENDING_BACKUP || installationState === exports.ISTATE_PENDING_CLONE || installationState === exports.ISTATE_PENDING_RESTORE @@ -2678,7 +2678,7 @@ async function getBackupDownloadStream(app, backupId) { if (backup.identifier !== app.id) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found'); // some other app's backup if (backup.format !== 'tgz') throw new BoxError(BoxError.BAD_STATE, 'only tgz backups can be downloaded'); - const backupConfig = await settings.getBackupConfig(); + const backupConfig = await backups.getConfig(); const ps = new PassThrough(); diff --git a/src/backupcleaner.js b/src/backupcleaner.js index f3ea05b4b..934481a28 100644 --- a/src/backupcleaner.js +++ b/src/backupcleaner.js @@ -18,7 +18,6 @@ const apps = require('./apps.js'), path = require('path'), paths = require('./paths.js'), safe = require('safetydance'), - settings = require('./settings.js'), storage = require('./storage.js'); function applyBackupRetention(allBackups, retention, referencedBackupIds) { @@ -273,8 +272,8 @@ async function cleanupSnapshots(backupConfig) { async function run(progressCallback) { assert.strictEqual(typeof progressCallback, 'function'); - const backupConfig = await settings.getBackupConfig(); - const { retention } = await settings.getBackupPolicy(); + const backupConfig = await backups.getConfig(); + const { retention } = await backups.getPolicy(); const status = await storage.api(backupConfig.provider).getProviderStatus(backupConfig); debug(`clean: mount point status is ${JSON.stringify(status)}`); diff --git a/src/backups.js b/src/backups.js index 
5cac0f57f..d68833cb3 100644 --- a/src/backups.js +++ b/src/backups.js @@ -27,6 +27,12 @@ exports = module.exports = { validateEncryptionPassword, testConfig, + getPolicy, + setPolicy, + + getConfig, + setConfig, + remount, getMountStatus, @@ -45,6 +51,7 @@ exports = module.exports = { const assert = require('assert'), BoxError = require('./boxerror.js'), constants = require('./constants.js'), + cron = require('./cron.js'), CronJob = require('cron').CronJob, crypto = require('crypto'), database = require('./database.js'), @@ -58,7 +65,8 @@ const assert = require('assert'), safe = require('safetydance'), settings = require('./settings.js'), storage = require('./storage.js'), - tasks = require('./tasks.js'); + tasks = require('./tasks.js'), + _ = require('underscore'); const BACKUPS_FIELDS = [ 'id', 'remotePath', 'label', 'identifier', 'creationTime', 'packageVersion', 'type', 'dependsOnJson', 'state', 'manifestJson', 'format', 'preserveSecs', 'encryptionVersion' ]; @@ -240,7 +248,7 @@ async function startBackupTask(auditSource) { let error = locker.lock(locker.OP_FULL_BACKUP); if (error) throw new BoxError(BoxError.BAD_STATE, `Cannot backup now: ${error.message}`); - const backupConfig = await settings.getBackupConfig(); + const backupConfig = await getConfig(); const memoryLimit = backupConfig.limits?.memoryLimit ? 
Math.max(backupConfig.limits.memoryLimit/1024/1024, 800) : 800; @@ -352,7 +360,7 @@ async function validateEncryptionPassword(password) { async function remount(auditSource) { assert.strictEqual(typeof auditSource, 'object'); - const backupConfig = await settings.getBackupConfig(); + const backupConfig = await getConfig(); const func = storage.api(backupConfig.provider); if (!func) throw new BoxError(BoxError.BAD_FIELD, 'unknown storage provider'); @@ -361,7 +369,7 @@ async function remount(auditSource) { } async function getMountStatus() { - const backupConfig = await settings.getBackupConfig(); + const backupConfig = await getConfig(); let hostPath; if (mounts.isManagedProvider(backupConfig.provider)) { @@ -376,3 +384,80 @@ async function getMountStatus() { return await mounts.getStatus(backupConfig.provider, hostPath); // { state, message } } + +async function getPolicy() { + const result = await settings.getJson(settings.BACKUP_POLICY_KEY); + return result || { + retention: { keepWithinSecs: 2 * 24 * 60 * 60 }, // 2 days + schedule: '00 00 23 * * *' // every day at 11pm + }; +} + +async function setPolicy(policy) { + assert.strictEqual(typeof policy, 'object'); + + const error = await validatePolicy(policy); + if (error) throw error; + + await settings.setJson(settings.BACKUP_POLICY_KEY, policy); + await cron.backupPolicyChanged(policy); +} + +async function getConfig() { + const value = await settings.getJson(settings.BACKUP_CONFIG_KEY); + return value || { + provider: 'filesystem', + backupFolder: paths.DEFAULT_BACKUP_DIR, + format: 'tgz', + encryption: null, + }; +} + +async function setConfig(backupConfig) { + assert.strictEqual(typeof backupConfig, 'object'); + + const oldConfig = await getConfig(); + + injectPrivateFields(backupConfig, oldConfig); + + if (mounts.isManagedProvider(backupConfig.provider)) { + let error = mounts.validateMountOptions(backupConfig.provider, backupConfig.mountOptions); + if (error) throw error; + + [error] = await 
safe(mounts.tryAddMount(mounts.mountObjectFromBackupConfig(backupConfig), { timeout: 10 })); // 10 seconds + + if (error) { + if (mounts.isManagedProvider(oldConfig.provider)) { // put back the old mount configuration + debug('setBackupConfig: rolling back to previous mount configuration'); + + await safe(mounts.tryAddMount(mounts.mountObjectFromBackupConfig(oldConfig), { timeout: 10 })); + } + + throw error; + } + } + + const error = await testConfig(backupConfig); + if (error) throw error; + + if ('password' in backupConfig) { // user set password + const error = await validateEncryptionPassword(backupConfig.password); + if (error) throw error; + + backupConfig.encryption = generateEncryptionKeysSync(backupConfig.password); + delete backupConfig.password; + } + + // if any of these changes, we have to clear the cache + if (!_.isEqual(_.omit(backupConfig, 'limits'), _.omit(oldConfig, 'limits'))) { + debug('setBackupConfig: clearing backup cache'); + cleanupCacheFilesSync(); + } + + await settings.setJson(settings.BACKUP_CONFIG_KEY, backupConfig); + + if (mounts.isManagedProvider(oldConfig.provider) && !mounts.isManagedProvider(backupConfig.provider)) { + debug('setBackupConfig: removing old backup mount point'); + await safe(mounts.removeMount(mounts.mountObjectFromBackupConfig(oldConfig))); + } +} diff --git a/src/backuptask.js b/src/backuptask.js index 73f302387..536083480 100644 --- a/src/backuptask.js +++ b/src/backuptask.js @@ -82,7 +82,7 @@ async function upload(remotePath, format, dataLayoutString, progressCallback) { debug(`upload: path ${remotePath} format ${format} dataLayout ${dataLayoutString}`); const dataLayout = DataLayout.fromString(dataLayoutString); - const backupConfig = await settings.getBackupConfig(); + const backupConfig = await backups.getConfig(); await checkPreconditions(backupConfig, dataLayout); @@ -131,7 +131,7 @@ async function downloadApp(app, restoreConfig, progressCallback) { const dataLayout = new DataLayout(appDataDir, 
app.storageVolumeId ? [{ localDir: await apps.getStorageDir(app), remoteDir: 'data' }] : []); const startTime = new Date(); - const backupConfig = restoreConfig.backupConfig || await settings.getBackupConfig(); + const backupConfig = restoreConfig.backupConfig || await backups.getConfig(); await download(backupConfig, restoreConfig.remotePath, restoreConfig.backupFormat, dataLayout, progressCallback); debug('downloadApp: time: %s', (new Date() - startTime)/1000); @@ -267,7 +267,7 @@ async function backupBox(dependsOn, tag, options, progressCallback) { assert.strictEqual(typeof options, 'object'); assert.strictEqual(typeof progressCallback, 'function'); - const backupConfig = await settings.getBackupConfig(); + const backupConfig = await backups.getConfig(); await uploadBoxSnapshot(backupConfig, progressCallback); return await rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCallback); @@ -381,7 +381,7 @@ async function backupAppWithTag(app, tag, options, progressCallback) { return results[0].id; } - const backupConfig = await settings.getBackupConfig(); + const backupConfig = await backups.getConfig(); await uploadAppSnapshot(backupConfig, app, progressCallback); return await rotateAppBackup(backupConfig, app, tag, options, progressCallback); @@ -452,7 +452,7 @@ async function backupMailWithTag(tag, options, progressCallback) { debug(`backupMailWithTag: backing up mail with tag ${tag}`); - const backupConfig = await settings.getBackupConfig(); + const backupConfig = await backups.getConfig(); await uploadMailSnapshot(backupConfig, progressCallback); return await rotateMailBackup(backupConfig, tag, options, progressCallback); } diff --git a/src/cron.js b/src/cron.js index 845a84a88..abb2c1e34 100644 --- a/src/cron.js +++ b/src/cron.js @@ -12,6 +12,7 @@ exports = module.exports = { stopJobs, handleSettingsChanged, + backupPolicyChanged, handleTimeZoneChanged, autoupdatePatternChanged, dynamicDnsChanged, @@ -162,10 +163,7 @@ async function startJobs() { 
start: true }); - const allSettings = await settings.list(); - - const tz = await cloudron.getTimeZone(); - backupPolicyChanged(allSettings[settings.BACKUP_POLICY_KEY], tz); + await backupPolicyChanged(await backups.getPolicy()); await autoupdatePatternChanged(await updater.getAutoupdatePattern()); dynamicDnsChanged(await network.getDynamicDns()); } @@ -188,9 +186,10 @@ async function handleSettingsChanged(key, value) { } } -function backupPolicyChanged(value, tz) { +async function backupPolicyChanged(value) { assert.strictEqual(typeof value, 'object'); - assert.strictEqual(typeof tz, 'string'); + + const tz = await cloudron.getTimeZone(); debug(`backupPolicyChanged: schedule ${value.schedule} (${tz})`); diff --git a/src/provision.js b/src/provision.js index 56acce612..f84aaf4c9 100644 --- a/src/provision.js +++ b/src/provision.js @@ -178,7 +178,7 @@ async function restoreTask(backupConfig, remotePath, ipv4Config, options, auditS if (!options.skipDnsSetup) await cloudron.setupDnsAndCert(constants.DASHBOARD_SUBDOMAIN, dashboardDomain, auditSource, (progress) => setProgress('restore', progress.message)); await cloudron.setDashboardDomain(dashboardDomain, auditSource); - await settings.setBackupConfig(backupConfig); + await backups.setConfig(backupConfig); await eventlog.add(eventlog.ACTION_RESTORE, auditSource, { remotePath }); setImmediate(() => safe(cloudron.onActivated(options), { debug })); diff --git a/src/routes/backups.js b/src/routes/backups.js index a4d0c413b..ed7c39f61 100644 --- a/src/routes/backups.js +++ b/src/routes/backups.js @@ -6,7 +6,12 @@ exports = module.exports = { create, cleanup, remount, - getMountStatus + getMountStatus, + + getConfig, + setConfig, + getPolicy, + setPolicy }; const assert = require('assert'), @@ -70,3 +75,78 @@ async function getMountStatus(req, res, next) { if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, mountStatus)); } + +async function getConfig(req, res, next) { + const [error, 
backupConfig] = await safe(backups.getConfig()); + if (error) return next(BoxError.toHttpError(error)); + + next(new HttpSuccess(200, backups.removePrivateFields(backupConfig))); +} + +async function setConfig(req, res, next) { + assert.strictEqual(typeof req.body, 'object'); + + if (typeof req.body.provider !== 'string') return next(new HttpError(400, 'provider is required')); + if ('password' in req.body && typeof req.body.password !== 'string') return next(new HttpError(400, 'password must be a string')); + if ('encryptedFilenames' in req.body && typeof req.body.encryptedFilenames !== 'boolean') return next(new HttpError(400, 'encryptedFilenames must be a boolean')); + + if (req.body.limits) { + if (typeof req.body.limits !== 'object') return next(new HttpError(400, 'limits must be an object')); + const limits = req.body.limits; + + if ('syncConcurrency' in limits) { + if (typeof limits.syncConcurrency !== 'number') return next(new HttpError(400, 'syncConcurrency must be a positive integer')); + if (limits.syncConcurrency < 1) return next(new HttpError(400, 'syncConcurrency must be a positive integer')); + } + if ('copyConcurrency' in limits) { + if (typeof limits.copyConcurrency !== 'number') return next(new HttpError(400, 'copyConcurrency must be a positive integer')); + if (limits.copyConcurrency < 1) return next(new HttpError(400, 'copyConcurrency must be a positive integer')); + } + if ('downloadConcurrency' in limits) { + if (typeof limits.downloadConcurrency !== 'number') return next(new HttpError(400, 'downloadConcurrency must be a positive integer')); + if (limits.downloadConcurrency < 1) return next(new HttpError(400, 'downloadConcurrency must be a positive integer')); + } + if ('deleteConcurrency' in limits) { + if (typeof limits.deleteConcurrency !== 'number') return next(new HttpError(400, 'deleteConcurrency must be a positive integer')); + if (limits.deleteConcurrency < 1) return next(new HttpError(400, 'deleteConcurrency must be a positive integer')); + 
} + if ('uploadPartSize' in limits) { + if (typeof limits.uploadPartSize !== 'number') return next(new HttpError(400, 'uploadPartSize must be a positive integer')); + if (limits.uploadPartSize < 1) return next(new HttpError(400, 'uploadPartSize must be a positive integer')); + } + + if ('memoryLimit' in limits && typeof limits.memoryLimit !== 'number') return next(new HttpError(400, 'memoryLimit must be a positive integer')); + } + + if (typeof req.body.format !== 'string') return next(new HttpError(400, 'format must be a string')); + if ('acceptSelfSignedCerts' in req.body && typeof req.body.acceptSelfSignedCerts !== 'boolean') return next(new HttpError(400, 'acceptSelfSignedCerts must be a boolean')); + + if ('mountOptions' in req.body && typeof req.body.mountOptions !== 'object') return next(new HttpError(400, 'mountOptions must be an object')); + + // testing the backup using put/del takes a bit of time at times + req.clearTimeout(); + + const [error] = await safe(backups.setConfig(req.body)); + if (error) return next(BoxError.toHttpError(error)); + + next(new HttpSuccess(200, {})); +} + +async function getPolicy(req, res, next) { + const [error, policy] = await safe(backups.getPolicy()); + if (error) return next(BoxError.toHttpError(error)); + + next(new HttpSuccess(200, { policy })); +} + +async function setPolicy(req, res, next) { + assert.strictEqual(typeof req.body, 'object'); + + if (typeof req.body.schedule !== 'string') return next(new HttpError(400, 'schedule is required')); + if (!req.body.retention || typeof req.body.retention !== 'object') return next(new HttpError(400, 'retention is required')); + + const [error] = await safe(backups.setPolicy(req.body)); + if (error) return next(BoxError.toHttpError(error)); + + next(new HttpSuccess(200, {})); +} diff --git a/src/routes/settings.js b/src/routes/settings.js index 49abfa854..b16056e13 100644 --- a/src/routes/settings.js +++ b/src/routes/settings.js @@ -4,12 +4,9 @@ exports = module.exports = { set, get, - // owner 
only settings - setBackupConfig, }; const assert = require('assert'), - backups = require('../backups.js'), BoxError = require('../boxerror.js'), docker = require('../docker.js'), HttpError = require('connect-lastmile').HttpError, @@ -17,81 +14,6 @@ const assert = require('assert'), safe = require('safetydance'), settings = require('../settings.js'); -async function getBackupConfig(req, res, next) { - const [error, backupConfig] = await safe(settings.getBackupConfig()); - if (error) return next(BoxError.toHttpError(error)); - - next(new HttpSuccess(200, backups.removePrivateFields(backupConfig))); -} - -async function setBackupConfig(req, res, next) { - assert.strictEqual(typeof req.body, 'object'); - - if (typeof req.body.provider !== 'string') return next(new HttpError(400, 'provider is required')); - if ('password' in req.body && typeof req.body.password !== 'string') return next(new HttpError(400, 'password must be a string')); - if ('encryptedFilenames' in req.body && typeof req.body.encryptedFilenames !== 'boolean') return next(new HttpError(400, 'encryptedFilenames must be a boolean')); - - if (req.body.limits) { - if (typeof req.body.limits !== 'object') return next(new HttpError(400, 'limits must be an object')); - const limits = req.body; - - if ('syncConcurrency' in limits) { - if (typeof limits.syncConcurrency !== 'number') return next(new HttpError(400, 'syncConcurrency must be a positive integer')); - if (limits.syncConcurrency < 1) return next(new HttpError(400, 'syncConcurrency must be a positive integer')); - } - if ('copyConcurrency' in limits) { - if (typeof limits.copyConcurrency !== 'number') return next(new HttpError(400, 'copyConcurrency must be a positive integer')); - if (limits.copyConcurrency < 1) return next(new HttpError(400, 'copyConcurrency must be a positive integer')); - } - if ('downloadConcurrency' in limits) { - if (typeof limits.downloadConcurrency !== 'number') return next(new HttpError(400, 'downloadConcurrency must be a 
positive integer')); - if (limits.downloadConcurrency < 1) return next(new HttpError(400, 'downloadConcurrency must be a positive integer')); - } - if ('deleteConcurrency' in limits) { - if (typeof limits.deleteConcurrency !== 'number') return next(new HttpError(400, 'deleteConcurrency must be a positive integer')); - if (limits.deleteConcurrency < 1) return next(new HttpError(400, 'deleteConcurrency must be a positive integer')); - } - if ('uploadPartSize' in limits) { - if (typeof limits.uploadPartSize !== 'number') return next(new HttpError(400, 'uploadPartSize must be a positive integer')); - if (limits.uploadPartSize < 1) return next(new HttpError(400, 'uploadPartSize must be a positive integer')); - } - - if ('memoryLimit' in limits && typeof limits.memoryLimit !== 'number') return next(new HttpError(400, 'memoryLimit must be a positive integer')); - } - - if (typeof req.body.format !== 'string') return next(new HttpError(400, 'format must be a string')); - if ('acceptSelfSignedCerts' in req.body && typeof req.body.acceptSelfSignedCerts !== 'boolean') return next(new HttpError(400, 'format must be a boolean')); - - if ('mountOptions' in req.body && typeof req.body.mountOptions !== 'object') return next(new HttpError(400, 'mountOptions must be a object')); - - // testing the backup using put/del takes a bit of time at times - req.clearTimeout(); - - const [error] = await safe(settings.setBackupConfig(req.body)); - if (error) return next(BoxError.toHttpError(error)); - - next(new HttpSuccess(200, {})); -} - -async function getBackupPolicy(req, res, next) { - const [error, policy] = await safe(settings.getBackupPolicy()); - if (error) return next(BoxError.toHttpError(error)); - - next(new HttpSuccess(200, { policy })); -} - -async function setBackupPolicy(req, res, next) { - assert.strictEqual(typeof req.body, 'object'); - - if (typeof req.body.schedule !== 'string') return next(new HttpError(400, 'schedule is required')); - if (!req.body.retention || typeof 
req.body.retention !== 'object') return next(new HttpError(400, 'retention is required')); - - const [error] = await safe(settings.setBackupPolicy(req.body)); - if (error) return next(BoxError.toHttpError(error)); - - next(new HttpSuccess(200, {})); -} - async function getRegistryConfig(req, res, next) { const [error, registryConfig] = await safe(settings.getRegistryConfig()); if (error) return next(BoxError.toHttpError(error)); @@ -120,8 +42,6 @@ function get(req, res, next) { assert.strictEqual(typeof req.params.setting, 'string'); switch (req.params.setting) { - case settings.BACKUP_POLICY_KEY: return getBackupPolicy(req, res, next); - case settings.BACKUP_CONFIG_KEY: return getBackupConfig(req, res, next); case settings.REGISTRY_CONFIG_KEY: return getRegistryConfig(req, res, next); default: return next(new HttpError(404, 'No such setting')); @@ -132,7 +52,6 @@ function set(req, res, next) { assert.strictEqual(typeof req.body, 'object'); switch (req.params.setting) { - case settings.BACKUP_POLICY_KEY: return setBackupPolicy(req, res, next); case settings.REGISTRY_CONFIG_KEY: return setRegistryConfig(req, res, next); default: return next(new HttpError(404, 'No such setting')); diff --git a/src/routes/test/backups-test.js b/src/routes/test/backups-test.js index ac8e6805f..4ff280a3d 100644 --- a/src/routes/test/backups-test.js +++ b/src/routes/test/backups-test.js @@ -5,20 +5,249 @@ 'use strict'; -const common = require('./common.js'), +const backups = require('../../backups.js'), + common = require('./common.js'), expect = require('expect.js'), - settings = require('../../settings.js'), superagent = require('superagent'); +const BACKUP_FOLDER = '/tmp/backup_test'; + describe('Backups API', function () { const { setup, cleanup, waitForTask, serverUrl, owner } = common; before(setup); after(cleanup); + describe('backup_policy', function () { + const defaultPolicy = { + retention: { keepWithinSecs: 2 * 24 * 60 * 60 }, // 2 days + schedule: '00 00 23 * * *' // every 
day at 11pm + }; + + it('cannot set backup_policy without schedule', async function () { + const tmp = Object.assign({} , defaultPolicy); + delete tmp.schedule; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/policy`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_policy with invalid schedule', async function () { + const tmp = Object.assign({} , defaultPolicy); + tmp.schedule = 'not a pattern'; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/policy`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_policy without retention', async function () { + const tmp = Object.assign({} , defaultPolicy); + delete tmp.retention; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/policy`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_policy with invalid retention', async function () { + const tmp = Object.assign({} , defaultPolicy); + tmp.retention = 'not an object'; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/policy`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_policy with empty retention', async function () { + const tmp = Object.assign({} , defaultPolicy); + tmp.retention = {}; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/policy`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_policy with retention missing properties', async function () { + const tmp = Object.assign({} , defaultPolicy); + tmp.retention = { foo: 'bar' }; + + const response = 
await superagent.post(`${serverUrl}/api/v1/backups/policy`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_policy with retention with invalid keepWithinSecs', async function () { + const tmp = Object.assign({} , defaultPolicy); + tmp.retention = { keepWithinSecs: 'not a number' }; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/policy`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + }); + + describe('backup_config', function () { + // keep in sync with defaults in backups.js + let defaultConfig = { + provider: 'filesystem', + backupFolder: '/var/backups', + format: 'tgz', + encryption: null, + }; + + it('can get backup_config (default)', async function () { + const response = await superagent.get(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token }); + + expect(response.statusCode).to.equal(200); + expect(response.body).to.eql(defaultConfig); + }); + + it('cannot set backup_config without provider', async function () { + let tmp = JSON.parse(JSON.stringify(defaultConfig)); + delete tmp.provider; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_config with invalid provider', async function () { + let tmp = JSON.parse(JSON.stringify(defaultConfig)); + tmp.provider = 'invalid provider'; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_config without format', async function () { + let tmp = JSON.parse(JSON.stringify(defaultConfig)); + delete tmp.format; + + const response = await 
superagent.post(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_config with invalid format', async function () { + let tmp = JSON.parse(JSON.stringify(defaultConfig)); + tmp.format = 'invalid format'; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_config with invalid password', async function () { + let tmp = JSON.parse(JSON.stringify(defaultConfig)); + tmp.password = 1234; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_config with invalid syncConcurrency', async function () { + let tmp = JSON.parse(JSON.stringify(defaultConfig)); + tmp.limits = { syncConcurrency: 'not a number' }; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_config with invalid syncConcurrency', async function () { + let tmp = JSON.parse(JSON.stringify(defaultConfig)); + tmp.limits = { syncConcurrency: 0 }; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token }) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('cannot set backup_config with invalid acceptSelfSignedCerts', async function () { + let tmp = JSON.parse(JSON.stringify(defaultConfig)); + tmp.acceptSelfSignedCerts = 'not a boolean'; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token 
}) + .send(tmp) + .ok(() => true); + + expect(response.statusCode).to.equal(400); + }); + + it('can set backup_config', async function () { + let tmp = JSON.parse(JSON.stringify(defaultConfig)); + tmp.format = 'rsync'; + tmp.backupFolder = BACKUP_FOLDER; + tmp.limits = { copyConcurrency: 34 }; + + const response = await superagent.post(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token }) + .send(tmp); + + expect(response.statusCode).to.equal(200); + }); + + it('can get backup_config', async function () { + const response = await superagent.get(`${serverUrl}/api/v1/backups/config`) + .query({ access_token: owner.token }); + + expect(response.statusCode).to.equal(200); + expect(response.body.format).to.equal('rsync'); + expect(response.body.backupFolder).to.equal(BACKUP_FOLDER); + }); + }); + describe('create', function () { before(async function () { - await settings.setBackupConfig({ + await backups.setConfig({ provider: 'filesystem', backupFolder: '/tmp/backups', format: 'tgz', diff --git a/src/routes/test/settings-test.js b/src/routes/test/settings-test.js deleted file mode 100644 index c1add8b92..000000000 --- a/src/routes/test/settings-test.js +++ /dev/null @@ -1,246 +0,0 @@ -'use strict'; - -/* global it:false */ -/* global describe:false */ -/* global before:false */ -/* global after:false */ - -const common = require('./common.js'), - expect = require('expect.js'), - superagent = require('superagent'); - -const BACKUP_FOLDER = '/tmp/backup_test'; - -describe('Settings API', function () { - const { setup, cleanup, serverUrl, owner } = common; - - before(setup); - after(cleanup); - - describe('backup_policy', function () { - const defaultPolicy = { - retention: { keepWithinSecs: 2 * 24 * 60 * 60 }, // 2 days - schedule: '00 00 23 * * *' // every day at 11pm - }; - - it('cannot set backup_policy without schedule', async function () { - const tmp = Object.assign({} , defaultPolicy); - delete tmp.schedule; - - const response = await 
superagent.post(`${serverUrl}/api/v1/settings/backup_policy`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_policy with invalid schedule', async function () { - const tmp = Object.assign({} , defaultPolicy); - tmp.schedule = 'not a pattern'; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_policy`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_policy without retention', async function () { - const tmp = Object.assign({} , defaultPolicy); - delete tmp.retention; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_policy`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_policy with invalid retention', async function () { - const tmp = Object.assign({} , defaultPolicy); - tmp.retention = 'not an object'; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_policy`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_policy with empty retention', async function () { - const tmp = Object.assign({} , defaultPolicy); - tmp.retention = {}; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_policy`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_policy with retention missing properties', async function () { - const tmp = Object.assign({} , defaultPolicy); - tmp.retention = { foo: 'bar' }; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_policy`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - 
expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_policy with retention with invalid keepWithinSecs', async function () { - const tmp = Object.assign({} , defaultPolicy); - tmp.retention = { keepWithinSecs: 'not a number' }; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_policy`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - }); - - describe('backup_config', function () { - // keep in sync with defaults in settings.js - let defaultConfig = { - provider: 'filesystem', - backupFolder: '/var/backups', - format: 'tgz', - encryption: null, - }; - - it('can get backup_config (default)', async function () { - const response = await superagent.get(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }); - - expect(response.statusCode).to.equal(200); - expect(response.body).to.eql(defaultConfig); - }); - - it('cannot set backup_config without provider', async function () { - let tmp = JSON.parse(JSON.stringify(defaultConfig)); - delete tmp.provider; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_config with invalid provider', async function () { - let tmp = JSON.parse(JSON.stringify(defaultConfig)); - tmp.provider = 'invalid provider'; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_config without format', async function () { - let tmp = JSON.parse(JSON.stringify(defaultConfig)); - delete tmp.format; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }) - 
.send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_config with invalid format', async function () { - let tmp = JSON.parse(JSON.stringify(defaultConfig)); - tmp.format = 'invalid format'; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_config with invalid password', async function () { - let tmp = JSON.parse(JSON.stringify(defaultConfig)); - tmp.password = 1234; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_config with invalid syncConcurrency', async function () { - let tmp = JSON.parse(JSON.stringify(defaultConfig)); - tmp.limits = { syncConcurrency: 'not a number' }; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_config with invalid syncConcurrency', async function () { - let tmp = JSON.parse(JSON.stringify(defaultConfig)); - tmp.limits = { syncConcurrency: 0 }; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - expect(response.statusCode).to.equal(400); - }); - - it('cannot set backup_config with invalid acceptSelfSignedCerts', async function () { - let tmp = JSON.parse(JSON.stringify(defaultConfig)); - tmp.acceptSelfSignedCerts = 'not a boolean'; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }) - .send(tmp) - .ok(() => true); - - 
expect(response.statusCode).to.equal(400); - }); - - it('can set backup_config', async function () { - let tmp = JSON.parse(JSON.stringify(defaultConfig)); - tmp.format = 'rsync'; - tmp.backupFolder = BACKUP_FOLDER; - tmp.limits = { copyConcurrency: 34 }; - - const response = await superagent.post(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }) - .send(tmp); - - expect(response.statusCode).to.equal(200); - }); - - it('can get backup_config', async function () { - const response = await superagent.get(`${serverUrl}/api/v1/settings/backup_config`) - .query({ access_token: owner.token }); - - expect(response.statusCode).to.equal(200); - expect(response.body.format).to.equal('rsync'); - expect(response.body.backupFolder).to.equal(BACKUP_FOLDER); - }); - }); -}); diff --git a/src/server.js b/src/server.js index 78fe7a983..af721aa92 100644 --- a/src/server.js +++ b/src/server.js @@ -150,12 +150,16 @@ async function initializeExpressSync() { router.post('/api/v1/notifications/:notificationId', json, token, authorizeAdmin, routes.notifications.load, routes.notifications.update); // backup routes - router.get ('/api/v1/backups', token, authorizeAdmin, routes.backups.list); - router.get ('/api/v1/backups/mount_status', token, authorizeAdmin, routes.backups.getMountStatus); - router.post('/api/v1/backups/create', token, authorizeAdmin, routes.backups.create); - router.post('/api/v1/backups/cleanup', json, token, authorizeAdmin, routes.backups.cleanup); - router.post('/api/v1/backups/remount', json, token, authorizeAdmin, routes.backups.remount); - router.post('/api/v1/backups/:backupId', json, token, authorizeAdmin, routes.backups.update); + router.get ('/api/v1/backups', token, authorizeAdmin, routes.backups.list); + router.get ('/api/v1/backups/mount_status', token, authorizeAdmin, routes.backups.getMountStatus); + router.post('/api/v1/backups/create', token, authorizeAdmin, routes.backups.create); + 
router.post('/api/v1/backups/cleanup', json, token, authorizeAdmin, routes.backups.cleanup); + router.post('/api/v1/backups/remount', json, token, authorizeAdmin, routes.backups.remount); + router.get ('/api/v1/backups/config', token, authorizeAdmin, routes.backups.getConfig); + router.post('/api/v1/backups/config', json, token, authorizeOwner, routes.backups.setConfig); + router.get ('/api/v1/backups/policy', token, authorizeAdmin, routes.backups.getPolicy); + router.post('/api/v1/backups/policy', json, token, authorizeOwner, routes.backups.setPolicy); + router.post('/api/v1/backups/:backupId', json, token, authorizeAdmin, routes.backups.update); // config route (for dashboard). can return some private configuration unlike status router.get ('/api/v1/config', token, authorizeUser, routes.cloudron.getConfig); @@ -320,7 +324,6 @@ async function initializeExpressSync() { // settings routes (these are for the settings tab - avatar & name have public routes for normal users. see above) router.get ('/api/v1/settings/:setting', token, authorizeAdmin, routes.settings.get); - router.post('/api/v1/settings/backup_config', json, token, authorizeOwner, routes.settings.setBackupConfig); router.post('/api/v1/settings/:setting', json, token, authorizeAdmin, routes.settings.set); // email routes diff --git a/src/settings.js b/src/settings.js index 7dc35471c..3cb3a0758 100644 --- a/src/settings.js +++ b/src/settings.js @@ -1,12 +1,6 @@ 'use strict'; exports = module.exports = { - getBackupPolicy, - setBackupPolicy, - - getBackupConfig, - setBackupConfig, - getRegistryConfig, setRegistryConfig, @@ -88,31 +82,18 @@ exports = module.exports = { }; const assert = require('assert'), - backups = require('./backups.js'), cron = require('./cron.js'), database = require('./database.js'), debug = require('debug')('box:settings'), docker = require('./docker.js'), - mounts = require('./mounts.js'), paths = require('./paths.js'), - safe = require('safetydance'), - _ = require('underscore'); + 
safe = require('safetydance'); const SETTINGS_FIELDS = [ 'name', 'value' ].join(','); const SETTINGS_BLOB_FIELDS = [ 'name', 'valueBlob' ].join(','); const gDefaults = (function () { const result = { }; - result[exports.BACKUP_CONFIG_KEY] = { - provider: 'filesystem', - backupFolder: paths.DEFAULT_BACKUP_DIR, - format: 'tgz', - encryption: null, - }; - result[exports.BACKUP_POLICY_KEY] = { - retention: { keepWithinSecs: 2 * 24 * 60 * 60 }, // 2 days - schedule: '00 00 23 * * *' // every day at 11pm - }; result[exports.REGISTRY_CONFIG_KEY] = { provider: 'noop' }; @@ -187,81 +168,6 @@ async function clear() { await database.query('DELETE FROM settings'); } -async function getBackupPolicy() { - const result = await get(exports.BACKUP_POLICY_KEY); - if (result === null) return gDefaults[exports.BACKUP_POLICY_KEY]; - return JSON.parse(result); -} - -async function setBackupPolicy(policy) { - assert.strictEqual(typeof policy, 'object'); - - const error = await backups.validatePolicy(policy); - if (error) throw error; - - await set(exports.BACKUP_POLICY_KEY, JSON.stringify(policy)); - notifyChange(exports.BACKUP_POLICY_KEY, policy); -} - -async function getBackupConfig() { - const value = await get(exports.BACKUP_CONFIG_KEY); - if (value === null) return gDefaults[exports.BACKUP_CONFIG_KEY]; - - const backupConfig = JSON.parse(value); // { provider, token, password, region, prefix, bucket } - return backupConfig; -} - -async function setBackupConfig(backupConfig) { - assert.strictEqual(typeof backupConfig, 'object'); - - const oldConfig = await getBackupConfig(); - - backups.injectPrivateFields(backupConfig, oldConfig); - - if (mounts.isManagedProvider(backupConfig.provider)) { - let error = mounts.validateMountOptions(backupConfig.provider, backupConfig.mountOptions); - if (error) throw error; - - [error] = await safe(mounts.tryAddMount(mounts.mountObjectFromBackupConfig(backupConfig), { timeout: 10 })); // 10 seconds - - if (error) { - if 
(mounts.isManagedProvider(oldConfig.provider)) { // put back the old mount configuration - debug('setBackupConfig: rolling back to previous mount configuration'); - - await safe(mounts.tryAddMount(mounts.mountObjectFromBackupConfig(oldConfig), { timeout: 10 })); - } - - throw error; - } - } - - const error = await backups.testConfig(backupConfig); - if (error) throw error; - - if ('password' in backupConfig) { // user set password - const error = await backups.validateEncryptionPassword(backupConfig.password); - if (error) throw error; - - backupConfig.encryption = backups.generateEncryptionKeysSync(backupConfig.password); - delete backupConfig.password; - } - - // if any of these changes, we have to clear the cache - if (!_.isEqual(_.omit(backupConfig, 'limits'), _.omit(oldConfig, 'limits'))) { - debug('setBackupConfig: clearing backup cache'); - backups.cleanupCacheFilesSync(); - } - - await set(exports.BACKUP_CONFIG_KEY, JSON.stringify(backupConfig)); - - if (mounts.isManagedProvider(oldConfig.provider) && !mounts.isManagedProvider(backupConfig.provider)) { - debug('setBackupConfig: removing old backup mount point'); - await safe(mounts.removeMount(mounts.mountObjectFromBackupConfig(oldConfig))); - } - - notifyChange(exports.BACKUP_CONFIG_KEY, backupConfig); -} - async function getRegistryConfig() { const value = await get(exports.REGISTRY_CONFIG_KEY); if (value === null) return gDefaults[exports.REGISTRY_CONFIG_KEY]; diff --git a/src/system.js b/src/system.js index 417307317..cd8559dc3 100644 --- a/src/system.js +++ b/src/system.js @@ -12,6 +12,7 @@ exports = module.exports = { const apps = require('./apps.js'), assert = require('assert'), + backups = require('./backups.js'), BoxError = require('./boxerror.js'), debug = require('debug')('box:disks'), df = require('./df.js'), @@ -21,7 +22,6 @@ const apps = require('./apps.js'), path = require('path'), paths = require('./paths.js'), safe = require('safetydance'), - settings = require('./settings.js'), shell = 
require('./shell.js'), volumes = require('./volumes.js'); @@ -108,7 +108,7 @@ async function getDisks() { disks[diskInfo.filesystem].contents.push(stdPath); } - const backupConfig = await settings.getBackupConfig(); + const backupConfig = await backups.getConfig(); if (backupConfig.provider === 'filesystem') { const [, dfResult] = await safe(df.file(backupConfig.backupFolder)); const filesystem = dfResult?.filesystem || rootDisk.filesystem; diff --git a/src/test/backupcleaner-test.js b/src/test/backupcleaner-test.js index 8fe4371e6..1563dcc53 100644 --- a/src/test/backupcleaner-test.js +++ b/src/test/backupcleaner-test.js @@ -216,7 +216,7 @@ describe('backup cleaner', function () { backupFolder: '/tmp/someplace', format: 'tgz' })); - await settings.setBackupPolicy({ retention: { keepWithinSecs: 1 }, schedule: '00 00 23 * * *' }); + await backups.setPolicy({ retention: { keepWithinSecs: 1 }, schedule: '00 00 23 * * *' }); }); async function cleanupBackups() { diff --git a/src/test/backups-test.js b/src/test/backups-test.js index a96f421e7..928195aff 100644 --- a/src/test/backups-test.js +++ b/src/test/backups-test.js @@ -48,66 +48,104 @@ describe('backups', function () { label: '' }; - it('add succeeds', async function () { - boxBackup.id = await backups.add(boxBackup); + describe('crud', function () { + it('add succeeds', async function () { + boxBackup.id = await backups.add(boxBackup); + }); + + it('fails with duplicate path', async function () { + const [error] = await safe(backups.add(boxBackup)); + expect(error.reason).to.be(BoxError.ALREADY_EXISTS); + }); + + it('get succeeds', async function () { + const result = await backups.get(boxBackup.id); + delete result.creationTime; + expect(result).to.eql(boxBackup); + }); + + it('get of unknown id fails', async function () { + const result = await backups.get('somerandom'); + expect(result).to.be(null); + }); + + it('getByTypePaged succeeds', async function () { + const results = await 
backups.getByTypePaged(backups.BACKUP_TYPE_BOX, 1, 5); + expect(results.length).to.be(1); + delete results[0].creationTime; + expect(results[0]).to.eql(boxBackup); + }); + + it('update succeeds', async function () { + await backups.update(boxBackup.id, { label: 'DuMonde', preserveSecs: 30 }); + const result = await backups.get(boxBackup.id); + expect(result.label).to.eql('DuMonde'); + expect(result.preserveSecs).to.eql(30); + }); + + it('delete succeeds', async function () { + await backups.del(boxBackup.id); + const result = await backups.get(boxBackup.id); + expect(result).to.be(null); + }); + + it('add app backup succeeds', async function () { + appBackup.id = await backups.add(appBackup); + }); + + it('get app backup succeeds', async function () { + const result = await backups.get(appBackup.id); + delete result.creationTime; + expect(result).to.eql(appBackup); + }); + + it('getByIdentifierAndStatePaged succeeds', async function () { + const results = await backups.getByIdentifierAndStatePaged(appBackup.identifier, backups.BACKUP_STATE_CREATING, 1, 5); + expect(results.length).to.be(1); + delete results[0].creationTime; + expect(results[0]).to.eql(appBackup); + }); + + it('delete app backup succeeds', async function () { + await backups.del(appBackup.id); + const result = await backups.get(appBackup.id); + expect(result).to.be(null); + }); }); - it('fails with duplicate path', async function () { - const [error] = await safe(backups.add(boxBackup)); - expect(error.reason).to.be(BoxError.ALREADY_EXISTS); - }); + describe('config and policy', function () { + it('can get backup config', async function () { + const backupConfig = await backups.getConfig(); + expect(backupConfig.provider).to.be('filesystem'); + expect(backupConfig.backupFolder).to.be('/var/backups'); + }); - it('get succeeds', async function () { - const result = await backups.get(boxBackup.id); - delete result.creationTime; - expect(result).to.eql(boxBackup); - }); + it('can set backup config', 
async function () { + let backupConfig = await backups.getConfig(); + backupConfig = Object.assign({}, backupConfig, { backupFolder: '/tmp/backups' }); + await backups.setConfig(backupConfig); - it('get of unknown id fails', async function () { - const result = await backups.get('somerandom'); - expect(result).to.be(null); - }); + const newBackupConfig = await backups.getConfig(); + expect(newBackupConfig.backupFolder).to.be('/tmp/backups'); + }); - it('getByTypePaged succeeds', async function () { - const results = await backups.getByTypePaged(backups.BACKUP_TYPE_BOX, 1, 5); - expect(results.length).to.be(1); - delete results[0].creationTime; - expect(results[0]).to.eql(boxBackup); - }); + it('cannot set backup policy with invalid schedule', async function () { + const [error] = await safe(backups.setPolicy({ schedule: '', retention: { keepWithinSecs: 1 }})); + expect(error.reason).to.be(BoxError.BAD_FIELD); + }); - it('update succeeds', async function () { - await backups.update(boxBackup.id, { label: 'DuMonde', preserveSecs: 30 }); - const result = await backups.get(boxBackup.id); - expect(result.label).to.eql('DuMonde'); - expect(result.preserveSecs).to.eql(30); - }); + it('cannot set backup policy with missing retention', async function () { + const [error] = await safe(backups.setPolicy({ schedule: '00 * * * * *'})); + expect(error.reason).to.be(BoxError.BAD_FIELD); + }); - it('delete succeeds', async function () { - await backups.del(boxBackup.id); - const result = await backups.get(boxBackup.id); - expect(result).to.be(null); - }); + it('cannot set backup policy with invalid retention', async function () { + const [error] = await safe(backups.setPolicy({ schedule: '00 * * * * *', retention: { keepWhenever: 4 }})); + expect(error.reason).to.be(BoxError.BAD_FIELD); + }); - it('add app backup succeeds', async function () { - appBackup.id = await backups.add(appBackup); - }); - - it('get app backup succeeds', async function () { - const result = await 
backups.get(appBackup.id); - delete result.creationTime; - expect(result).to.eql(appBackup); - }); - - it('getByIdentifierAndStatePaged succeeds', async function () { - const results = await backups.getByIdentifierAndStatePaged(appBackup.identifier, backups.BACKUP_STATE_CREATING, 1, 5); - expect(results.length).to.be(1); - delete results[0].creationTime; - expect(results[0]).to.eql(appBackup); - }); - - it('delete app backup succeeds', async function () { - await backups.del(appBackup.id); - const result = await backups.get(appBackup.id); - expect(result).to.be(null); + it('can set valid backup policy', async function () { + await backups.setPolicy({ schedule: '00 00 2,23 * * 0,1,2', retention: { keepWithinSecs: 1 }}); + }); }); }); diff --git a/src/test/backuptask-test.js b/src/test/backuptask-test.js index 95339eff8..b011729a1 100644 --- a/src/test/backuptask-test.js +++ b/src/test/backuptask-test.js @@ -12,7 +12,6 @@ const backups = require('../backups.js'), fs = require('fs'), os = require('os'), path = require('path'), - settings = require('../settings.js'), tasks = require('../tasks.js'), timers = require('timers/promises'); @@ -34,7 +33,7 @@ describe('backuptask', function () { before(async function () { fs.rmSync(backupConfig.backupFolder, { recursive: true, force: true }); - await settings.setBackupConfig(backupConfig); + await backups.setConfig(backupConfig); }); async function createBackup() { diff --git a/src/test/settings-test.js b/src/test/settings-test.js deleted file mode 100644 index fbb403e8f..000000000 --- a/src/test/settings-test.js +++ /dev/null @@ -1,53 +0,0 @@ -/* global it:false */ -/* global describe:false */ -/* global before:false */ -/* global after:false */ - -'use strict'; - -const common = require('./common.js'), - BoxError = require('../boxerror.js'), - expect = require('expect.js'), - settings = require('../settings.js'), - safe = require('safetydance'); - -describe('Settings', function () { - const { setup, cleanup } = common; - - 
before(setup); - after(cleanup); - - it('can get backup config', async function () { - const backupConfig = await settings.getBackupConfig(); - expect(backupConfig.provider).to.be('filesystem'); - expect(backupConfig.backupFolder).to.be('/var/backups'); - }); - - it('can set backup config', async function () { - let backupConfig = await settings.getBackupConfig(); - backupConfig = Object.assign({}, backupConfig, { backupFolder: '/tmp/backups' }); - await settings.setBackupConfig(backupConfig); - - const newBackupConfig = await settings.getBackupConfig(); - expect(newBackupConfig.backupFolder).to.be('/tmp/backups'); - }); - - it('cannot set backup policy with invalid schedule', async function () { - const [error] = await safe(settings.setBackupPolicy({ schedule: '', retention: { keepWithinSecs: 1 }})); - expect(error.reason).to.be(BoxError.BAD_FIELD); - }); - - it('cannot set backup policy with missing retention', async function () { - const [error] = await safe(settings.setBackupPolicy({ schedule: '00 * * * * *'})); - expect(error.reason).to.be(BoxError.BAD_FIELD); - }); - - it('cannot set backup policy with invalid retention', async function () { - const [error] = await safe(settings.setBackupPolicy({ schedule: '00 * * * * *', retention: { keepWhenever: 4 }})); - expect(error.reason).to.be(BoxError.BAD_FIELD); - }); - - it('can set valid backup policy', async function () { - await settings.setBackupPolicy({ schedule: '00 00 2,23 * * 0,1,2', retention: { keepWithinSecs: 1 }}); - }); -}); diff --git a/src/test/storage-test.js b/src/test/storage-test.js index 893cc4876..0ee4d2aaa 100644 --- a/src/test/storage-test.js +++ b/src/test/storage-test.js @@ -6,7 +6,8 @@ 'use strict'; -const BoxError = require('../boxerror.js'), +const backups = require('../backups.js'), + BoxError = require('../boxerror.js'), common = require('./common.js'), execSync = require('child_process').execSync, expect = require('expect.js'), @@ -19,8 +20,7 @@ const BoxError = 
require('../boxerror.js'), path = require('path'), readdirp = require('readdirp'), s3 = require('../storage/s3.js'), - safe = require('safetydance'), - settings = require('../settings.js'); + safe = require('safetydance'); const chunk = s3._chunk; @@ -55,12 +55,12 @@ describe('Storage', function () { it('fails to set backup config for bad folder', async function () { const tmp = Object.assign({}, gBackupConfig, { backupFolder: '/root/oof' }); - const [error] = await safe(settings.setBackupConfig(tmp)); + const [error] = await safe(backups.setConfig(tmp)); expect(error.reason).to.equal(BoxError.BAD_FIELD); }); it('succeeds to set backup config', async function () { - await settings.setBackupConfig(gBackupConfig); + await backups.setConfig(gBackupConfig); expect(fs.existsSync(path.join(gBackupConfig.backupFolder, 'snapshot'))).to.be(true); // auto-created }); diff --git a/src/updater.js b/src/updater.js index 65b78fe14..7b0c5c16c 100644 --- a/src/updater.js +++ b/src/updater.js @@ -11,6 +11,7 @@ exports = module.exports = { const apps = require('./apps.js'), assert = require('assert'), BoxError = require('./boxerror.js'), + backups = require('./backups.js'), backuptask = require('./backuptask.js'), constants = require('./constants.js'), cron = require('./cron.js'), @@ -219,14 +220,10 @@ async function updateToLatest(options, auditSource) { const error = locker.lock(locker.OP_BOX_UPDATE); if (error) throw new BoxError(BoxError.BAD_STATE, `Cannot update now: ${error.message}`); - const [getError, backupConfig] = await safe(settings.getBackupConfig()); - if (getError) throw getError; - + const backupConfig = await backups.getConfig(); const memoryLimit = backupConfig.limits?.memoryLimit ? 
Math.max(backupConfig.limits.memoryLimit/1024/1024, 400) : 400; - const [taskError, taskId] = await safe(tasks.add(tasks.TASK_UPDATE, [ boxUpdateInfo, options ])); - if (taskError) throw taskError; - + const taskId = await tasks.add(tasks.TASK_UPDATE, [ boxUpdateInfo, options ]); await eventlog.add(eventlog.ACTION_UPDATE, auditSource, { taskId, boxUpdateInfo }); tasks.startTask(taskId, { timeout: 20 * 60 * 60 * 1000 /* 20 hours */, nice: 15, memoryLimit }, async (error) => {