diff --git a/migrations/20250724102340-backupSites-create-table.js b/migrations/20250724102340-backupSites-create-table.js index e004bbb28..a832daeae 100644 --- a/migrations/20250724102340-backupSites-create-table.js +++ b/migrations/20250724102340-backupSites-create-table.js @@ -22,7 +22,7 @@ exports.up = async function (db) { 'integrityKeyPairJson TEXT,' + 'format VARCHAR(16) NOT NULL,' + 'schedule VARCHAR(128),' + - 'main BOOLEAN DEFAULT false,' + + 'enabledForUpdates BOOLEAN DEFAULT false,' + 'contentsJson TEXT,' + 'creationTime TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,' + 'ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,' + @@ -39,7 +39,7 @@ exports.up = async function (db) { return; } - const name = 'Default', main = true; + const name = 'Default', enabledForUpdates = true; let config = null, limits = null, encryption = null, format = null, provider = null; let retention = { keepWithinSecs: 2 * 24 * 60 * 60 }; let schedule = '00 00 23 * * *'; @@ -100,8 +100,8 @@ exports.up = async function (db) { child_process.execSync(`rm -f ${targetInfoDir}/*.cache`); await db.runSql('START TRANSACTION'); - await db.runSql('INSERT INTO backupSites (id, name, provider, configJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', - [ id, name, provider, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(integrityKeyPair), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, main ]); + await db.runSql('INSERT INTO backupSites (id, name, provider, configJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format, enabledForUpdates) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', + [ id, name, provider, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(integrityKeyPair), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, enabledForUpdates ]); await deleteOldSettings(db); await 
db.runSql('COMMIT'); }; diff --git a/migrations/schema.sql b/migrations/schema.sql index b0029644d..cb9a639ed 100644 --- a/migrations/schema.sql +++ b/migrations/schema.sql @@ -316,7 +316,7 @@ CREATE TABLE IF NOT EXISTS backupSites( format VARCHAR(16) NOT NULL, schedule VARCHAR(128), integrityKeyPairJson TEXT, // { publicKey, privateKey } - main BOOLEAN DEFAULT false, // 'primary' and 'default' are SQL keywords + enabledForUpdates BOOLEAN DEFAULT false, creationTime TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, diff --git a/src/apps.js b/src/apps.js index 784860fae..7457d38e2 100644 --- a/src/apps.js +++ b/src/apps.js @@ -1258,12 +1258,11 @@ async function scheduleTask(appId, installationState, taskId, auditSource) { assert.strictEqual(typeof taskId, 'string'); assert.strictEqual(typeof auditSource, 'object'); - const backupSite = await backupSites.getPrimary(); - let memoryLimit = 400; if (installationState === exports.ISTATE_PENDING_CLONE || installationState === exports.ISTATE_PENDING_RESTORE || installationState === exports.ISTATE_PENDING_IMPORT || installationState === exports.ISTATE_PENDING_UPDATE) { - memoryLimit = backupSite.limits?.memoryLimit ? Math.max(backupSite.limits.memoryLimit/1024/1024, 400) : 400; + const sites = await backupSites.listByContentForUpdates(appId); + memoryLimit = sites.reduce((acc, cur) => cur.limits?.memoryLimit ? 
Math.max(cur.limits.memoryLimit/1024/1024, acc) : acc, 400); } else if (installationState === exports.ISTATE_PENDING_DATA_DIR_MIGRATION) { memoryLimit = 1024; // cp takes more memory than we think } @@ -2390,17 +2389,16 @@ async function importApp(app, data, auditSource) { return { taskId }; } -async function exportApp(app, data, auditSource) { +async function exportApp(app, backupSiteId, auditSource) { assert.strictEqual(typeof app, 'object'); - assert.strictEqual(typeof data, 'object'); + assert.strictEqual(typeof backupSiteId, 'string'); // FIXME: this is not used at all in snapshotOnly mode assert.strictEqual(typeof auditSource, 'object'); const appId = app.id; if (!canBackupApp(app)) throw new BoxError(BoxError.BAD_STATE, 'App cannot be backed up in this state'); - const backupSite = await backupSites.getPrimary(); - const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ appId, backupSite.id, { snapshotOnly: true } ]); + const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ appId, backupSiteId, { snapshotOnly: true } ]); safe(tasks.startTask(taskId, {}), { debug }); // background return { taskId }; } @@ -2768,13 +2766,15 @@ function canBackupApp(app) { app.installationState === exports.ISTATE_PENDING_UPDATE; // called from apptask } -async function backup(app, auditSource) { +async function backup(app, backupSiteId, auditSource) { assert.strictEqual(typeof app, 'object'); + assert.strictEqual(typeof backupSiteId, 'string'); assert.strictEqual(typeof auditSource, 'object'); if (!canBackupApp(app)) throw new BoxError(BoxError.BAD_STATE, 'App cannot be backed up in this state'); - const backupSite = await backupSites.getPrimary(); + const backupSite = await backupSites.get(backupSiteId); + if (!backupSite) throw new BoxError(BoxError.BAD_FIELD, 'No such backup site'); const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ app.id, backupSite.id, { snapshotOnly: false } ]); diff --git 
a/src/apptask.js b/src/apptask.js index 9ec060599..be7bfec06 100644 --- a/src/apptask.js +++ b/src/apptask.js @@ -566,14 +566,18 @@ async function updateCommand(app, args, progressCallback) { if (!updateConfig.skipBackup) { await progressCallback({ percent: 15, message: 'Backing up app' }); - const backupSite = await backupSites.getPrimary(); - // preserve update backups for 3 weeks - const [error] = await safe(backuptask.backupApp(app, backupSite, { preserveSecs: 3*7*24*60*60 }, (progress) => { - progressCallback({ percent: 15, message: `Backup - ${progress.message}` }); - })); - if (error) { - error.backupError = true; - throw error; + const sites = await backupSites.listByContentForUpdates(app.id); + if (sites.length === 0) throw new BoxError(BoxError.BAD_STATE, 'App has no backup site for updates', { backupError: true }); + + for (const site of sites) { + // preserve update backups for 3 weeks + const [error] = await safe(backuptask.backupApp(app, site, { preserveSecs: 3*7*24*60*60 }, (progress) => { + progressCallback({ percent: 15, message: `Backup - ${progress.message}` }); + })); + if (error) { + error.backupError = true; + throw error; + } } } diff --git a/src/backupsites.js b/src/backupsites.js index 25e8e1ee9..3a86dbf5c 100644 --- a/src/backupsites.js +++ b/src/backupsites.js @@ -2,8 +2,8 @@ exports = module.exports = { get, - getPrimary, list, + listByContentForUpdates, add, addDefault, del, @@ -13,7 +13,7 @@ exports = module.exports = { setSchedule, setRetention, setEncryption, - setPrimary, + setEnabledForUpdates, setName, setContents, @@ -26,6 +26,8 @@ exports = module.exports = { getSnapshotInfo, setSnapshotInfo, + hasContent, + remount, getStatus, ensureMounted, @@ -62,7 +64,7 @@ const assert = require('node:assert'), // filesystem - backupDir, noHardlinks // mountpoint - mountPoint, prefix, noHardlinks // encryption: 'encryptionPassword' and 'encryptedFilenames' is converted into an 'encryption' object using hush.js. 
Password is lost forever after conversion. -const BACKUP_TARGET_FIELDS = [ 'id', 'name', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'main', 'contentsJson', 'creationTime', 'ts', 'integrityKeyPairJson' ].join(','); +const BACKUP_TARGET_FIELDS = [ 'id', 'name', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'enabledForUpdates', 'contentsJson', 'creationTime', 'ts', 'integrityKeyPairJson' ].join(','); function storageApi(backupSite) { assert.strictEqual(typeof backupSite, 'object'); @@ -115,8 +117,7 @@ function postProcess(result) { result.integrityKeyPair = result.integrityKeyPairJson ? safe.JSON.parse(result.integrityKeyPairJson) : null; delete result.integrityKeyPairJson; - result.primary = !!result.main; // primary is a reserved keyword in mysql - delete result.main; + result.enabledForUpdates = !!result.enabledForUpdates; result.contents = safe.JSON.parse(result.contentsJson) || null; delete result.contentsJson; @@ -203,14 +204,26 @@ async function list(page, perPage) { return results; } -async function get(id) { - const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupSites WHERE id=?`, [ id ]); - if (results.length === 0) return null; - return postProcess(results[0]); +function hasContent({ contents }, id) { + if (!contents) return true; + + if (contents.include && !contents.include.includes(id)) return false; + if (contents.exclude?.includes(id)) return false; + + return true; } -async function getPrimary() { - const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupSites WHERE main=?`, [ true ]); +async function listByContentForUpdates(id) { + assert.strictEqual(typeof id, 'string'); + + const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupSites WHERE enabledForUpdates=?`, [ true ]); + results.forEach(function (result) { postProcess(result); }); + + return results.filter(r =>
hasContent(r, id)); +} + +async function get(id) { + const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupSites WHERE id=?`, [ id ]); if (results.length === 0) return null; return postProcess(results[0]); } @@ -222,7 +235,7 @@ async function update(site, data) { const args = []; const fields = []; for (const k in data) { - if (k === 'name' || k === 'schedule' || k === 'main') { // format, provider cannot be updated + if (k === 'name' || k === 'schedule' || k === 'enabledForUpdates') { // format, provider cannot be updated fields.push(k + ' = ?'); args.push(data[k]); } else if (k === 'config' || k === 'limits' || k === 'retention' || k === 'contents') { // encryption cannot be updated @@ -271,21 +284,13 @@ async function setRetention(backupSite, retention, auditSource) { await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, retention }); } -async function setPrimary(backupSite, auditSource) { +async function setEnabledForUpdates(backupSite, enabledForUpdates, auditSource) { assert.strictEqual(typeof backupSite, 'object'); + assert.strictEqual(typeof enabledForUpdates, 'boolean'); assert.strictEqual(typeof auditSource, 'object'); - const queries = [ - { query: 'SELECT 1 FROM backupSites WHERE id=? FOR UPDATE', args: [ backupSite.id ] }, // ensure this exists! - { query: 'UPDATE backupSites SET main=?', args: [ false ] }, - { query: 'UPDATE backupSites SET main=? 
WHERE id=?', args: [ true, backupSite.id ] } - ]; - - const [error, result] = await safe(database.transaction(queries)); - if (error) throw error; - if (result[2].affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found'); - - await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, primary: true }); + await update(backupSite, { enabledForUpdates }); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, enabledForUpdates }); } async function setEncryption(backupSite, data, auditSource) { @@ -349,7 +354,7 @@ async function del(backupSite, auditSource) { const queries = [ { query: 'DELETE FROM archives WHERE backupId IN (SELECT id FROM backups WHERE siteId=?)', args: [ backupSite.id ] }, { query: 'DELETE FROM backups WHERE siteId=?', args: [ backupSite.id ] }, - { query: 'DELETE FROM backupSites WHERE id=? AND main=?', args: [ backupSite.id, false ] }, // cannot delete primary + { query: 'DELETE FROM backupSites WHERE id=?', args: [ backupSite.id ] }, ]; const [error, result] = await safe(database.transaction(queries)); diff --git a/src/backuptask.js b/src/backuptask.js index b563870ec..c0005d13b 100644 --- a/src/backuptask.js +++ b/src/backuptask.js @@ -493,24 +493,6 @@ async function downloadMail(backupSite, remotePath, progressCallback) { debug('downloadMail: time: %s', (new Date() - startTime)/1000); } -function shouldBackup(backupSite, id) { - if (!backupSite.contents) return true; - - const { include, exclude } = backupSite; - - if (include && !include.includes(id)) { - debug(`fullBackup: skipped backup of ${id} since it is not included`); - return false; - } - - if (exclude?.includes(id)) { - debug(`fullBackup: skipped backup of ${id} since it is not included`); - return false; - } - - return true; -} - // this function is called from external process.
calling process is expected to have a lock async function fullBackup(backupSiteId, options, progressCallback) { assert.strictEqual(typeof backupSiteId, 'string'); @@ -536,7 +518,10 @@ async function fullBackup(backupSiteId, options, progressCallback) { debug(`fullBackup: skipped backup ${app.fqdn} (${i+1}/${allApps.length}) since automatic backup disabled`); continue; // nothing to backup } - if (!shouldBackup(backupSite, app.id)) continue; + if (!backupSites.hasContent(backupSite, app.id)) { + debug(`fullBackup: skipped backup ${app.fqdn} (${i+1}/${allApps.length}) as it is not in site contents`); + continue; + } progressCallback({ percent, message: `Backing up ${app.fqdn} (${i+1}/${allApps.length}). Waiting for lock` }); await locks.wait(`${locks.TYPE_APP_BACKUP_PREFIX}${app.id}`); @@ -548,7 +533,7 @@ async function fullBackup(backupSiteId, options, progressCallback) { if (appBackupId) appBackupIds.push(appBackupId); // backupId can be null if in BAD_STATE and never backed up } - if (!shouldBackup(backupSite, 'mail+platform')) return appBackupIds; + if (!backupSites.hasContent(backupSite, 'box')) return appBackupIds; progressCallback({ percent, message: 'Backing up mail' }); percent += step; diff --git a/src/routes/apps.js b/src/routes/apps.js index 6200fd7d6..358f7e966 100644 --- a/src/routes/apps.js +++ b/src/routes/apps.js @@ -591,7 +591,9 @@ async function exportApp(req, res, next) { assert.strictEqual(typeof req.body, 'object'); assert.strictEqual(typeof req.resources.app, 'object'); - const [error, result] = await safe(apps.exportApp(req.resources.app, {}, AuditSource.fromRequest(req))); + if (typeof req.body.backupSiteId !== 'string') return next(new HttpError(400, 'backupSiteId must be a string')); + + const [error, result] = await safe(apps.exportApp(req.resources.app, req.body.backupSiteId, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(202, { taskId: result.taskId })); @@ -625,7 +627,9 @@ async 
function clone(req, res, next) { async function backup(req, res, next) { assert.strictEqual(typeof req.resources.app, 'object'); - const [error, result] = await safe(apps.backup(req.resources.app, AuditSource.fromRequest(req))); + if (typeof req.body.backupSiteId !== 'string') return next(new HttpError(400, 'backupSiteId must be a string')); + + const [error, result] = await safe(apps.backup(req.resources.app, req.body.backupSiteId, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(202, { taskId: result.taskId })); diff --git a/src/routes/backupsites.js b/src/routes/backupsites.js index 79aecb76b..b98265089 100644 --- a/src/routes/backupsites.js +++ b/src/routes/backupsites.js @@ -13,7 +13,7 @@ exports = module.exports = { setLimits, setSchedule, setRetention, - setPrimary, + setEnabledForUpdates, setName, setContents, setEncryption, @@ -176,10 +176,12 @@ async function setRetention(req, res, next) { next(new HttpSuccess(200, {})); } -async function setPrimary(req, res, next) { +async function setEnabledForUpdates(req, res, next) { assert.strictEqual(typeof req.body, 'object'); - const [error] = await safe(backupSites.setPrimary(req.resources.backupSite, AuditSource.fromRequest(req))); + if (typeof req.body.enable !== 'boolean') return next(new HttpError(400, 'enable is required')); + + const [error] = await safe(backupSites.setEnabledForUpdates(req.resources.backupSite, req.body.enable, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, {})); diff --git a/src/server.js b/src/server.js index 143b72b67..b927dceae 100644 --- a/src/server.js +++ b/src/server.js @@ -171,9 +171,9 @@ async function initializeExpressSync() { router.post('/api/v1/backup_sites/:id/configure/limits', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setLimits); router.post('/api/v1/backup_sites/:id/configure/schedule', json, token, authorizeOwner, 
routes.backupSites.load, routes.backupSites.setSchedule); router.post('/api/v1/backup_sites/:id/configure/retention', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setRetention); - router.post('/api/v1/backup_sites/:id/configure/primary', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setPrimary); router.post('/api/v1/backup_sites/:id/configure/encryption', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setEncryption); router.post('/api/v1/backup_sites/:id/configure/contents', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setContents); + router.post('/api/v1/backup_sites/:id/configure/enable_for_updates', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setEnabledForUpdates); // app archive routes router.get ('/api/v1/archives', token, authorizeAdmin, routes.archives.list); diff --git a/src/test/common.js b/src/test/common.js index 683fa6dad..65114e19b 100644 --- a/src/test/common.js +++ b/src/test/common.js @@ -222,16 +222,7 @@ async function databaseSetup() { await dashboard._setLocation(constants.DASHBOARD_SUBDOMAIN, exports.dashboardDomain); // duplicated here since we clear the database - const id = await backupSites.add({ - provider: 'filesystem', - name: 'Default', - config: { backupDir: '/tmp/boxtest' }, - format: 'tgz', - retention: { keepWithinSecs: 2 * 24 * 60 * 60 }, - schedule: '00 00 23 * * *', - contents: null - }, auditSource); - await backupSites.setPrimary({ id }, auditSource); + await backupSites.addDefault(auditSource); } async function domainSetup() { diff --git a/src/updater.js b/src/updater.js index 119d09620..6e4a0fb73 100644 --- a/src/updater.js +++ b/src/updater.js @@ -176,10 +176,12 @@ async function updateBox(boxUpdateInfo, options, progressCallback) { if (!options.skipBackup) { progressCallback({ percent: 10, message: 'Backing up' }); - const site = await backupSites.getPrimary(); - if (!site) throw new 
BoxError(BoxError.BAD_STATE, 'no default backup site'); + const sites = await backupSites.listByContentForUpdates('box'); + if (sites.length === 0) throw new BoxError(BoxError.BAD_STATE, 'no backup site for update'); - await backuptask.fullBackup(site.id, { preserveSecs: 3*7*24*60*60 }, (progress) => progressCallback({ percent: 10+progress.percent*70/100, message: progress.message })); + for (const site of sites) { + await backuptask.fullBackup(site.id, { preserveSecs: 3*7*24*60*60 }, (progress) => progressCallback({ percent: 10+progress.percent*70/100, message: progress.message })); + } await checkFreeDiskSpace(2*1024*1024*1024); // check again in case backup is in same disk } @@ -223,11 +225,13 @@ async function startBoxUpdateTask(options, auditSource) { await checkBoxUpdateRequirements(boxUpdateInfo); + const sites = await backupSites.listByContentForUpdates('box'); + if (sites.length === 0) throw new BoxError(BoxError.BAD_STATE, 'No backup site for update'); + const [error] = await safe(locks.acquire(locks.TYPE_BOX_UPDATE_TASK)); if (error) throw new BoxError(BoxError.BAD_STATE, `Another update task is in progress: ${error.message}`); - const backupSite = await backupSites.getPrimary(); - const memoryLimit = backupSite.limits?.memoryLimit ? Math.max(backupSite.limits.memoryLimit/1024/1024, 400) : 400; + const memoryLimit = sites.reduce((acc, cur) => cur.limits?.memoryLimit ? Math.max(cur.limits.memoryLimit/1024/1024, acc) : acc, 400); const taskId = await tasks.add(tasks.TASK_BOX_UPDATE, [ boxUpdateInfo, options ]); await eventlog.add(eventlog.ACTION_UPDATE, auditSource, { taskId, boxUpdateInfo }); @@ -288,6 +292,12 @@ async function autoUpdate(auditSource) { continue; } + const sites = await backupSites.listByContentForUpdates(app.id); + if (sites.length === 0) { + debug(`autoUpdate: ${app.fqdn} has no backup site for updates. skipping`); + continue; + } + const data = { manifest: app.updateInfo.manifest, force: false