backup site: add contents

It is a JSON value that can be one of three:

* null - include everything
* include - only include these ids
* exclude - everything except these ids
This commit is contained in:
Girish Ramakrishnan
2025-09-22 13:27:26 +02:00
parent 0d5c1b99df
commit 4c3a8e1fd7
8 changed files with 146 additions and 20 deletions
+49 -16
View File
@@ -12,9 +12,10 @@ exports = module.exports = {
setLimits,
setSchedule,
setRetention,
setPrimary,
setEncryption,
setPrimary,
setName,
setContents,
removePrivateFields,
@@ -36,7 +37,6 @@ exports = module.exports = {
const assert = require('node:assert'),
backupFormats = require('./backupformats.js'),
backups = require('./backups.js'),
BoxError = require('./boxerror.js'),
constants = require('./constants.js'),
cron = require('./cron.js'),
@@ -62,7 +62,7 @@ const assert = require('node:assert'),
// filesystem - backupDir, noHardlinks
// mountpoint - mountPoint, prefix, noHardlinks
// encryption: 'encryptionPassword' and 'encryptedFilenames' is converted into an 'encryption' object using hush.js. Password is lost forever after conversion.
const BACKUP_TARGET_FIELDS = [ 'id', 'name', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'main', 'creationTime', 'ts', 'integrityKeyPairJson' ].join(',');
const BACKUP_TARGET_FIELDS = [ 'id', 'name', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'main', 'contentsJson', 'creationTime', 'ts', 'integrityKeyPairJson' ].join(',');
function storageApi(backupSite) {
assert.strictEqual(typeof backupSite, 'object');
@@ -119,6 +119,9 @@ function postProcess(result) {
result.primary = !!result.main; // primary is a reserved keyword in mysql
delete result.main;
result.contents = safe.JSON.parse(result.contentsJson) || null;
delete result.contentsJson;
return result;
}
@@ -143,6 +146,22 @@ function validateName(name) {
if (name.length > 100) return new BoxError(BoxError.BAD_FIELD, 'name too long');
}
function validateContents(contents) {
assert.strictEqual(typeof contents, 'object');
if (contents === null) return null;
if ('exclude' in contents) {
if (!Array.isArray(contents.exclude)) return new BoxError(BoxError.BAD_FIELD, 'exclude should be an array of strings');
if (!contents.exclude.every(item => typeof item === 'string')) return new BoxError(BoxError.BAD_FIELD, 'exclude should be an array of strings');
} else if ('include' in contents) {
if (!Array.isArray(contents.include)) return new BoxError(BoxError.BAD_FIELD, 'include should be an array of strings');
if (!contents.include.every(item => typeof item === 'string')) return new BoxError(BoxError.BAD_FIELD, 'include should be an array of strings');
}
return null;
}
function validateSchedule(schedule) {
assert.strictEqual(typeof schedule, 'string');
@@ -207,7 +226,7 @@ async function update(site, data) {
if (k === 'name' || k === 'schedule' || k === 'main') { // format, provider cannot be updated
fields.push(k + ' = ?');
args.push(data[k]);
} else if (k === 'config' || k === 'limits' || k === 'retention') { // encryption cannot be updated
} else if (k === 'config' || k === 'limits' || k === 'retention' || k === 'contents') { // encryption cannot be updated
fields.push(`${k}JSON = ?`);
args.push(JSON.stringify(data[k]));
}
@@ -308,6 +327,18 @@ async function setName(backupSite, name, auditSource) {
await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, name });
}
// Persists the contents selection (null / include / exclude) of a backup site
// and records an audit event. Throws a BoxError on invalid contents.
async function setContents(backupSite, contents, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof contents, 'object');
    assert.strictEqual(typeof auditSource, 'object');

    const error = validateContents(contents);
    if (error) throw error;

    await update(backupSite, { contents });
    await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, contents });
}
async function del(backupSite, auditSource) {
assert.strictEqual(typeof backupSite, 'object');
assert.strictEqual(typeof auditSource, 'object');
@@ -326,7 +357,7 @@ async function del(backupSite, auditSource) {
if (error && error.code === 'ER_NO_REFERENCED_ROW_2') throw new BoxError(BoxError.NOT_FOUND, error);
if (error) throw error;
if (result[2].affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found');
await eventlog.add(eventlog.ACTION_BACKUP_TARGET_REMOVE, auditSource, { backupSite: backupSite });
await eventlog.add(eventlog.ACTION_BACKUP_TARGET_REMOVE, auditSource, { backupSite });
backupSite.schedule = constants.CRON_PATTERN_NEVER;
await cron.handleBackupScheduleChanged(backupSite);
@@ -345,13 +376,12 @@ async function startBackupTask(site, auditSource) {
const taskId = await tasks.add(`${tasks.TASK_FULL_BACKUP_PREFIX}${site.id}`, [ site.id, { /* options */ } ]);
await eventlog.add(eventlog.ACTION_BACKUP_START, auditSource, { taskId });
await eventlog.add(eventlog.ACTION_BACKUP_START, auditSource, { taskId, siteId: site });
// background
tasks.startTask(taskId, { timeout: 24 * 60 * 60 * 1000 /* 24 hours */, nice: 15, memoryLimit, oomScoreAdjust: -999 })
.then(async (backupId) => {
const backup = await backups.get(backupId);
await eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, backupId, remotePath: backup.remotePath });
.then(async (result) => { // this can be the an array or string depending on site.contents
await eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, result, site });
})
.catch(async (error) => {
const timedOut = error.code === tasks.ETIMEOUT;
@@ -481,7 +511,7 @@ async function add(data, auditSource) {
if (constants.DEMO) throw new BoxError(BoxError.BAD_STATE, 'Not allowed in demo mode');
const { provider, name, config, format, retention, schedule } = data; // required
const { provider, name, config, format, contents, retention, schedule } = data; // required
const limits = data.limits || null,
encryptionPassword = data.encryptionPassword || null,
encryptedFilenames = data.encryptedFilenames || false,
@@ -493,6 +523,9 @@ async function add(data, auditSource) {
const nameError = validateName(name);
if (nameError) throw nameError;
const contentsError = validateContents(contents);
if (contentsError) throw contentsError;
let encryption = null;
if (encryptionPassword) {
const encryptionPasswordError = validateEncryptionPassword(encryptionPassword);
@@ -513,13 +546,13 @@ async function add(data, auditSource) {
debug('add: validating new storage configuration');
const sanitizedConfig = await storageApi({ provider }).verifyConfig({id, provider, config });
await database.query('INSERT INTO backupSites (id, name, provider, configJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
[ id, name, provider, JSON.stringify(sanitizedConfig), JSON.stringify(limits), JSON.stringify(integrityKeyPair), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, false ]);
await database.query('INSERT INTO backupSites (id, name, provider, configJson, contentsJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
[ id, name, provider, JSON.stringify(sanitizedConfig), JSON.stringify(contents), JSON.stringify(limits), JSON.stringify(integrityKeyPair), JSON.stringify(retention), schedule, JSON.stringify(encryption), format ]);
debug('add: setting up new storage configuration');
await storageApi({ provider }).setup(sanitizedConfig);
await eventlog.add(eventlog.ACTION_BACKUP_TARGET_ADD, auditSource, { id, name, provider, config, schedule, format });
await eventlog.add(eventlog.ACTION_BACKUP_TARGET_ADD, auditSource, { id, name, provider, config, contents, schedule, format });
return id;
}
@@ -534,10 +567,10 @@ async function addDefault(auditSource) {
config: { backupDir: paths.DEFAULT_BACKUP_DIR },
retention: { keepWithinSecs: 2 * 24 * 60 * 60 },
schedule: '00 00 23 * * *',
format: 'tgz'
format: 'tgz',
contents: null
};
defaultBackupSite.id = await add(defaultBackupSite, auditSource);
await setPrimary(defaultBackupSite, auditSource);
return await add(defaultBackupSite, auditSource);
}
// creates a backup site object that is not in the database
+21
View File
@@ -493,6 +493,24 @@ async function downloadMail(backupSite, remotePath, progressCallback) {
debug('downloadMail: time: %s', (new Date() - startTime)/1000);
}
// Decides whether the entity with the given id is part of this backup site's
// contents selection. contents is null (back up everything), { include: [...] }
// or { exclude: [...] } — see validateContents in backupsites.js.
function shouldBackup(backupSite, id) {
    if (!backupSite.contents) return true; // null/undefined contents means back up everything

    // BUG fix: include/exclude live on backupSite.contents, not on the site object
    // itself — the previous destructuring always yielded undefined, so filtering never applied
    const { include, exclude } = backupSite.contents;

    if (include && !include.includes(id)) {
        debug(`fullBackup: skipped backup of ${id} since it is not included`);
        return false;
    }

    if (exclude?.includes(id)) {
        debug(`fullBackup: skipped backup of ${id} since it is excluded`); // was a copy-paste of the include message
        return false;
    }

    return true;
}
// this function is called from external process. calling process is expected to have a lock
async function fullBackup(backupSiteId, options, progressCallback) {
assert.strictEqual(typeof backupSiteId, 'string');
@@ -518,6 +536,7 @@ async function fullBackup(backupSiteId, options, progressCallback) {
debug(`fullBackup: skipped backup ${app.fqdn} (${i+1}/${allApps.length}) since automatic backup disabled`);
continue; // nothing to backup
}
if (!shouldBackup(backupSite, app.id)) continue;
progressCallback({ percent, message: `Backing up ${app.fqdn} (${i+1}/${allApps.length}). Waiting for lock` });
await locks.wait(`${locks.TYPE_APP_BACKUP_PREFIX}${app.id}`);
@@ -529,6 +548,8 @@ async function fullBackup(backupSiteId, options, progressCallback) {
if (appBackupId) appBackupIds.push(appBackupId); // backupId can be null if in BAD_STATE and never backed up
}
if (!shouldBackup(backupSite, 'mail+platform')) return appBackupIds;
progressCallback({ percent, message: 'Backing up mail' });
percent += step;
const mailBackupId = await backupMailWithTag(backupSite, tag, options, (progress) => progressCallback({ percent, message: progress.message }));
+14 -1
View File
@@ -15,6 +15,7 @@ exports = module.exports = {
setRetention,
setPrimary,
setName,
setContents,
setEncryption,
createBackup,
@@ -65,11 +66,12 @@ async function list(req, res, next) {
async function add(req, res, next) {
assert.strictEqual(typeof req.body, 'object');
const { name, format, provider, config, schedule, retention } = req.body;
const { name, format, provider, contents, config, schedule, retention } = req.body;
if (typeof format !== 'string') return next(new HttpError(400, 'format must be a string'));
if (typeof name !== 'string') return next(new HttpError(400, 'name must be a string'));
if (typeof provider !== 'string') return next(new HttpError(400, 'provider is required'));
if (typeof contents !== 'object') return next(new HttpError(400, 'contents is required'));
// provider specific options are validated by provider backends
if (!config || typeof config !== 'object') return next(new HttpError(400, 'config is required'));
@@ -183,6 +185,17 @@ async function setPrimary(req, res, next) {
next(new HttpSuccess(200, {}));
}
// Route handler: updates the contents selection of the loaded backup site.
// Expects req.body.contents to be null or an object; deeper shape validation
// happens in the backupSites layer.
async function setContents(req, res, next) {
    assert.strictEqual(typeof req.body, 'object');

    const { contents } = req.body;
    if (typeof contents !== 'object') return next(new HttpError(400, 'contents must be an object'));

    const auditSource = AuditSource.fromRequest(req);
    const [error] = await safe(backupSites.setContents(req.resources.backupSite, contents, auditSource));
    if (error) return next(BoxError.toHttpError(error));

    next(new HttpSuccess(200, {}));
}
async function setEncryption(req, res, next) {
assert.strictEqual(typeof req.body, 'object');
+56 -1
View File
@@ -19,7 +19,8 @@ describe('Backups API', function () {
config: { backupDir: '/tmp/boxtest-newsite' },
format: 'tgz',
retention: { keepWithinSecs: 60 * 60 },
schedule: '00 01 * * * *'
schedule: '00 01 * * * *',
contents: null
};
const encryptedSite = {
@@ -29,6 +30,7 @@ describe('Backups API', function () {
format: 'rsync',
retention: { keepMonthly: 60 },
schedule: '* 1 * * * *',
contents: { exclude: [ 'thatapp' ] }
};
describe('add', function () {
@@ -210,6 +212,59 @@ describe('Backups API', function () {
});
});
describe('contents', function () {
it('cannot set invalid contents', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/contents`)
.query({ access_token: owner.token })
.send({ contents: 'garbage' })
.ok(() => true);
expect(response.status).to.equal(400);
});
it('cannot set invalid include', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/contents`)
.query({ access_token: owner.token })
.send({ contents: { include: 'something' } })
.ok(() => true);
expect(response.status).to.equal(400);
});
it('can set include', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/contents`)
.query({ access_token: owner.token })
.send({ contents: { include: [ 'something' ]} })
.ok(() => true);
expect(response.status).to.equal(200);
const result = await backupSites.get(newSite.id);
expect(result.contents).to.eql({ include: [ 'something' ]});
});
it('can set exclude', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/contents`)
.query({ access_token: owner.token })
.send({ contents: { exclude: [ 'something' ]} })
.ok(() => true);
expect(response.status).to.equal(200);
const result = await backupSites.get(newSite.id);
expect(result.contents).to.eql({ exclude: [ 'something' ] });
});
it('can set null', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/contents`)
.query({ access_token: owner.token })
.send({ contents: null })
.ok(() => true);
expect(response.status).to.equal(200);
const result = await backupSites.get(newSite.id);
expect(result.contents).to.eql(null);
});
});
describe('primary', function () {
it('cannot set invalid id', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}xx/configure/primary`)
+1
View File
@@ -173,6 +173,7 @@ async function initializeExpressSync() {
router.post('/api/v1/backup_sites/:id/configure/retention', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setRetention);
router.post('/api/v1/backup_sites/:id/configure/primary', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setPrimary);
router.post('/api/v1/backup_sites/:id/configure/encryption', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setEncryption);
router.post('/api/v1/backup_sites/:id/configure/contents', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setContents);
// app archive routes
router.get ('/api/v1/archives', token, authorizeAdmin, routes.archives.list);
+2 -1
View File
@@ -29,7 +29,8 @@ describe('backups', function () {
config: { backupDir: '/tmp/boxtest2' },
format: 'rsync',
retention: { keepWithinSecs: 2 * 24 * 60 * 60 },
schedule: '00 00 23 * * *'
schedule: '00 00 23 * * *',
contents: null
};
const appBackup = {
+2 -1
View File
@@ -228,7 +228,8 @@ async function databaseSetup() {
config: { backupDir: '/tmp/boxtest' },
format: 'tgz',
retention: { keepWithinSecs: 2 * 24 * 60 * 60 },
schedule: '00 00 23 * * *'
schedule: '00 00 23 * * *',
contents: null
}, auditSource);
await backupSites.setPrimary({ id }, auditSource);
}