'use strict';
|
|
|
|
// public interface of the backups module
exports = module.exports = {
    // backup site CRUD
    get,
    list,
    listByContentForUpdates,
    add,
    addDefault,
    del,

    // backup site setters (format and provider are immutable after creation)
    setConfig,
    setLimits,
    setSchedule,
    setRetention,
    setEncryption,
    setEnabledForUpdates,
    setName,
    setContents,

    // strips secrets before a site object is sent to clients
    removePrivateFields,

    // background tasks
    startBackupTask,

    startCleanupTask,

    // snapshot bookkeeping (allows cleanup of backups of uninstalled apps)
    getSnapshotInfo,
    setSnapshotInfo,

    // contents (include/exclude) selection check
    hasContent,

    // storage status/mount helpers
    remount,

    getStatus,

    ensureMounted,

    // storage backend lookup by provider
    storageApi,

    // ephemeral site object, not persisted in the database
    createPseudo,

    reinitAll
};
|
|
|
|
const assert = require('node:assert'),
|
|
backupFormats = require('./backupformats.js'),
|
|
BoxError = require('./boxerror.js'),
|
|
constants = require('./constants.js'),
|
|
cron = require('./cron.js'),
|
|
{ CronTime } = require('cron'),
|
|
crypto = require('node:crypto'),
|
|
database = require('./database.js'),
|
|
debug = require('debug')('box:backups'),
|
|
eventlog = require('./eventlog.js'),
|
|
hush = require('./hush.js'),
|
|
locks = require('./locks.js'),
|
|
path = require('node:path'),
|
|
paths = require('./paths.js'),
|
|
safe = require('safetydance'),
|
|
tasks = require('./tasks.js');
|
|
|
|
// format: rsync or tgz
|
|
// provider: used to determine the api provider
|
|
// config: depends on the 'provider' field. 'provider' is not stored in config object. but it is injected when calling the api backends
|
|
// s3 providers - accessKeyId, secretAccessKey, bucket, prefix etc . see s3.js
|
|
// gcs - bucket, prefix, projectId, credentials . see gcs.js
|
|
// ext4/xfs/disk (managed providers) - mountOptions (diskPath), prefix, noHardlinks. disk is legacy.
|
|
// nfs/cifs/sshfs (managed providers) - mountOptions (host/username/password/seal/privateKey etc), prefix, noHardlinks
|
|
// filesystem - backupDir, noHardlinks
|
|
// mountpoint - mountPoint, prefix, noHardlinks
|
|
// encryption: 'encryptionPassword' and 'encryptedFilenames' is converted into an 'encryption' object using hush.js. Password is lost forever after conversion.
|
|
// column list selected for a backup site row; postProcess() expands the *Json columns into objects
const BACKUP_TARGET_FIELDS = [ 'id', 'name', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'enableForUpdates', 'contentsJson', 'creationTime', 'ts', 'integrityKeyPairJson' ].join(',');
|
|
|
|
// Maps a provider id to the storage backend module implementing it. Many
// providers share an implementation: the mounted/local providers all use the
// filesystem backend and the S3-compatible providers all use the s3 backend.
const PROVIDER_BACKENDS = {
    // filesystem based (managed mounts, local disks, plain directories)
    'nfs': './storage/filesystem.js',
    'cifs': './storage/filesystem.js',
    'sshfs': './storage/filesystem.js',
    'mountpoint': './storage/filesystem.js',
    'disk': './storage/filesystem.js', // legacy
    'ext4': './storage/filesystem.js',
    'xfs': './storage/filesystem.js',
    'filesystem': './storage/filesystem.js',

    // s3 compatible object storage
    's3': './storage/s3.js',
    'minio': './storage/s3.js',
    's3-v4-compat': './storage/s3.js',
    'digitalocean-spaces': './storage/s3.js',
    'exoscale-sos': './storage/s3.js',
    'wasabi': './storage/s3.js',
    'scaleway-objectstorage': './storage/s3.js',
    'backblaze-b2': './storage/s3.js',
    'cloudflare-r2': './storage/s3.js',
    'linode-objectstorage': './storage/s3.js',
    'ovh-objectstorage': './storage/s3.js',
    'ionos-objectstorage': './storage/s3.js',
    'idrive-e2': './storage/s3.js',
    'vultr-objectstorage': './storage/s3.js',
    'upcloud-objectstorage': './storage/s3.js',
    'contabo-objectstorage': './storage/s3.js',
    'hetzner-objectstorage': './storage/s3.js',

    // google cloud storage
    'gcs': './storage/gcs.js'
};

// Returns the storage backend module for the passed backup site's provider.
// Throws BoxError.BAD_FIELD for an unknown provider.
function storageApi(backupSite) {
    assert.strictEqual(typeof backupSite, 'object');

    // Object.hasOwn guards against prototype keys ('constructor' etc.) in case provider comes from untrusted input
    const modulePath = Object.hasOwn(PROVIDER_BACKENDS, backupSite.provider) ? PROVIDER_BACKENDS[backupSite.provider] : null;
    if (!modulePath) throw new BoxError(BoxError.BAD_FIELD, `Unknown provider: ${backupSite.provider}`);

    return require(modulePath); // require() caches modules, so repeated calls are cheap
}
|
|
|
|
// Expands the serialized *Json database columns of a backup site row into
// their object counterparts and normalizes field types. Mutates and returns
// 'result'. Parse failures degrade per-column (safe.JSON.parse returns null).
function postProcess(result) {
    assert.strictEqual(typeof result, 'object');

    // [ dbColumn, objectField, converter ] — converters mirror the historic per-field fallbacks
    const conversions = [
        [ 'configJson', 'config', (v) => v ? safe.JSON.parse(v) : {} ],
        [ 'limitsJson', 'limits', (v) => safe.JSON.parse(v) || {} ],
        [ 'retentionJson', 'retention', (v) => safe.JSON.parse(v) || {} ],
        [ 'encryptionJson', 'encryption', (v) => v ? safe.JSON.parse(v) : null ],
        [ 'integrityKeyPairJson', 'integrityKeyPair', (v) => v ? safe.JSON.parse(v) : null ],
        [ 'contentsJson', 'contents', (v) => safe.JSON.parse(v) || null ]
    ];

    for (const [ column, field, convert ] of conversions) {
        result[field] = convert(result[column]);
        delete result[column];
    }

    result.enableForUpdates = !!result.enableForUpdates; // stored as 0/1 in the database

    return result;
}
|
|
|
|
// Strips secrets from a backup site object before it is sent to clients:
// replaces the 'encryption' object with public summary fields, drops the
// integrity private key and delegates config scrubbing to the storage backend.
// Mutates and returns 'site'.
function removePrivateFields(site) {
    assert.strictEqual(typeof site, 'object');

    site.encrypted = site.encryption !== null;
    site.encryptedFilenames = site.encryption?.encryptedFilenames || false;
    site.encryptionPasswordHint = site.encryption?.encryptionPasswordHint || null;
    delete site.encryption;

    // integrityKeyPair is null when the integrityKeyPairJson column is empty (see postProcess) - guard against a TypeError
    if (site.integrityKeyPair) delete site.integrityKeyPair.privateKey;

    site.config = storageApi(site).removePrivateFields(site.config);

    return site;
}
|
|
|
|
// Validates a backup site display name. Returns null when valid and a
// BoxError.BAD_FIELD otherwise (consistent with the other validators, which
// return null explicitly instead of undefined).
function validateName(name) {
    assert.strictEqual(typeof name, 'string');

    if (name.length === 0) return new BoxError(BoxError.BAD_FIELD, 'name cannot be empty');
    if (name.length > 100) return new BoxError(BoxError.BAD_FIELD, 'name too long');

    return null;
}
|
|
|
|
// Validates the contents selector of a backup site. 'contents' is either null
// (back up everything) or an object with an 'exclude' or 'include' array of
// ids. Returns null when valid, BoxError otherwise.
function validateContents(contents) {
    assert.strictEqual(typeof contents, 'object');

    // if you change the structure of contents, look into app.js:del as well
    if (contents === null) return null;

    const isStringArray = (value) => Array.isArray(value) && value.every((item) => typeof item === 'string');

    // 'exclude' wins over 'include' when both are present, matching hasContent()
    if ('exclude' in contents) {
        if (!isStringArray(contents.exclude)) return new BoxError(BoxError.BAD_FIELD, 'exclude should be an array of strings');
    } else if ('include' in contents) {
        if (!isStringArray(contents.include)) return new BoxError(BoxError.BAD_FIELD, 'include should be an array of strings');
    }

    return null;
}
|
|
|
|
// Validates a cron schedule pattern. Returns null when valid, BoxError
// otherwise. The special 'never' pattern is always accepted.
function validateSchedule(schedule) {
    assert.strictEqual(typeof schedule, 'string');

    if (schedule === constants.CRON_PATTERN_NEVER) return null;

    // CronTime throws on a bad pattern; safeCall turns the throw into a null
    const cronTime = safe.safeCall(() => new CronTime(schedule));

    return cronTime ? null : new BoxError(BoxError.BAD_FIELD, 'Invalid schedule pattern');
}
|
|
|
|
// Validates a retention policy object. At least one keep* property must be
// set and every provided keep* property must be a number. Returns null when
// valid, BoxError otherwise.
function validateRetention(retention) {
    assert.strictEqual(typeof retention, 'object');

    if (!retention) return new BoxError(BoxError.BAD_FIELD, 'retention is required');

    const keys = [ 'keepWithinSecs', 'keepDaily', 'keepWeekly', 'keepMonthly', 'keepYearly' ];

    if (!keys.some((k) => !!retention[k])) return new BoxError(BoxError.BAD_FIELD, 'retention properties missing');

    for (const key of keys) {
        if (key in retention && typeof retention[key] !== 'number') return new BoxError(BoxError.BAD_FIELD, `retention.${key} must be a number`);
    }

    return null;
}
|
|
|
|
// Validates a backup encryption password (minimum length 8). Returns null
// when valid and a BoxError.BAD_FIELD otherwise (consistent with the other
// validators, which return null explicitly instead of undefined).
function validateEncryptionPassword(password) {
    assert.strictEqual(typeof password, 'string');

    if (password.length < 8) return new BoxError(BoxError.BAD_FIELD, 'password must be atleast 8 characters');

    return null;
}
|
|
|
|
// Returns all backup sites (post processed), ordered by name descending.
async function list() {
    const sites = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupSites ORDER BY name DESC`, []);

    for (const site of sites) postProcess(site); // mutates in place

    return sites;
}
|
|
|
|
// Returns whether the backup content with 'id' ('box', 'mail' or an appId) is
// covered by this site's contents selection.
function hasContent({ contents }, id) {
    // if you change the structure of contents, look into app.js:del as well
    if (!contents) return true; // no selector: everything is backed up

    const excluded = !!contents.exclude?.includes(id);
    const notIncluded = !!contents.include && !contents.include.includes(id);

    return !excluded && !notIncluded;
}
|
|
|
|
// Returns all backup sites that are enabled for updates and whose contents
// selection covers the backup content with 'id'.
async function listByContentForUpdates(id) {
    assert.strictEqual(typeof id, 'string');

    const sites = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupSites WHERE enableForUpdates=?`, [ true ]);

    return sites.map((site) => postProcess(site)).filter((site) => hasContent(site, id));
}
|
|
|
|
// Returns the backup site with 'id', or null when it does not exist.
async function get(id) {
    assert.strictEqual(typeof id, 'string'); // consistent with the other accessors (e.g. listByContentForUpdates)

    const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupSites WHERE id=?`, [ id ]);
    if (results.length === 0) return null;

    return postProcess(results[0]);
}
|
|
|
|
// Persists the passed fields of 'data' for the backup site. Only name,
// schedule, enableForUpdates and the serialized config/limits/retention/
// contents can be updated here; format and provider are immutable and
// encryption has a dedicated setter (setEncryption). Unknown keys in 'data'
// are silently ignored. Throws NOT_FOUND when the site does not exist.
async function update(site, data) {
    assert.strictEqual(typeof site, 'object');
    assert(data && typeof data === 'object');

    const args = [];
    const fields = [];
    for (const k in data) {
        if (k === 'name' || k === 'schedule' || k === 'enableForUpdates') { // format, provider cannot be updated
            fields.push(k + ' = ?');
            args.push(data[k]);
        } else if (k === 'config' || k === 'limits' || k === 'retention' || k === 'contents') { // encryption cannot be updated
            fields.push(`${k}Json = ?`); // columns use 'Json' casing (see BACKUP_TARGET_FIELDS); previously 'JSON', which only worked because MySQL column names are case-insensitive
            args.push(JSON.stringify(data[k]));
        }
    }
    args.push(site.id);

    const [updateError, result] = await safe(database.query('UPDATE backupSites SET ' + fields.join(', ') + ' WHERE id = ?', args));
    if (updateError) throw updateError;
    if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found');
}
|
|
|
|
// Validates and persists a new backup schedule, reschedules the cron job and
// writes an audit event.
async function setSchedule(backupSite, schedule, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof schedule, 'string');
    assert.strictEqual(typeof auditSource, 'object');

    const scheduleError = validateSchedule(schedule);
    if (scheduleError) throw scheduleError;

    await update(backupSite, { schedule });

    // 'backupSite' is the stale copy, so hand cron a copy carrying the new schedule
    await cron.handleBackupScheduleChanged({ ...backupSite, schedule });

    await eventlog.add(eventlog.ACTION_BACKUP_SITE_UPDATE, auditSource, { name: backupSite.name, schedule });
}
|
|
|
|
// Persists new task limits for the backup site and writes an audit event.
// 'limits' is stored as-is in limitsJson; see startBackupTask for how
// limits.memoryLimit is consumed. No validation is performed here.
async function setLimits(backupSite, limits, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof limits, 'object');
    assert.strictEqual(typeof auditSource, 'object');

    await update(backupSite, { limits });
    await eventlog.add(eventlog.ACTION_BACKUP_SITE_UPDATE, auditSource, { name: backupSite.name, limits });
}
|
|
|
|
// Validates and persists a new retention policy and writes an audit event.
async function setRetention(backupSite, retention, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof retention, 'object');
    assert.strictEqual(typeof auditSource, 'object');

    const retentionError = validateRetention(retention);
    if (retentionError) throw retentionError;

    await update(backupSite, { retention });
    await eventlog.add(eventlog.ACTION_BACKUP_SITE_UPDATE, auditSource, { name: backupSite.name, retention });
}
|
|
|
|
// Persists whether this site should receive automatic pre-update backups
// (see listByContentForUpdates) and writes an audit event.
async function setEnabledForUpdates(backupSite, enableForUpdates, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof enableForUpdates, 'boolean');
    assert.strictEqual(typeof auditSource, 'object');

    await update(backupSite, { enableForUpdates });
    await eventlog.add(eventlog.ACTION_BACKUP_SITE_UPDATE, auditSource, { name: backupSite.name, enableForUpdates });
}
|
|
|
|
// Changes or removes the encryption configuration of a backup site. A falsy
// data.encryptionPassword disables encryption. Existing backup records of the
// site are dropped in the same transaction, since they were made with the old
// keys. The password itself is never stored - only keys derived via hush.
async function setEncryption(backupSite, data, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof data, 'object');
    assert.strictEqual(typeof auditSource, 'object');

    let encryption = null;

    if (data.encryptionPassword) {
        const passwordError = validateEncryptionPassword(data.encryptionPassword);
        if (passwordError) throw passwordError;
        if (data.encryptionPassword === data.encryptionPasswordHint) throw new BoxError(BoxError.BAD_FIELD, 'password hint cannot be the same as password');

        encryption = {
            ...hush.generateEncryptionKeysSync(data.encryptionPassword),
            encryptedFilenames: !!data.encryptedFilenames,
            encryptionPasswordHint: data.encryptionPasswordHint || ''
        };
    }

    const queries = [
        { query: 'DELETE FROM backups WHERE siteId=?', args: [ backupSite.id ] },
        { query: 'UPDATE backupSites SET encryptionJson=? WHERE id=?', args: [ encryption ? JSON.stringify(encryption) : null, backupSite.id ] },
    ];

    const [txnError, results] = await safe(database.transaction(queries));
    if (txnError) throw txnError;
    if (results[1].affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found'); // [1] is the UPDATE

    await eventlog.add(eventlog.ACTION_BACKUP_SITE_UPDATE, auditSource, { name: backupSite.name, encryption: !!encryption });
}
|
|
|
|
// Validates and persists a new display name and writes an audit event
// carrying both the new and the previous name.
async function setName(backupSite, name, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof name, 'string');
    assert.strictEqual(typeof auditSource, 'object');

    const error = validateName(name);
    if (error) throw error;

    await update(backupSite, { name });
    await eventlog.add(eventlog.ACTION_BACKUP_SITE_UPDATE, auditSource, { name, oldName: backupSite.name });
}
|
|
|
|
// Validates and persists a new contents (include/exclude) selection and
// writes an audit event.
async function setContents(backupSite, contents, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof contents, 'object');
    assert.strictEqual(typeof auditSource, 'object');

    const error = validateContents(contents);
    if (error) throw error;

    await update(backupSite, { contents });
    await eventlog.add(eventlog.ACTION_BACKUP_SITE_UPDATE, auditSource, { name: backupSite.name, contents });
}
|
|
|
|
// Removes a backup site: tears down the storage backend (best effort),
// deletes the archives/backups/site rows in one transaction, audits the
// removal, stops the cron schedule and removes the on-disk info directory.
async function del(backupSite, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof auditSource, 'object');

    await safe(storageApi(backupSite).teardown(backupSite.config), { debug }); // ignore error

    // dependent rows are deleted before the site row itself
    const queries = [
        { query: 'DELETE FROM archives WHERE backupId IN (SELECT id FROM backups WHERE siteId=?)', args: [ backupSite.id ] },
        { query: 'DELETE FROM backups WHERE siteId=?', args: [ backupSite.id ] },
        { query: 'DELETE FROM backupSites WHERE id=?', args: [ backupSite.id ] }
    ];

    const [error, result] = await safe(database.transaction(queries));
    if (error && error.sqlCode === 'ER_NO_REFERENCED_ROW_2') throw new BoxError(BoxError.NOT_FOUND, error); // foreign key violation
    if (error) throw error;
    if (result[2].affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found'); // [2] is the backupSites delete
    await eventlog.add(eventlog.ACTION_BACKUP_SITE_REMOVE, auditSource, { name: backupSite.name, backupSite: removePrivateFields(backupSite) });

    // the 'never' pattern makes cron drop the job for this site
    backupSite.schedule = constants.CRON_PATTERN_NEVER;
    await cron.handleBackupScheduleChanged(backupSite);

    // best effort removal of the snapshot info directory
    const infoDir = path.join(paths.BACKUP_INFO_DIR, backupSite.id);
    safe.fs.rmSync(infoDir, { recursive: true });
}
|
|
|
|
// Starts a full backup task for the site in the background and returns the
// task id. Only one backup task per site may run at a time (lock guarded);
// throws BAD_STATE when a task is already in progress. Start and finish are
// written to the eventlog; errors of the background task are recorded there,
// not thrown.
async function startBackupTask(site, auditSource) {
    assert.strictEqual(typeof site, 'object');
    assert.strictEqual(typeof auditSource, 'object'); // was missing; every sibling asserts this

    const [error] = await safe(locks.acquire(`${locks.TYPE_FULL_BACKUP_TASK_PREFIX}${site.id}`));
    if (error) throw new BoxError(BoxError.BAD_STATE, `Another backup task is in progress: ${error.message}`);

    // limits.memoryLimit is divided down to MiB (presumably stored in bytes - TODO confirm) with a 1024 MiB floor
    const memoryLimit = site.limits?.memoryLimit ? Math.max(site.limits.memoryLimit/1024/1024, 1024) : 1024;

    const taskId = await tasks.add(`${tasks.TASK_FULL_BACKUP_PREFIX}${site.id}`, [ site.id, { /* options */ } ]);

    await eventlog.add(eventlog.ACTION_BACKUP_START, auditSource, { taskId, siteId: site.id, siteName: site.name });

    // background
    tasks.startTask(taskId, { timeout: 24 * 60 * 60 * 1000 /* 24 hours */, nice: 15, memoryLimit, oomScoreAdjust: -999 })
        .then(async (result) => { // this can be an array or string depending on site.contents
            await eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, result, siteId: site.id, siteName: site.name });
        })
        .catch(async (error) => {
            const timedOut = error.code === tasks.ETIMEOUT;
            // safe() so an eventlog failure cannot become an unhandled rejection
            await safe(eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, errorMessage: error.message, timedOut, siteId: site.id, siteName: site.name }));
        })
        .finally(async () => {
            await locks.release(`${locks.TYPE_FULL_BACKUP_TASK_PREFIX}${site.id}`);
            await locks.releaseByTaskId(taskId);
        });

    return taskId;
}
|
|
|
|
// keeps track of contents of the snapshot directory. this provides a way to clean up backups of uninstalled apps
|
|
// keeps track of contents of the snapshot directory. this provides a way to clean up backups of uninstalled apps
// Returns the parsed snapshot info of the site, or {} when the file is
// missing or unreadable (safetydance turns both failures into null).
async function getSnapshotInfo(backupSite) {
    assert.strictEqual(typeof backupSite, 'object');

    const snapshotFilePath = path.join(paths.BACKUP_INFO_DIR, backupSite.id, constants.SNAPSHOT_INFO_FILENAME);

    return safe.JSON.parse(safe.fs.readFileSync(snapshotFilePath, 'utf8')) || {};
}
|
|
|
|
// keeps track of contents of the snapshot directory. this provides a way to clean up backups of uninstalled apps
|
|
// keeps track of contents of the snapshot directory. this provides a way to clean up backups of uninstalled apps
// Sets (or, when 'info' is null, removes) the snapshot entry for 'id' and, on
// removal, also unlinks the associated sync cache files.
async function setSnapshotInfo(backupSite, id, info) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof id, 'string'); // 'box', 'mail' or appId
    assert.strictEqual(typeof info, 'object'); // null removes the entry

    const infoDir = path.join(paths.BACKUP_INFO_DIR, backupSite.id);
    const snapshotFilePath = path.join(infoDir, constants.SNAPSHOT_INFO_FILENAME);

    const data = safe.JSON.parse(safe.fs.readFileSync(snapshotFilePath, 'utf8')) || {};

    if (info) data[id] = info;
    else delete data[id];

    if (!safe.fs.writeFileSync(snapshotFilePath, JSON.stringify(data, null, 4), 'utf8')) {
        throw new BoxError(BoxError.FS_ERROR, safe.error.message);
    }

    if (!info) { // unlink the cache files
        safe.fs.unlinkSync(path.join(infoDir, `${id}.sync.cache`));
        safe.fs.unlinkSync(path.join(infoDir, `${id}.sync.cache.new`));
    }
}
|
|
|
|
// Starts a backup cleanup task for the site in the background and returns the
// task id. The outcome is written to the eventlog; errors of the background
// task are recorded there, not thrown.
async function startCleanupTask(backupSite, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof auditSource, 'object');

    const taskId = await tasks.add(`${tasks.TASK_CLEAN_BACKUPS_PREFIX}${backupSite.id}`, [ backupSite.id ]);

    // background
    tasks.startTask(taskId, {})
        .then(async (result) => { // { removedBoxBackupPaths, removedAppBackupPaths, removedMailBackupPaths, missingBackupPaths }
            await eventlog.add(eventlog.ACTION_BACKUP_CLEANUP_FINISH, auditSource, { taskId, errorMessage: null, ...result });
        })
        .catch(async (error) => {
            // safe() so an eventlog failure inside the catch handler cannot become an unhandled rejection (matches startBackupTask)
            await safe(eventlog.add(eventlog.ACTION_BACKUP_CLEANUP_FINISH, auditSource, { taskId, errorMessage: error.message }));
        });

    return taskId;
}
|
|
|
|
// Re-runs the storage backend setup for the site (e.g. to re-establish a
// mount). Throws whatever the backend's setup() throws.
async function remount(site) {
    assert.strictEqual(typeof site, 'object');

    await storageApi(site).setup(site.config);
}
|
|
|
|
// Returns the storage status of the site as reported by its backend.
async function getStatus(site) {
    assert.strictEqual(typeof site, 'object');

    return await storageApi(site).getStatus(site.config); // { state, message }
}
|
|
|
|
// Makes sure the storage of the site is usable, remounting once if it is not
// 'active'. Returns the (possibly refreshed) status object ({ state, message }).
async function ensureMounted(site) {
    assert.strictEqual(typeof site, 'object');

    let status = await getStatus(site);

    if (status.state !== 'active') { // try once to bring the storage back up
        await remount(site);
        status = await getStatus(site);
    }

    return status;
}
|
|
|
|
// Replaces the storage configuration of a backup site. The new config is
// verified with the backend before it is persisted and set up. Secrets that
// clients omit are re-injected from the old config. Disabled in demo mode.
async function setConfig(backupSite, newConfig, auditSource) {
    assert.strictEqual(typeof backupSite, 'object');
    assert.strictEqual(typeof newConfig, 'object');
    assert.strictEqual(typeof auditSource, 'object');

    if (constants.DEMO) throw new BoxError(BoxError.BAD_STATE, 'Not allowed in demo mode');

    const oldConfig = backupSite.config;

    newConfig = structuredClone(newConfig); // make a copy
    // clients do not resend secrets; copy them over from the stored config
    storageApi(backupSite).injectPrivateFields(newConfig, oldConfig);

    debug('setConfig: validating new storage configuration');
    const sanitizedConfig = await storageApi(backupSite).verifyConfig({ id: backupSite.id, provider: backupSite.provider, config: newConfig });

    await update(backupSite, { config: sanitizedConfig });

    debug('setConfig: setting up new storage configuration');
    await storageApi(backupSite).setup(sanitizedConfig);

    // secrets are stripped again before the config is written to the eventlog
    await eventlog.add(eventlog.ACTION_BACKUP_SITE_UPDATE, auditSource, { name: backupSite.name, config: storageApi(backupSite).removePrivateFields(newConfig) });
}
|
|
|
|
// Creates a new backup site and sets up its storage. Returns the new site id.
// Validates format/name/contents, derives encryption keys when a password is
// given (the password itself is not stored) and generates an ed25519 key pair
// (presumably for signing/verifying backup integrity - see the backup task).
// Disabled in demo mode.
async function add(data, auditSource) {
    assert.strictEqual(typeof data, 'object');
    assert.strictEqual(typeof auditSource, 'object');

    if (constants.DEMO) throw new BoxError(BoxError.BAD_STATE, 'Not allowed in demo mode');

    const { provider, name, config, format, contents, retention, schedule, enableForUpdates } = data; // required
    const limits = data.limits || null,
        encryptionPassword = data.encryptionPassword || null,
        encryptedFilenames = data.encryptedFilenames || false,
        encryptionPasswordHint = data.encryptionPasswordHint || '';

    const formatError = backupFormats.validateFormat(format);
    if (formatError) throw formatError;

    const nameError = validateName(name);
    if (nameError) throw nameError;

    const contentsError = validateContents(contents);
    if (contentsError) throw contentsError;

    // same derivation as setEncryption: only hush-derived keys are persisted
    let encryption = null;
    if (encryptionPassword) {
        const encryptionPasswordError = validateEncryptionPassword(encryptionPassword);
        if (encryptionPasswordError) throw encryptionPasswordError;
        if (data.encryptionPassword === data.encryptionPasswordHint) throw new BoxError(BoxError.BAD_FIELD, 'Password hint cannot be the same as password');

        encryption = hush.generateEncryptionKeysSync(encryptionPassword);
        encryption.encryptedFilenames = !!encryptedFilenames;
        encryption.encryptionPasswordHint = encryptionPasswordHint;
    }

    const integrityKeyPair = crypto.generateKeyPairSync('ed25519', {
        publicKeyEncoding: { type: 'spki', format: 'pem' },
        privateKeyEncoding: { type: 'pkcs8', format: 'pem' }
    });

    // the per-site info dir holds snapshot info and sync caches (see setSnapshotInfo)
    const id = crypto.randomUUID();
    if (!safe.fs.mkdirSync(`${paths.BACKUP_INFO_DIR}/${id}`)) throw new BoxError(BoxError.FS_ERROR, `Failed to create info dir: ${safe.error.message}`);

    debug('add: validating new storage configuration');
    const sanitizedConfig = await storageApi({ provider }).verifyConfig({id, provider, config });

    await database.query('INSERT INTO backupSites (id, name, provider, configJson, contentsJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format, enableForUpdates) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
        [ id, name, provider, JSON.stringify(sanitizedConfig), JSON.stringify(contents), JSON.stringify(limits), JSON.stringify(integrityKeyPair), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, enableForUpdates ]);

    debug('add: setting up new storage configuration');
    await storageApi({ provider }).setup(sanitizedConfig);

    await eventlog.add(eventlog.ACTION_BACKUP_SITE_ADD, auditSource, { id, name, provider, contents, schedule, format });

    return id;
}
|
|
|
|
// Creates the built-in 'Default' filesystem backup site (daily tgz backup at
// 23:00, retained for two days, enabled for updates). Returns the new site id.
async function addDefault(auditSource) {
    assert.strictEqual(typeof auditSource, 'object');

    debug('addDefault: adding default backup site');

    return await add({
        name: 'Default',
        provider: 'filesystem',
        config: { backupDir: paths.DEFAULT_BACKUP_DIR },
        retention: { keepWithinSecs: 2 * 24 * 60 * 60 },
        schedule: '00 00 23 * * *',
        format: 'tgz',
        contents: null, // back up everything
        enableForUpdates: true
    }, auditSource);
}
|
|
|
|
// creates a backup site object that is not in the database
|
|
// creates a backup site object that is not in the database
// Verifies the config with the storage backend and derives encryption keys
// when a password is given. The password is intentionally not length-validated
// so that existing targets with short passwords can still be opened.
async function createPseudo(data) {
    assert.strictEqual(typeof data, 'object');

    const { id, provider, config, format } = data; // required
    const encryptionPassword = data.encryptionPassword ?? null,
        encryptedFilenames = !!data.encryptedFilenames;

    const formatError = backupFormats.validateFormat(format);
    if (formatError) throw formatError;

    let encryption = null;
    if (encryptionPassword) { // intentionally do not validate password!
        encryption = hush.generateEncryptionKeysSync(encryptionPassword);
        encryption.encryptedFilenames = !!encryptedFilenames;
        encryption.encryptionPasswordHint = '';
    }

    debug('createPseudo: validating new storage configuration'); // was mislabeled 'add:' (copy-paste)
    const sanitizedConfig = await storageApi({ provider }).verifyConfig({ id, provider, config });

    return { id, format, provider, config: sanitizedConfig, encryption };
}
|
|
|
|
// after a restore, this recreates the working directories of the sites
|
|
// after a restore, this recreates the working directories of the sites
// Recreates each site's info dir and kicks off a background remount for any
// site whose storage is not currently 'active'. Remount errors are only
// logged (via safe's debug option), not thrown.
async function reinitAll() {
    for (const site of await list()) {
        if (!safe.fs.mkdirSync(`${paths.BACKUP_INFO_DIR}/${site.id}`, { recursive: true })) throw new BoxError(BoxError.FS_ERROR, `Failed to create info dir: ${safe.error.message}`);

        const status = await getStatus(site);
        if (status.state === 'active') continue; // already mounted, nothing to do

        safe(remount(site), { debug }); // background
    }
}
|