backups: add backup multiple targets

This commit is contained in:
Girish Ramakrishnan
2025-07-24 19:02:02 +02:00
parent 100bea981d
commit 3aafbd2ccb
25 changed files with 744 additions and 535 deletions
+8 -5
View File
@@ -164,6 +164,7 @@ const appTaskManager = require('./apptaskmanager.js'),
domains = require('./domains.js'),
eventlog = require('./eventlog.js'),
fs = require('fs'),
hush = require('./hush.js'),
Location = require('./location.js'),
locks = require('./locks.js'),
logs = require('./logs.js'),
@@ -2363,7 +2364,7 @@ async function importApp(app, data, auditSource) {
if (error) throw error;
if ('password' in backupConfig) {
backupConfig.encryption = backupTargets.generateEncryptionKeysSync(backupConfig.password);
backupConfig.encryption = hush.generateEncryptionKeysSync(backupConfig.password);
delete backupConfig.password;
} else {
backupConfig.encryption = null;
@@ -2404,7 +2405,8 @@ async function exportApp(app, data, auditSource) {
if (!canBackupApp(app)) throw new BoxError(BoxError.BAD_STATE, 'App cannot be backed up in this state');
const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ appId, { snapshotOnly: true } ]);
const backupTarget = await backupTargets._getDefault();
const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ appId, backupTarget.id, { snapshotOnly: true } ]);
safe(tasks.startTask(taskId, {}), { debug }); // background
return { taskId };
}
@@ -2778,10 +2780,11 @@ async function backup(app, auditSource) {
if (!canBackupApp(app)) throw new BoxError(BoxError.BAD_STATE, 'App cannot be backed up in this state');
const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ app.id, { snapshotOnly: false } ]);
const backupTarget = await backupTargets._getDefault();
const backupConfig = await backupTargets.getConfig();
const memoryLimit = backupConfig.limits?.memoryLimit ? Math.max(backupConfig.limits.memoryLimit/1024/1024, 1024) : 1024;
const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ app.id, backupTarget.id, { snapshotOnly: false } ]);
const memoryLimit = backupTarget.limits?.memoryLimit ? Math.max(backupTarget.limits.memoryLimit/1024/1024, 1024) : 1024;
// background
tasks.startTask(taskId, { timeout: 24 * 60 * 60 * 1000 /* 24 hours */, nice: 15, memoryLimit, oomScoreAdjust: -999 })
+41 -43
View File
@@ -80,20 +80,20 @@ function applyBackupRetention(allBackups, retention, referencedBackupIds) {
}
}
async function removeBackup(backupConfig, backup, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function removeBackup(target, backup, progressCallback) {
assert.strictEqual(typeof target, 'object');
assert.strictEqual(typeof backup, 'object');
assert.strictEqual(typeof progressCallback, 'function');
const backupFilePath = backupFormat.api(backupConfig.format).getBackupFilePath(backupConfig, backup.remotePath);
const backupFilePath = backupFormat.api(target.format).getBackupFilePath(target, backup.remotePath);
let removeError;
if (backupConfig.format ==='tgz') {
if (target.format ==='tgz') {
progressCallback({ message: `${backup.remotePath}: Removing ${backupFilePath}`});
[removeError] = await safe(storage.api(backupConfig.provider).remove(backupConfig, backupFilePath));
[removeError] = await safe(storage.api(target.provider).remove(target.config, backupFilePath));
} else {
progressCallback({ message: `${backup.remotePath}: Removing directory ${backupFilePath}`});
[removeError] = await safe(storage.api(backupConfig.provider).removeDir(backupConfig, backupFilePath, progressCallback));
[removeError] = await safe(storage.api(target.provider).removeDir(target.config, backupFilePath, progressCallback));
}
if (removeError) {
@@ -102,7 +102,7 @@ async function removeBackup(backupConfig, backup, progressCallback) {
}
// prune empty directory if possible
const [pruneError] = await safe(storage.api(backupConfig.provider).remove(backupConfig, path.dirname(backupFilePath)));
const [pruneError] = await safe(storage.api(target.provider).remove(target.config, path.dirname(backupFilePath)));
if (pruneError) debug(`removeBackup: unable to prune backup directory ${path.dirname(backupFilePath)}: ${pruneError.message}`);
const [delError] = await safe(backupListing.del(backup.id));
@@ -110,9 +110,8 @@ async function removeBackup(backupConfig, backup, progressCallback) {
else debug(`removeBackup: removed ${backup.remotePath}`);
}
async function cleanupAppBackups(backupConfig, retention, referencedBackupIds, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
assert.strictEqual(typeof retention, 'object');
async function cleanupAppBackups(target, referencedBackupIds, progressCallback) {
assert.strictEqual(typeof target, 'object');
assert(Array.isArray(referencedBackupIds));
assert.strictEqual(typeof progressCallback, 'function');
@@ -134,7 +133,7 @@ async function cleanupAppBackups(backupConfig, retention, referencedBackupIds, p
// apply backup policy per app. keep latest backup only for existing apps
let appBackupsToRemove = [];
for (const appId of Object.keys(appBackupsById)) {
const appRetention = Object.assign({ keepLatest: allAppIds.includes(appId) }, retention);
const appRetention = Object.assign({ keepLatest: allAppIds.includes(appId) }, target.retention);
debug(`cleanupAppBackups: applying retention for appId ${appId} retention: ${JSON.stringify(appRetention)}`);
applyBackupRetention(appBackupsById[appId], appRetention, referencedBackupIds);
appBackupsToRemove = appBackupsToRemove.concat(appBackupsById[appId].filter(b => !b.keepReason));
@@ -143,7 +142,7 @@ async function cleanupAppBackups(backupConfig, retention, referencedBackupIds, p
for (const appBackup of appBackupsToRemove) {
await progressCallback({ message: `Removing app backup (${appBackup.identifier}): ${appBackup.id}`});
removedAppBackupPaths.push(appBackup.remotePath);
await removeBackup(backupConfig, appBackup, progressCallback); // never errors
await removeBackup(target, appBackup, progressCallback); // never errors
}
debug('cleanupAppBackups: done');
@@ -151,9 +150,8 @@ async function cleanupAppBackups(backupConfig, retention, referencedBackupIds, p
return removedAppBackupPaths;
}
async function cleanupMailBackups(backupConfig, retention, referencedBackupIds, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
assert.strictEqual(typeof retention, 'object');
async function cleanupMailBackups(target, referencedBackupIds, progressCallback) {
assert.strictEqual(typeof target, 'object');
assert(Array.isArray(referencedBackupIds));
assert.strictEqual(typeof progressCallback, 'function');
@@ -161,13 +159,13 @@ async function cleanupMailBackups(backupConfig, retention, referencedBackupIds,
const mailBackups = await backupListing.getByTypePaged(backupListing.BACKUP_TYPE_MAIL, 1, 100000);
applyBackupRetention(mailBackups, Object.assign({ keepLatest: true }, retention), referencedBackupIds);
applyBackupRetention(mailBackups, Object.assign({ keepLatest: true }, target.retention), referencedBackupIds);
for (const mailBackup of mailBackups) {
if (mailBackup.keepReason) continue;
await progressCallback({ message: `Removing mail backup ${mailBackup.remotePath}`});
removedMailBackupPaths.push(mailBackup.remotePath);
await removeBackup(backupConfig, mailBackup, progressCallback); // never errors
await removeBackup(target, mailBackup, progressCallback); // never errors
}
debug('cleanupMailBackups: done');
@@ -175,9 +173,7 @@ async function cleanupMailBackups(backupConfig, retention, referencedBackupIds,
return removedMailBackupPaths;
}
async function cleanupBoxBackups(backupConfig, retention, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
assert.strictEqual(typeof retention, 'object');
async function cleanupBoxBackups(target, progressCallback) {
assert.strictEqual(typeof progressCallback, 'function');
let referencedBackupIds = [];
@@ -188,7 +184,7 @@ async function cleanupBoxBackups(backupConfig, retention, progressCallback) {
// 100000 here should be seen as infinity
const boxBackups = await backupListing.getByTypePaged(backupListing.BACKUP_TYPE_BOX, 1, 100000);
applyBackupRetention(boxBackups, Object.assign({ keepLatest: true }, retention), [] /* references */);
applyBackupRetention(boxBackups, Object.assign({ keepLatest: true }, target.retention), [] /* references */);
for (const boxBackup of boxBackups) {
if (boxBackup.keepReason) {
@@ -199,7 +195,7 @@ async function cleanupBoxBackups(backupConfig, retention, progressCallback) {
await progressCallback({ message: `Removing box backup ${boxBackup.remotePath}`});
removedBoxBackupPaths.push(boxBackup.remotePath);
await removeBackup(backupConfig, boxBackup, progressCallback);
await removeBackup(target, boxBackup, progressCallback);
}
debug('cleanupBoxBackups: done');
@@ -208,8 +204,8 @@ async function cleanupBoxBackups(backupConfig, retention, progressCallback) {
}
// cleans up the database by checking if backup exists in the remote. this can happen if user had set some bucket policy
async function cleanupMissingBackups(backupConfig, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function cleanupMissingBackups(target, progressCallback) {
assert.strictEqual(typeof target, 'object');
assert.strictEqual(typeof progressCallback, 'function');
const perPage = 1000;
@@ -224,10 +220,10 @@ async function cleanupMissingBackups(backupConfig, progressCallback) {
for (const backup of result) {
if (backup.state !== backupListing.BACKUP_STATE_NORMAL) continue; // note: errored and incomplete backups are cleaned up by the backup retention logic
let backupFilePath = backupFormat.api(backupConfig.format).getBackupFilePath(backupConfig, backup.remotePath);
if (backupConfig.format === 'rsync') backupFilePath = backupFilePath + '/'; // add trailing slash to indicate directory
let backupFilePath = backupFormat.api(target.format).getBackupFilePath(target, backup.remotePath);
if (target.format === 'rsync') backupFilePath = backupFilePath + '/'; // add trailing slash to indicate directory
const [existsError, exists] = await safe(storage.api(backupConfig.provider).exists(backupConfig, backupFilePath));
const [existsError, exists] = await safe(storage.api(target.provider).exists(target.config, backupFilePath));
if (existsError || exists) continue;
await progressCallback({ message: `Removing missing backup ${backup.remotePath}`});
@@ -247,8 +243,8 @@ async function cleanupMissingBackups(backupConfig, progressCallback) {
}
// removes the snapshots of apps that have been uninstalled
async function cleanupSnapshots(backupConfig) {
assert.strictEqual(typeof backupConfig, 'object');
async function cleanupSnapshots(backupTarget) {
assert.strictEqual(typeof backupTarget, 'object');
const contents = safe.fs.readFileSync(paths.SNAPSHOT_INFO_FILE, 'utf8');
const info = safe.JSON.parse(contents);
@@ -263,9 +259,9 @@ async function cleanupSnapshots(backupConfig) {
if (app) continue; // app is still installed
if (info[appId].format ==='tgz') {
await safe(storage.api(backupConfig.provider).remove(backupConfig, backupFormat.api(info[appId].format).getBackupFilePath(backupConfig, `snapshot/app_${appId}`)), { debug });
await safe(storage.api(backupTarget.provider).remove(backupTarget.config, backupFormat.api(info[appId].format).getBackupFilePath(backupTarget, `snapshot/app_${appId}`)), { debug });
} else {
await safe(storage.api(backupConfig.provider).removeDir(backupConfig, backupFormat.api(info[appId].format).getBackupFilePath(backupConfig, `snapshot/app_${appId}`), progressCallback), { debug });
await safe(storage.api(backupTarget.provider).removeDir(backupTarget.config, backupFormat.api(info[appId].format).getBackupFilePath(backupTarget, `snapshot/app_${appId}`), progressCallback), { debug });
}
safe.fs.unlinkSync(path.join(paths.BACKUP_INFO_DIR, `${appId}.sync.cache`));
@@ -278,40 +274,42 @@ async function cleanupSnapshots(backupConfig) {
debug('cleanupSnapshots: done');
}
async function run(progressCallback) {
async function run(targetId, progressCallback) {
assert.strictEqual(typeof targetId, 'string');
assert.strictEqual(typeof progressCallback, 'function');
const backupConfig = await backupTargets.getConfig();
const { retention } = await backupTargets.getPolicy();
debug(`run: retention is ${JSON.stringify(retention)}`);
const target = await backupTargets.get(targetId);
if (!target) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Target not found');
const status = await backupTargets.ensureMounted();
debug(`run: retention is ${JSON.stringify(target.retention)}`);
const status = await backupTargets.ensureMounted(target);
debug(`run: mount point status is ${JSON.stringify(status)}`);
if (status.state !== 'active') throw new BoxError(BoxError.MOUNT_ERROR, `Backup endpoint is not mounted: ${status.message}`);
if (retention.keepWithinSecs < 0) {
if (target.retention.keepWithinSecs < 0) {
debug('run: keeping all backups');
return {};
}
await progressCallback({ percent: 10, message: 'Cleaning box backups' });
const { removedBoxBackupPaths, referencedBackupIds } = await cleanupBoxBackups(backupConfig, retention, progressCallback); // references is app or mail backup ids
const { removedBoxBackupPaths, referencedBackupIds } = await cleanupBoxBackups(target, progressCallback); // references is app or mail backup ids
await progressCallback({ percent: 20, message: 'Cleaning mail backups' });
const removedMailBackupPaths = await cleanupMailBackups(backupConfig, retention, referencedBackupIds, progressCallback);
const removedMailBackupPaths = await cleanupMailBackups(target, referencedBackupIds, progressCallback);
await progressCallback({ percent: 40, message: 'Cleaning app backups' });
const archivedBackupIds = await archives.listBackupIds();
const removedAppBackupPaths = await cleanupAppBackups(backupConfig, retention, referencedBackupIds.concat(archivedBackupIds), progressCallback);
const removedAppBackupPaths = await cleanupAppBackups(target, referencedBackupIds.concat(archivedBackupIds), progressCallback);
await progressCallback({ percent: 70, message: 'Checking storage backend and removing stale entries in database' });
const missingBackupPaths = await cleanupMissingBackups(backupConfig, progressCallback);
const missingBackupPaths = await cleanupMissingBackups(target, progressCallback);
await progressCallback({ percent: 80, message: 'Cleaning snapshots' });
await cleanupSnapshots(backupConfig);
await cleanupSnapshots(target);
await progressCallback({ percent: 80, message: 'Cleaning storage artifacts' });
await storage.api(backupConfig.provider).cleanup(backupConfig, progressCallback);
await storage.api(target.provider).cleanup(target.config, progressCallback);
return { removedBoxBackupPaths, removedMailBackupPaths, removedAppBackupPaths, missingBackupPaths };
}
+15 -15
View File
@@ -16,15 +16,15 @@ const assert = require('assert'),
tar = require('tar-stream'),
zlib = require('zlib');
function getBackupFilePath(backupConfig, remotePath) {
assert.strictEqual(typeof backupConfig, 'object');
function getBackupFilePath(backupTarget, remotePath) {
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof remotePath, 'string');
const rootPath = backupConfig.rootPath;
const fileType = backupConfig.encryption ? '.tar.gz.enc' : '.tar.gz';
const rootPath = backupTarget.config.rootPath;
const fileType = backupTarget.encryption ? '.tar.gz.enc' : '.tar.gz';
// we don't have a rootPath for noop
if (backupConfig.provider === 'noop') return remotePath + fileType;
if (backupTarget.provider === 'noop') return remotePath + fileType;
return path.join(rootPath, remotePath + fileType);
}
@@ -231,39 +231,39 @@ async function tarExtract(inStream, dataLayout, encryption, progressCallback) {
debug(`tarExtract: pipeline finished: ${ps.stats()}`);
}
async function download(backupConfig, remotePath, dataLayout, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function download(backupTarget, remotePath, dataLayout, progressCallback) {
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof remotePath, 'string');
assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
assert.strictEqual(typeof progressCallback, 'function');
debug(`download: Downloading ${remotePath} to ${dataLayout.toString()}`);
const backupFilePath = getBackupFilePath(backupConfig, remotePath);
const backupFilePath = getBackupFilePath(backupTarget, remotePath);
await promiseRetry({ times: 5, interval: 20000, debug }, async () => {
progressCallback({ message: `Downloading backup ${backupFilePath}` });
const sourceStream = await storage.api(backupConfig.provider).download(backupConfig, backupFilePath);
await tarExtract(sourceStream, dataLayout, backupConfig.encryption, progressCallback);
const sourceStream = await storage.api(backupTarget.provider).download(backupTarget.config, backupFilePath);
await tarExtract(sourceStream, dataLayout, backupTarget.encryption, progressCallback);
});
}
async function upload(backupConfig, remotePath, dataLayout, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function upload(backupTarget, remotePath, dataLayout, progressCallback) {
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof remotePath, 'string');
assert.strictEqual(typeof dataLayout, 'object');
assert.strictEqual(typeof progressCallback, 'function');
debug(`upload: Uploading ${dataLayout.toString()} to ${remotePath}`);
const backupFilePath = getBackupFilePath(backupConfig, remotePath);
const backupFilePath = getBackupFilePath(backupTarget, remotePath);
await promiseRetry({ times: 5, interval: 20000, debug }, async () => {
progressCallback({ message: `Uploading backup ${backupFilePath}` });
const uploader = await storage.api(backupConfig.provider).upload(backupConfig, backupFilePath);
await tarPack(dataLayout, backupConfig.encryption, uploader, progressCallback);
const uploader = await storage.api(backupTarget.provider).upload(backupTarget.config, backupFilePath);
await tarPack(dataLayout, backupTarget.encryption, uploader, progressCallback);
});
}
+259 -250
View File
@@ -1,40 +1,36 @@
'use strict';
exports = module.exports = {
get,
list,
add,
del,
setConfig,
setLimits,
setSchedule,
setRetention,
removePrivateFields,
startBackupTask,
startCleanupTask,
cleanupCacheFilesSync,
removePrivateFields,
generateEncryptionKeysSync,
getSnapshotInfo,
setSnapshotInfo,
validatePolicy,
testStorage,
validateFormat,
getPolicy,
setPolicy,
getTarget,
getConfig,
setConfig,
setStorage,
setLimits,
getRootPath,
setupManagedStorage,
remount,
getMountStatus,
ensureMounted,
_addDefaultTarget: addDefaultTarget,
_addDefault: addDefault,
_getDefault: getDefault,
};
const assert = require('assert'),
@@ -43,22 +39,35 @@ const assert = require('assert'),
constants = require('./constants.js'),
cron = require('./cron.js'),
{ CronTime } = require('cron'),
crypto = require('crypto'),
database = require('./database.js'),
debug = require('debug')('box:backups'),
eventlog = require('./eventlog.js'),
hush = require('./hush.js'),
locks = require('./locks.js'),
mounts = require('./mounts.js'),
path = require('path'),
paths = require('./paths.js'),
safe = require('safetydance'),
settings = require('./settings.js'),
storage = require('./storage.js'),
tasks = require('./tasks.js'),
uuid = require('uuid'),
_ = require('./underscore.js');
uuid = require('uuid');
const BACKUP_TARGET_FIELDS = [ 'id', 'label', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'priority', 'creationTime', 'ts' ].join(',');
const BACKUP_TARGET_FIELDS = [ 'id', 'label', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'priority', 'creationTime', 'ts' ].join(',');
function getRootPath(provider, config, mountPath) {
assert.strictEqual(typeof config, 'object');
assert.strictEqual(typeof mountPath, 'string');
if (mounts.isManagedProvider(provider)) {
return path.join(mountPath, config.prefix);
} else if (provider === 'mountpoint') {
return path.join(config.mountPoint, config.prefix);
} else if (provider === 'filesystem') {
return config.backupFolder;
} else {
return config.prefix;
}
}
function postProcess(result) {
assert.strictEqual(typeof result, 'object');
@@ -66,6 +75,10 @@ function postProcess(result) {
result.config = result.configJson ? safe.JSON.parse(result.configJson) : {};
delete result.configJson;
// note: rootPath will be dynamic for managed mount providers during app import . since it's used in api backends it has to be inside config
result.config.rootPath = getRootPath(result.provider, result.config, paths.MANAGED_BACKUP_MOUNT_DIR);
result.config.provider = result.provider; // this allows api backends to identify the real provider
result.limits = result.limitsJson ? safe.JSON.parse(result.limitsJson) : {};
delete result.limitsJson;
@@ -80,36 +93,45 @@ function postProcess(result) {
return result;
}
function removePrivateFields(backupConfig) {
assert.strictEqual(typeof backupConfig, 'object');
if (backupConfig.encryption) {
delete backupConfig.encryption;
backupConfig.password = constants.SECRET_PLACEHOLDER;
function removePrivateFields(target) {
assert.strictEqual(typeof target, 'object');
if (target.encryption) {
delete target.encryption;
target.password = constants.SECRET_PLACEHOLDER;
}
delete backupConfig.rootPath;
return storage.api(backupConfig.provider).removePrivateFields(backupConfig);
delete target.rootPath;
return storage.api(target.provider).removePrivateFields(target.config);
}
// this function is used in migrations - 20200512172301-settings-backup-encryption.js
function generateEncryptionKeysSync(password) {
assert.strictEqual(typeof password, 'string');
function validateFormat(format) {
assert.strictEqual(typeof format, 'string');
const aesKeys = crypto.scryptSync(password, Buffer.from('CLOUDRONSCRYPTSALT', 'utf8'), 128);
return {
dataKey: aesKeys.subarray(0, 32).toString('hex'),
dataHmacKey: aesKeys.subarray(32, 64).toString('hex'),
filenameKey: aesKeys.subarray(64, 96).toString('hex'),
filenameHmacKey: aesKeys.subarray(96).toString('hex')
};
if (format === 'tgz' || format == 'rsync') return null;
return new BoxError(BoxError.BAD_FIELD, 'Invalid backup format');
}
async function validatePolicy(policy) {
assert.strictEqual(typeof policy, 'object');
function validateLabel(label) {
assert.strictEqual(typeof label, 'string');
const job = safe.safeCall(function () { return new CronTime(policy.schedule); });
if (label.length > 48) return new BoxError(BoxError.BAD_FIELD, 'Label too long');
}
function validateSchedule(schedule) {
assert.strictEqual(typeof schedule, 'string');
if (schedule === constants.CRON_PATTERN_NEVER) return null;
const job = safe.safeCall(function () { return new CronTime(schedule); });
if (!job) return new BoxError(BoxError.BAD_FIELD, 'Invalid schedule pattern');
const retention = policy.retention;
return null;
}
function validateRetention(retention) {
assert.strictEqual(typeof retention, 'object');
if (!retention) return new BoxError(BoxError.BAD_FIELD, 'retention is required');
if (!['keepWithinSecs','keepDaily','keepWeekly','keepMonthly','keepYearly'].find(k => !!retention[k])) return new BoxError(BoxError.BAD_FIELD, 'retention properties missing');
if ('keepWithinSecs' in retention && typeof retention.keepWithinSecs !== 'number') return new BoxError(BoxError.BAD_FIELD, 'retention.keepWithinSecs must be a number');
@@ -117,17 +139,133 @@ async function validatePolicy(policy) {
if ('keepWeekly' in retention && typeof retention.keepWeekly !== 'number') return new BoxError(BoxError.BAD_FIELD, 'retention.keepWeekly must be a number');
if ('keepMonthly' in retention && typeof retention.keepMonthly !== 'number') return new BoxError(BoxError.BAD_FIELD, 'retention.keepMonthly must be a number');
if ('keepYearly' in retention && typeof retention.keepYearly !== 'number') return new BoxError(BoxError.BAD_FIELD, 'retention.keepYearly must be a number');
return null;
}
async function startBackupTask(auditSource) {
function validateEncryptionPassword(password) {
assert.strictEqual(typeof password, 'string');
if (password.length < 8) return new BoxError(BoxError.BAD_FIELD, 'password must be atleast 8 characters');
}
async function list(page, perPage) {
assert(typeof page === 'number' && page > 0);
assert(typeof perPage === 'number' && perPage > 0);
const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupTargets ORDER BY creationTime DESC LIMIT ?,?`, [ (page-1)*perPage, perPage ]);
results.forEach(function (result) { postProcess(result); });
return results;
}
async function getDefault() {
const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupTargets WHERE priority=? LIMIT 1`, [ true ]);
return postProcess(results[0]);
}
async function get(id) {
const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupTargets WHERE id=?`, [ id ]);
if (results.length === 0) return null;
return postProcess(results[0]);
}
async function addDefault() {
const label = '', priority = true;
const limits = null, encryption = null;
const retention = { keepWithinSecs: 2 * 24 * 60 * 60 };
const schedule = '00 00 23 * * *';;
const config = { backupFolder: paths.DEFAULT_BACKUP_DIR };
const provider = 'filesystem';
const format = 'tgz';
const id = `bc-${uuid.v4()}`;
await database.query('INSERT INTO backupTargets (id, label, provider, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, priority) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
[ id, label, provider, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, priority ]);
return id;
}
async function update(target, data) {
assert.strictEqual(typeof target, 'object');
assert(data && typeof data === 'object');
const args = [];
const fields = [];
for (const k in data) {
if (k === 'label' || k === 'schedule' || k === 'priority') { // format, provider cannot be updated
fields.push(k + ' = ?');
args.push(data[k]);
} else if (k === 'config' || k === 'limits' || k === 'retention') { // encryption cannot be updated
fields.push(`${k}JSON = ?`);
args.push(JSON.stringify(data[k]));
}
}
args.push(target.id);
const [updateError, result] = await safe(database.query('UPDATE backupTargets SET ' + fields.join(', ') + ' WHERE id = ?', args));
if (updateError) throw updateError;
if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found');
}
async function setSchedule(target, schedule) {
assert.strictEqual(typeof target, 'object');
assert.strictEqual(typeof schedule, 'string');
const error = await validateSchedule(schedule);
if (error) throw error;
await update(target, { schedule });
await cron.handleBackupScheduleChanged(target);
}
async function setLimits(target, limits) {
assert.strictEqual(typeof target, 'object');
assert.strictEqual(typeof limits, 'object');
await update(target, { limits });
}
async function setRetention(target, retention) {
assert.strictEqual(typeof target, 'object');
assert.strictEqual(typeof retention, 'object');
const error = await validateRetention(retention);
if (error) throw error;
await update(target, { retention });
}
async function del(target, auditSource) {
assert.strictEqual(typeof target, 'object');
assert(auditSource && typeof auditSource === 'object');
if (target.priority) throw new BoxError(BoxError.CONFLICT, 'Cannot delete the primary backup target');
const queries = [];
queries.push({ query: 'DELETE FROM backups WHERE targetId = ?', args: [ target.id ] });
queries.push({ query: 'DELETE FROM backupTargets WHERE id=? AND priority=?', args: [ target.id, false ] });
const [error, result] = await safe(database.transaction(queries));
if (error && error.code === 'ER_NO_REFERENCED_ROW_2') throw new BoxError(BoxError.NOT_FOUND, error);
if (error) throw error;
if (result[1].affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found');
// await eventlog.add(eventlog.ACTION_ARCHIVES_DEL, auditSource, { id: archive.id, backupId: archive.backupId });
debug('del: clearing backup cache');
cleanupCacheFilesSync();
}
async function startBackupTask(target, auditSource) {
assert.strictEqual(typeof target, 'object');
const [error] = await safe(locks.acquire(locks.TYPE_FULL_BACKUP_TASK));
if (error) throw new BoxError(BoxError.BAD_STATE, `Another backup task is in progress: ${error.message}`);
const backupConfig = await getConfig();
const memoryLimit = target.limits?.memoryLimit ? Math.max(target.limits.memoryLimit/1024/1024, 1024) : 1024;
const memoryLimit = backupConfig.limits?.memoryLimit ? Math.max(backupConfig.limits.memoryLimit/1024/1024, 1024) : 1024;
const taskId = await tasks.add(tasks.TASK_BACKUP, [ { /* options */ } ]);
const taskId = await tasks.add(`${tasks.TASK_FULL_BACKUP_PREFIX}${target.id}`, [ target.id, { /* options */ } ]);
await eventlog.add(eventlog.ACTION_BACKUP_START, auditSource, { taskId });
@@ -150,8 +288,8 @@ async function startBackupTask(auditSource) {
}
// this function is used in migrations - 20200512172301-settings-backup-encryption.js
function cleanupCacheFilesSync() {
const files = safe.fs.readdirSync(path.join(paths.BACKUP_INFO_DIR));
function cleanupCacheFilesSync(target) {
const files = safe.fs.readdirSync(path.join(paths.BACKUP_INFO_DIR, target.id));
if (!files) return;
files
@@ -183,10 +321,11 @@ async function setSnapshotInfo(id, info) {
}
}
async function startCleanupTask(auditSource) {
async function startCleanupTask(target, auditSource) {
assert.strictEqual(typeof target, 'object');
assert.strictEqual(typeof auditSource, 'object');
const taskId = await tasks.add(tasks.TASK_CLEAN_BACKUPS, []);
const taskId = await tasks.add(`${tasks.TASK_CLEAN_BACKUPS_PREFIX}${target.id}`, [ target.id ]);
// background
tasks.startTask(taskId, {})
@@ -201,239 +340,109 @@ async function startCleanupTask(auditSource) {
return taskId;
}
async function testStorage(storageConfig) {
assert.strictEqual(typeof storageConfig, 'object');
const func = storage.api(storageConfig.provider);
if (!func) return new BoxError(BoxError.BAD_FIELD, 'unknown storage provider');
await storage.api(storageConfig.provider).testConfig(storageConfig);
}
function validateEncryptionPassword(password) {
assert.strictEqual(typeof password, 'string');
if (password.length < 8) return new BoxError(BoxError.BAD_FIELD, 'password must be atleast 8 characters');
}
function managedBackupMountObject(backupConfig) {
assert(mounts.isManagedProvider(backupConfig.provider));
function managedBackupMountObject(config) {
assert(mounts.isManagedProvider(config.provider));
return {
name: 'backup',
hostPath: paths.MANAGED_BACKUP_MOUNT_DIR,
mountType: backupConfig.provider,
mountOptions: backupConfig.mountOptions
mountType: config.provider,
mountOptions: config.mountOptions
};
}
async function remount() {
const backupConfig = await getConfig();
async function remount(target) {
assert.strictEqual(typeof target, 'object');
if (mounts.isManagedProvider(backupConfig.provider)) {
await mounts.remount(managedBackupMountObject(backupConfig));
if (mounts.isManagedProvider(target.provider)) {
await mounts.remount(managedBackupMountObject(target.config));
}
}
async function getMountStatus() {
const backupConfig = await getConfig();
async function getMountStatus(target) {
assert.strictEqual(typeof target, 'object');
let hostPath;
if (mounts.isManagedProvider(backupConfig.provider)) {
if (mounts.isManagedProvider(target.provider)) {
hostPath = paths.MANAGED_BACKUP_MOUNT_DIR;
} else if (backupConfig.provider === 'mountpoint') {
hostPath = backupConfig.mountPoint;
} else if (backupConfig.provider === 'filesystem') {
hostPath = backupConfig.backupFolder;
} else if (target.provider === 'mountpoint') {
hostPath = target.config.mountPoint;
} else if (target.provider === 'filesystem') {
hostPath = target.config.backupFolder;
} else {
return { state: 'active' };
}
return await mounts.getStatus(backupConfig.provider, hostPath); // { state, message }
return await mounts.getStatus(target.provider, hostPath); // { state, message }
}
async function ensureMounted() {
const status = await getMountStatus();
async function ensureMounted(target) {
assert.strictEqual(typeof target, 'object');
const status = await getMountStatus(target);
if (status.state === 'active') return status;
await remount();
return await getMountStatus();
return await getMountStatus(target);
}
// Returns the backup policy (retention rules + cron schedule) of the
// primary (priority=true) backup target.
async function getPolicy() {
    const rows = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupTargets WHERE priority=?`, [ true ]);
    const { retention, schedule } = postProcess(rows[0]);
    return { retention, schedule };
}
// Updates the backup policy (retention + schedule) of the primary backup
// target and re-arms the backup cron job.
// policy: { retention, schedule }. Throws BoxError on validation failure.
async function setPolicy(policy) {
    assert.strictEqual(typeof policy, 'object');

    const error = await validatePolicy(policy);
    if (error) throw error;

    await updateTarget(policy);

    // cron's handler was renamed from handleBackupPolicyChanged to
    // handleBackupScheduleChanged (it only consumes .schedule); calling the
    // removed name would throw a TypeError here.
    await cron.handleBackupScheduleChanged(policy);
}
function getRootPath(storageConfig, mountPath) {
assert.strictEqual(typeof storageConfig, 'object');
assert.strictEqual(typeof mountPath, 'string');
if (mounts.isManagedProvider(storageConfig.provider)) {
return path.join(mountPath, storageConfig.prefix);
} else if (storageConfig.provider === 'mountpoint') {
return path.join(storageConfig.mountPoint, storageConfig.prefix);
} else if (storageConfig.provider === 'filesystem') {
return storageConfig.backupFolder;
} else {
return storageConfig.prefix;
}
}
// Updates whitelisted fields of the primary (priority=true) backup target.
// Scalar columns are stored as-is; structured values go into *JSON columns.
// Unknown keys are silently ignored (callers pass whole objects).
// Throws BoxError.NOT_FOUND if no primary target exists.
async function updateTarget(data) {
    assert(data && typeof data === 'object');

    const SCALAR_FIELDS = [ 'label', 'schedule', 'format', 'priority' ];
    const JSON_FIELDS = [ 'config', 'limits', 'retention', 'encryption' ];

    const fields = [];
    const args = [];
    for (const [key, value] of Object.entries(data)) {
        if (SCALAR_FIELDS.includes(key)) {
            fields.push(`${key} = ?`);
            args.push(value);
        } else if (JSON_FIELDS.includes(key)) {
            fields.push(`${key}JSON = ?`);
            args.push(JSON.stringify(value));
        }
    }

    // without this guard an empty or unknown-keys-only object would generate
    // malformed SQL ('UPDATE backupTargets SET  WHERE priority = ?')
    if (fields.length === 0) throw new BoxError(BoxError.BAD_FIELD, 'No updatable fields provided');

    args.push(true); // primary flag

    const [updateError, result] = await safe(database.query('UPDATE backupTargets SET ' + fields.join(', ') + ' WHERE priority = ?', args));
    if (updateError) throw updateError;
    if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found');
}
// Inserts the built-in filesystem backup target and returns its id.
// The default target is the primary one (priority=true).
async function addDefaultTarget() {
    const id = `bc-${uuid.v4()}`;
    const label = '';
    const priority = true; // default target is the primary target
    const limits = null;
    const encryption = null; // unencrypted by default
    const retention = { keepWithinSecs: 2 * 24 * 60 * 60 }; // keep 2 days of backups
    const schedule = '00 00 23 * * *'; // daily at 23:00 (removed stray second semicolon)
    const config = { provider: 'filesystem', backupFolder: paths.DEFAULT_BACKUP_DIR };
    const format = 'tgz';

    // NOTE(review): add() also populates a dedicated 'provider' column while
    // here the provider only lives inside configJson — confirm the schema and
    // postProcess() tolerate both shapes.
    await database.query('INSERT INTO backupTargets (id, label, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, priority) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)',
        [ id, label, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, priority ]);

    return id;
}
// Fetches a backup target by id. Resolves to null when no such target exists.
async function getTarget(id) {
    const rows = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupTargets WHERE id=?`, [ id ]);
    return rows.length === 0 ? null : postProcess(rows[0]);
}
// Returns a flattened view of the primary backup target: the provider
// specific config object with format, encryption, limits and rootPath
// merged in.
async function getConfig() {
    const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupTargets WHERE priority=?`, [ true ]);
    const result = postProcess(results[0]);

    const config = result.config;
    config.format = result.format;
    config.encryption = result.encryption;
    // callers read .limits off this object (e.g. backup task memory sizing);
    // limits are stored in the limitsJSON column, not inside configJson
    config.limits = result.limits;
    config.rootPath = getRootPath(config, paths.MANAGED_BACKUP_MOUNT_DIR); // note: rootPath will be dynamic for managed mount providers during app import

    return config;
}
async function setConfig(backupConfig) {
assert.strictEqual(typeof backupConfig, 'object');
await updateTarget({
config: _.omit(backupConfig, ['limits', 'format', 'encryption']),
format: backupConfig.format,
encryption: backupConfig.encryption || null,
limits: backupConfig.limits || null
});
}
async function setLimits(limits) {
assert.strictEqual(typeof limits, 'object');
await settings.setJson(settings.BACKUP_LIMITS_KEY, limits);
}
function validateFormat(format) {
assert.strictEqual(typeof format, 'string');
if (format === 'tgz' || format == 'rsync') return null;
return new BoxError(BoxError.BAD_FIELD, 'Invalid backup format');
}
async function setupManagedStorage(storageConfig, hostPath) {
assert.strictEqual(typeof storageConfig, 'object');
assert.strictEqual(typeof hostPath, 'string');
if (!mounts.isManagedProvider(storageConfig.provider)) return null;
if (!storageConfig.mountOptions || typeof storageConfig.mountOptions !== 'object') throw new BoxError(BoxError.BAD_FIELD, 'mountOptions must be an object');
const error = mounts.validateMountOptions(storageConfig.provider, storageConfig.mountOptions);
if (error) throw error;
debug(`setupManagedStorage: setting up mount at ${hostPath} with ${storageConfig.provider}`);
const newMount = {
name: path.basename(hostPath),
hostPath,
mountType: storageConfig.provider,
mountOptions: storageConfig.mountOptions
};
await mounts.tryAddMount(newMount, { timeout: 10 }); // 10 seconds
return newMount;
}
async function setStorage(storageConfig) {
assert.strictEqual(typeof storageConfig, 'object');
async function setConfig(target, newConfig) {
assert.strictEqual(typeof target, 'object');
assert.strictEqual(typeof newConfig, 'object');
if (constants.DEMO) throw new BoxError(BoxError.BAD_STATE, 'Not allowed in demo mode');
const oldConfig = await getConfig();
const oldConfig = target.config;
if (storageConfig.provider === oldConfig.provider) storage.api(storageConfig.provider).injectPrivateFields(storageConfig, oldConfig);
storage.api(target.provider).injectPrivateFields(newConfig, oldConfig);
const formatError = validateFormat(storageConfig.format);
debug('setConfig: validating new storage configuration');
await storage.testMount(target.provider, newConfig, '/mnt/backup-storage-validation');
debug('setConfig: removing old storage configuration');
if (mounts.isManagedProvider(target.provider)) await safe(mounts.removeMount(managedBackupMountObject(oldConfig)));
debug('setConfig: setting up new storage configuration');
await storage.setupManagedMount(target.provider, newConfig, paths.MANAGED_BACKUP_MOUNT_DIR);
debug('setConfig: clearing backup cache');
cleanupCacheFilesSync(target);
await update(target, { config: newConfig });
}
async function add(data) {
assert.strictEqual(typeof data, 'object');
if (constants.DEMO) throw new BoxError(BoxError.BAD_STATE, 'Not allowed in demo mode');
const { provider, label, config, format, retention, schedule } = data; // required
const limits = data.limits || null,
encryptionPassword = data.encryptionPassword || null,
encryptedFilenames = data.encryptedFilenames || false;
const formatError = validateFormat(format);
if (formatError) throw formatError;
storageConfig.encryption = null;
if ('password' in storageConfig) { // user set password
if (storageConfig.password === constants.SECRET_PLACEHOLDER) {
storageConfig.encryption = oldConfig.encryption || null;
} else {
const encryptionPasswordError = validateEncryptionPassword(storageConfig.password);
if (encryptionPasswordError) throw encryptionPasswordError;
const labelError = validateLabel(label);
if (labelError) throw labelError;
storageConfig.encryption = generateEncryptionKeysSync(storageConfig.password);
}
delete storageConfig.password;
let encryption = null;
if (encryptionPassword) {
const encryptionPasswordError = validateEncryptionPassword(encryptionPassword);
if (encryptionPasswordError) throw encryptionPasswordError;
encryption = hush.generateEncryptionKeysSync(encryptionPassword);
encryption.encryptedFilenames = !!encryptedFilenames;
}
debug('setStorage: validating new storage configuration');
const testMountObject = await setupManagedStorage(storageConfig, '/mnt/backup-storage-validation'); // this validates mountOptions
const testStorageError = await testStorage(Object.assign({ mountPath: '/mnt/backup-storage-validation' }, storageConfig)); // this validates provider and it's api options. requires mountPath
if (testMountObject) await mounts.removeMount(testMountObject);
if (testStorageError) throw testStorageError;
debug('setStorage: removing old storage configuration');
if (mounts.isManagedProvider(oldConfig.provider)) await safe(mounts.removeMount(managedBackupMountObject(oldConfig)));
debug('add: validating new storage configuration');
await storage.testMount(provider, config, '/mnt/backup-storage-validation');
debug('setStorage: setting up new storage configuration');
await setupManagedStorage(storageConfig, paths.MANAGED_BACKUP_MOUNT_DIR);
await storage.setupManagedMount(provider, config, paths.MANAGED_BACKUP_MOUNT_DIR);
debug('setStorage: clearing backup cache');
cleanupCacheFilesSync();
await setConfig(storageConfig);
const id = `bc-${uuid.v4()}`;
await database.query('INSERT INTO backupTargets (id, label, provider, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, priority) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
[ id, label, provider, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, false ]);
return id;
}
+75 -77
View File
@@ -9,7 +9,6 @@ exports = module.exports = {
downloadApp,
backupApp,
backupMail,
downloadMail,
upload,
@@ -36,17 +35,17 @@ const apps = require('./apps.js'),
const BACKUP_UPLOAD_CMD = path.join(__dirname, 'scripts/backupupload.js');
async function checkPreconditions(backupConfig, dataLayout) {
assert.strictEqual(typeof backupConfig, 'object');
async function checkPreconditions(backupTarget, dataLayout) {
assert.strictEqual(typeof backupTarget, 'object');
assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
// check mount status before uploading
const status = await backupTargets.ensureMounted();
const status = await backupTargets.ensureMounted(backupTarget);
debug(`checkPreconditions: mount point status is ${JSON.stringify(status)}`);
if (status.state !== 'active') throw new BoxError(BoxError.MOUNT_ERROR, `Backup endpoint is not active: ${status.message}`);
// check available size. this requires root for df to work
const available = await storage.api(backupConfig.provider).getAvailableSize(backupConfig);
const available = await storage.api(backupTarget.provider).getAvailableSize(backupTarget.config);
let used = 0;
for (const localPath of dataLayout.localPaths()) {
debug(`checkPreconditions: getting disk usage of ${localPath}`);
@@ -64,20 +63,22 @@ async function checkPreconditions(backupConfig, dataLayout) {
}
// this function is called via backupupload (since it needs root to traverse app's directory)
async function upload(remotePath, format, dataLayoutString, progressCallback) {
async function upload(remotePath, targetId, dataLayoutString, progressCallback) {
assert.strictEqual(typeof remotePath, 'string');
assert.strictEqual(typeof format, 'string');
assert.strictEqual(typeof targetId, 'string');
assert.strictEqual(typeof dataLayoutString, 'string');
assert.strictEqual(typeof progressCallback, 'function');
debug(`upload: path ${remotePath} format ${format} dataLayout ${dataLayoutString}`);
debug(`upload: path ${remotePath} target ${targetId} dataLayout ${dataLayoutString}`);
const backupTarget = await backupTargets.get(targetId);
if (!backupTarget) throw new BoxError(BoxError.NOT_FOUND, 'Backup target not found');
const dataLayout = DataLayout.fromString(dataLayoutString);
const backupConfig = await backupTargets.getConfig();
await checkPreconditions(backupConfig, dataLayout);
await checkPreconditions(backupTarget, dataLayout);
await backupFormat.api(format).upload(backupConfig, remotePath, dataLayout, progressCallback);
await backupFormat.api(backupTarget.format).upload(backupTarget, remotePath, dataLayout, progressCallback);
}
async function download(backupConfig, remotePath, format, dataLayout, progressCallback) {
@@ -131,16 +132,16 @@ async function runBackupUpload(uploadConfig, progressCallback) {
assert.strictEqual(typeof uploadConfig, 'object');
assert.strictEqual(typeof progressCallback, 'function');
const { remotePath, backupConfig, dataLayout, progressTag } = uploadConfig;
const { remotePath, backupTarget, dataLayout, progressTag } = uploadConfig;
assert.strictEqual(typeof remotePath, 'string');
assert.strictEqual(typeof backupConfig, 'object');
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof progressTag, 'string');
assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
// https://stackoverflow.com/questions/48387040/node-js-recommended-max-old-space-size
const envCopy = Object.assign({}, process.env);
if (backupConfig.limits?.memoryLimit >= 2*1024*1024*1024) {
const heapSize = Math.min((backupConfig.limits.memoryLimit/1024/1024) - 256, 8192);
if (backupTarget.limits?.memoryLimit >= 2*1024*1024*1024) {
const heapSize = Math.min((backupTarget.limits.memoryLimit/1024/1024) - 256, 8192);
debug(`runBackupUpload: adjusting heap size to ${heapSize}M`);
envCopy.NODE_OPTIONS = `--max-old-space-size=${heapSize}`;
}
@@ -153,7 +154,7 @@ async function runBackupUpload(uploadConfig, progressCallback) {
}
// do not use debug for logging child output because it already has timestamps via its own debug
const [error] = await safe(shell.sudo([ BACKUP_UPLOAD_CMD, remotePath, backupConfig.format, dataLayout.toString() ], { env: envCopy, preserveEnv: true, onMessage, logger: process.stdout.write }));
const [error] = await safe(shell.sudo([ BACKUP_UPLOAD_CMD, remotePath, backupTarget.id, dataLayout.toString() ], { env: envCopy, preserveEnv: true, onMessage, logger: process.stdout.write }));
if (error && (error.code === null /* signal */ || (error.code !== 0 && error.code !== 50))) { // backuptask crashed
debug(`runBackupUpload: backuptask crashed`, error);
throw new BoxError(BoxError.INTERNAL_ERROR, 'Backuptask crashed');
@@ -173,8 +174,8 @@ async function snapshotBox(progressCallback) {
debug(`snapshotBox: took ${(new Date() - startTime)/1000} seconds`);
}
async function uploadBoxSnapshot(backupConfig, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function uploadBoxSnapshot(backupTarget, progressCallback) {
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof progressCallback, 'function');
await snapshotBox(progressCallback);
@@ -184,7 +185,7 @@ async function uploadBoxSnapshot(backupConfig, progressCallback) {
const uploadConfig = {
remotePath: 'snapshot/box',
backupConfig,
backupTarget,
dataLayout: new DataLayout(boxDataDir, []),
progressTag: 'box'
};
@@ -197,21 +198,21 @@ async function uploadBoxSnapshot(backupConfig, progressCallback) {
debug(`uploadBoxSnapshot: took ${(new Date() - startTime)/1000} seconds`);
await backupTargets.setSnapshotInfo('box', { timestamp: new Date().toISOString(), format: backupConfig.format });
await backupTargets.setSnapshotInfo('box', { timestamp: new Date().toISOString(), format: backupTarget.format });
}
async function copy(backupConfig, srcRemotePath, destRemotePath, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function copy(backupTarget, srcRemotePath, destRemotePath, progressCallback) {
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof srcRemotePath, 'string');
assert.strictEqual(typeof destRemotePath, 'string');
assert.strictEqual(typeof progressCallback, 'function');
const { provider, format } = backupConfig;
const oldFilePath = backupFormat.api(format).getBackupFilePath(backupConfig, srcRemotePath);
const newFilePath = backupFormat.api(format).getBackupFilePath(backupConfig, destRemotePath);
const { config, format } = backupTarget;
const oldFilePath = backupFormat.api(format).getBackupFilePath(backupTarget, srcRemotePath);
const newFilePath = backupFormat.api(format).getBackupFilePath(backupTarget, destRemotePath);
const startTime = new Date();
const [copyError] = await safe(storage.api(provider).copy(backupConfig, oldFilePath, newFilePath, progressCallback));
const [copyError] = await safe(storage.api(config.provider).copy(config, oldFilePath, newFilePath, progressCallback));
if (copyError) {
debug(`copy: copied to ${destRemotePath} errored. error: ${copyError.message}`);
throw copyError;
@@ -219,8 +220,8 @@ async function copy(backupConfig, srcRemotePath, destRemotePath, progressCallbac
debug(`copy: copied successfully to ${destRemotePath}. Took ${(new Date() - startTime)/1000} seconds`);
}
async function rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function rotateBoxBackup(backupTarget, tag, options, dependsOn, progressCallback) {
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof tag, 'string');
assert.strictEqual(typeof options, 'object');
assert(Array.isArray(dependsOn));
@@ -232,7 +233,7 @@ async function rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCa
const data = {
remotePath,
encryptionVersion: backupConfig.encryption ? 2 : null,
encryptionVersion: backupTarget.encryption ? 2 : null,
packageVersion: constants.VERSION,
type: backupListing.BACKUP_TYPE_BOX,
state: backupListing.BACKUP_STATE_CREATING,
@@ -244,7 +245,7 @@ async function rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCa
};
const id = await backupListing.add(data);
const [error] = await safe(copy(backupConfig, 'snapshot/box', remotePath, progressCallback));
const [error] = await safe(copy(backupTarget, 'snapshot/box', remotePath, progressCallback));
const state = error ? backupListing.BACKUP_STATE_ERROR : backupListing.BACKUP_STATE_NORMAL;
await backupListing.setState(id, state);
if (error) throw error;
@@ -252,20 +253,19 @@ async function rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCa
return id;
}
async function backupBox(dependsOn, tag, options, progressCallback) {
async function backupBox(backupTarget, dependsOn, tag, options, progressCallback) {
assert.strictEqual(typeof backupTarget, 'object');
assert(Array.isArray(dependsOn));
assert.strictEqual(typeof tag, 'string');
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof progressCallback, 'function');
const backupConfig = await backupTargets.getConfig();
await uploadBoxSnapshot(backupConfig, progressCallback);
return await rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCallback);
await uploadBoxSnapshot(backupTarget, progressCallback);
return await rotateBoxBackup(backupTarget, tag, options, dependsOn, progressCallback);
}
async function rotateAppBackup(backupConfig, app, tag, options, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function rotateAppBackup(backupTarget, app, tag, options, progressCallback) {
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof tag, 'string');
assert.strictEqual(typeof options, 'object');
@@ -280,7 +280,7 @@ async function rotateAppBackup(backupConfig, app, tag, options, progressCallback
const data = {
remotePath,
encryptionVersion: backupConfig.encryption ? 2 : null,
encryptionVersion: backupTarget.encryption ? 2 : null,
packageVersion: manifest.version,
type: backupListing.BACKUP_TYPE_APP,
state: backupListing.BACKUP_STATE_CREATING,
@@ -292,7 +292,7 @@ async function rotateAppBackup(backupConfig, app, tag, options, progressCallback
};
const id = await backupListing.add(data);
const [error] = await safe(copy(backupConfig, `snapshot/app_${app.id}`, remotePath, progressCallback));
const [error] = await safe(copy(backupTarget, `snapshot/app_${app.id}`, remotePath, progressCallback));
const state = error ? backupListing.BACKUP_STATE_ERROR : backupListing.BACKUP_STATE_NORMAL;
await backupListing.setState(id, state);
if (error) throw error;
@@ -300,8 +300,9 @@ async function rotateAppBackup(backupConfig, app, tag, options, progressCallback
return id;
}
async function backupApp(app, options, progressCallback) {
async function backupApp(app, backupTarget, options, progressCallback) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof progressCallback, 'function');
@@ -311,7 +312,7 @@ async function backupApp(app, options, progressCallback) {
await snapshotApp(app, progressCallback);
} else {
const tag = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,'');
backupId = await backupAppWithTag(app, tag, options, progressCallback);
backupId = await backupAppWithTag(app, backupTarget, tag, options, progressCallback);
}
await locks.release(`${locks.TYPE_APP_BACKUP_PREFIX}${app.id}`);
@@ -331,8 +332,8 @@ async function snapshotApp(app, progressCallback) {
debug(`snapshotApp: ${app.fqdn} took ${(new Date() - startTime)/1000} seconds`);
}
async function uploadAppSnapshot(backupConfig, app, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function uploadAppSnapshot(backupTarget, app, progressCallback) {
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof progressCallback, 'function');
@@ -348,7 +349,7 @@ async function uploadAppSnapshot(backupConfig, app, progressCallback) {
const uploadConfig = {
remotePath,
backupConfig,
backupTarget,
dataLayout,
progressTag: app.fqdn
};
@@ -359,11 +360,12 @@ async function uploadAppSnapshot(backupConfig, app, progressCallback) {
debug(`uploadAppSnapshot: ${app.fqdn} uploaded to ${remotePath}. ${(new Date() - startTime)/1000} seconds`);
await backupTargets.setSnapshotInfo(app.id, { timestamp: new Date().toISOString(), manifest: app.manifest, format: backupConfig.format });
await backupTargets.setSnapshotInfo(app.id, { timestamp: new Date().toISOString(), manifest: app.manifest, format: backupTarget.format });
}
async function backupAppWithTag(app, tag, options, progressCallback) {
async function backupAppWithTag(app, backupTarget, tag, options, progressCallback) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof tag, 'string');
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof progressCallback, 'function');
@@ -375,14 +377,12 @@ async function backupAppWithTag(app, tag, options, progressCallback) {
return results[0].id;
}
const backupConfig = await backupTargets.getConfig();
await uploadAppSnapshot(backupConfig, app, progressCallback);
return await rotateAppBackup(backupConfig, app, tag, options, progressCallback);
await uploadAppSnapshot(backupTarget, app, progressCallback);
return await rotateAppBackup(backupTarget, app, tag, options, progressCallback);
}
async function uploadMailSnapshot(backupConfig, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function uploadMailSnapshot(backupTarget, progressCallback) {
assert.strictEqual(typeof backupTarget, 'object');
assert.strictEqual(typeof progressCallback, 'function');
const mailDataDir = safe.fs.realpathSync(paths.MAIL_DATA_DIR);
@@ -390,7 +390,7 @@ async function uploadMailSnapshot(backupConfig, progressCallback) {
const uploadConfig = {
remotePath: 'snapshot/mail',
backupConfig,
backupTarget,
dataLayout: new DataLayout(mailDataDir, []),
progressTag: 'mail'
};
@@ -403,11 +403,11 @@ async function uploadMailSnapshot(backupConfig, progressCallback) {
debug(`uploadMailSnapshot: took ${(new Date() - startTime)/1000} seconds`);
await backupTargets.setSnapshotInfo('mail', { timestamp: new Date().toISOString(), format: backupConfig.format });
await backupTargets.setSnapshotInfo('mail', { timestamp: new Date().toISOString(), format: backupTarget.format });
}
async function rotateMailBackup(backupConfig, tag, options, progressCallback) {
assert.strictEqual(typeof backupConfig, 'object');
async function rotateMailBackup(target, tag, options, progressCallback) {
assert.strictEqual(typeof target, 'object');
assert.strictEqual(typeof tag, 'string');
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof progressCallback, 'function');
@@ -418,7 +418,7 @@ async function rotateMailBackup(backupConfig, tag, options, progressCallback) {
const data = {
remotePath,
encryptionVersion: backupConfig.encryption ? 2 : null,
encryptionVersion: target.encryption ? 2 : null,
packageVersion: constants.VERSION,
type: backupListing.BACKUP_TYPE_MAIL,
state: backupListing.BACKUP_STATE_CREATING,
@@ -430,7 +430,7 @@ async function rotateMailBackup(backupConfig, tag, options, progressCallback) {
};
const id = await backupListing.add(data);
const [error] = await safe(copy(backupConfig, 'snapshot/mail', remotePath, progressCallback));
const [error] = await safe(copy(target, 'snapshot/mail', remotePath, progressCallback));
const state = error ? backupListing.BACKUP_STATE_ERROR : backupListing.BACKUP_STATE_NORMAL;
await backupListing.setState(id, state);
if (error) throw error;
@@ -438,25 +438,16 @@ async function rotateMailBackup(backupConfig, tag, options, progressCallback) {
return id;
}
async function backupMailWithTag(tag, options, progressCallback) {
async function backupMailWithTag(target, tag, options, progressCallback) {
assert.strictEqual(typeof target, 'object');
assert.strictEqual(typeof tag, 'string');
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof progressCallback, 'function');
debug(`backupMailWithTag: backing up mail with tag ${tag}`);
const backupConfig = await backupTargets.getConfig();
await uploadMailSnapshot(backupConfig, progressCallback);
return await rotateMailBackup(backupConfig, tag, options, progressCallback);
}
async function backupMail(options, progressCallback) {
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof progressCallback, 'function');
const tag = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,'');
debug(`backupMail: backing up mail with tag ${tag}`);
return await backupMailWithTag(tag, options, progressCallback);
await uploadMailSnapshot(target, progressCallback);
return await rotateMailBackup(target, tag, options, progressCallback);
}
async function downloadMail(restoreConfig, progressCallback) {
@@ -474,10 +465,14 @@ async function downloadMail(restoreConfig, progressCallback) {
}
// this function is called from external process. calling process is expected to have a lock
async function fullBackup(options, progressCallback) {
async function fullBackup(backupTargetId, options, progressCallback) {
assert.strictEqual(typeof backupTargetId, 'string');
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof progressCallback, 'function');
const backupTarget = await backupTargets.get(backupTargetId);
if (!backupTarget) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Backup target not found');
const tag = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,''); // unique tag under which all apps/mail/box backs up
const allApps = await apps.list();
@@ -498,7 +493,7 @@ async function fullBackup(options, progressCallback) {
progressCallback({ percent, message: `Backing up ${app.fqdn} (${i+1}/${allApps.length}). Waiting for lock` });
await locks.wait(`${locks.TYPE_APP_BACKUP_PREFIX}${app.id}`);
const startTime = new Date();
const [appBackupError, appBackupId] = await safe(backupAppWithTag(app, tag, options, (progress) => progressCallback({ percent, message: progress.message })));
const [appBackupError, appBackupId] = await safe(backupAppWithTag(app, backupTarget, tag, options, (progress) => progressCallback({ percent, message: progress.message })));
debug(`fullBackup: app ${app.fqdn} backup finished. Took ${(new Date() - startTime)/1000} seconds`);
await locks.release(`${locks.TYPE_APP_BACKUP_PREFIX}${app.id}`);
if (appBackupError) throw appBackupError;
@@ -507,18 +502,18 @@ async function fullBackup(options, progressCallback) {
progressCallback({ percent, message: 'Backing up mail' });
percent += step;
const mailBackupId = await backupMailWithTag(tag, options, (progress) => progressCallback({ percent, message: progress.message }));
const mailBackupId = await backupMailWithTag(backupTarget, tag, options, (progress) => progressCallback({ percent, message: progress.message }));
progressCallback({ percent, message: 'Backing up system data' });
percent += step;
const dependsOn = appBackupIds.concat(mailBackupId);
const backupId = await backupBox(dependsOn, tag, options, (progress) => progressCallback({ percent, message: progress.message }));
const backupId = await backupBox(backupTarget, dependsOn, tag, options, (progress) => progressCallback({ percent, message: progress.message }));
return backupId;
}
// this function is called from external process
async function appBackup(appId, options, progressCallback) {
async function appBackup(appId, backupTargetId, options, progressCallback) {
assert.strictEqual(typeof appId, 'string');
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof progressCallback, 'function');
@@ -526,9 +521,12 @@ async function appBackup(appId, options, progressCallback) {
const app = await apps.get(appId);
if (!app) throw new BoxError(BoxError.BAD_FIELD, 'App not found');
const backupTarget = await backupTargets.get(backupTargetId);
if (!backupTarget) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Backup target not found');
await progressCallback({ percent: 1, message: `Backing up ${app.fqdn}. Waiting for lock` });
const startTime = new Date();
const backupId = await backupApp(app, options, progressCallback);
const backupId = await backupApp(app, backupTarget, options, progressCallback);
await progressCallback({ percent: 100, message: `app ${app.fqdn} backup finished. Took ${(new Date() - startTime)/1000} seconds` });
return backupId;
}
+6 -6
View File
@@ -11,7 +11,7 @@ exports = module.exports = {
stopJobs,
handleBackupPolicyChanged,
handleBackupScheduleChanged,
handleTimeZoneChanged,
handleAutoupdatePatternChanged,
handleDynamicDnsChanged,
@@ -203,24 +203,24 @@ async function startJobs() {
start: true
});
await handleBackupPolicyChanged(await backupTargets.getPolicy());
await handleBackupScheduleChanged(await backupTargets._getDefault());
await handleAutoupdatePatternChanged(await updater.getAutoupdatePattern());
await handleDynamicDnsChanged(await network.getDynamicDns());
await handleExternalLdapChanged(await externalLdap.getConfig());
}
async function handleBackupPolicyChanged(value) {
assert.strictEqual(typeof value, 'object');
async function handleBackupScheduleChanged(target) {
assert.strictEqual(typeof target, 'object');
const tz = await cloudron.getTimeZone();
debug(`backupPolicyChanged: schedule ${value.schedule} (${tz})`);
debug(`backupPolicyChanged: schedule ${target.schedule} (${tz})`);
if (gJobs.backup) gJobs.backup.stop();
gJobs.backup = null;
gJobs.backup = CronJob.from({
cronTime: value.schedule,
cronTime: target.schedule,
onTick: async () => await safe(backupTargets.startBackupTask(AuditSource.CRON), { debug }),
start: true,
timeZone: tz
+15
View File
@@ -152,10 +152,25 @@ function decryptFilePath(filePath, encryption) {
return { result: decryptedParts.join('/') };
}
// this function is used in migrations - 20200512172301-settings-backup-encryption.js
// Derives the four backup-encryption keys (data, data-HMAC, filename,
// filename-HMAC) from a password using scrypt with a fixed salt. Each key is
// returned as a 32-byte hex string. Deterministic: same password, same keys.
function generateEncryptionKeysSync(password) {
    assert.strictEqual(typeof password, 'string');

    const SALT = Buffer.from('CLOUDRONSCRYPTSALT', 'utf8');
    const material = crypto.scryptSync(password, SALT, 128); // 4 x 32 bytes

    const hex = (start, end) => material.subarray(start, end).toString('hex');

    return {
        dataKey: hex(0, 32),
        dataHmacKey: hex(32, 64),
        filenameKey: hex(64, 96),
        filenameHmacKey: hex(96, 128)
    };
}
exports = module.exports = {
EncryptStream,
DecryptStream,
encryptFilePath,
decryptFilePath,
generateEncryptionKeysSync,
};
+1 -1
View File
@@ -18,7 +18,7 @@ async function promiseRetry(options, asyncFunction) {
} catch (error) {
if (i === times - 1) throw error;
if (options.retry && !options.retry(error)) throw error; // no more retry
if (options.debug) options.debug(`Attempt ${i+1} failed. Will retry: ${error.message}`);
if (options.debug) options.debug(`Attempt ${i+1} failed. Will retry: ${error.message} ${error.stack}`);
await timers.setTimeout(interval);
}
}
+2 -1
View File
@@ -20,6 +20,7 @@ const appstore = require('./appstore.js'),
domains = require('./domains.js'),
eventlog = require('./eventlog.js'),
fs = require('fs'),
hush = require('./hush.js'),
mail = require('./mail.js'),
mailServer = require('./mailserver.js'),
network = require('./network.js'),
@@ -239,7 +240,7 @@ async function restore(backupConfig, remotePath, version, ipv4Config, ipv6Config
if (error) throw error;
if ('password' in backupConfig) {
backupConfig.encryption = backupTargets.generateEncryptionKeysSync(backupConfig.password);
backupConfig.encryption = hush.generateEncryptionKeysSync(backupConfig.password);
delete backupConfig.password;
} else {
backupConfig.encryption = null;
+122 -45
View File
@@ -1,17 +1,23 @@
'use strict';
exports = module.exports = {
load,
list,
get,
add,
del,
// separate update routes to skip (slow) storage validation
setConfig,
setLimits,
setSchedule,
setRetention,
create,
cleanup,
remount,
getMountStatus,
getConfig,
setStorage,
setLimits,
getPolicy,
setPolicy
};
const assert = require('assert'),
@@ -22,40 +28,122 @@ const assert = require('assert'),
HttpSuccess = require('@cloudron/connect-lastmile').HttpSuccess,
safe = require('safetydance');
// route middleware: resolve :id into req.resources.backupTarget, or 404 if unknown
async function load(req, res, next) {
    assert.strictEqual(typeof req.params.id, 'string');

    const [error, target] = await safe(backupTargets.get(req.params.id));
    if (error) return next(BoxError.toHttpError(error));
    if (!target) return next(new HttpError(404, 'Backup target not found'));

    req.resources.backupTarget = target;
    next();
}
// returns the backup target loaded by load() with secret fields stripped
async function get(req, res, next) {
    assert.strictEqual(typeof req.params.id, 'string');

    const target = req.resources.backupTarget;
    next(new HttpSuccess(200, backupTargets.removePrivateFields(target)));
}
// paged listing of all backup targets, secrets stripped from each entry
async function list(req, res, next) {
    // FIX: error messages said 'postive'; also pass an explicit radix to parseInt
    const page = typeof req.query.page === 'string' ? parseInt(req.query.page, 10) : 1;
    if (!page || page < 0) return next(new HttpError(400, 'page query param has to be a positive number')); // !page also rejects 0 and NaN

    const perPage = typeof req.query.per_page === 'string' ? parseInt(req.query.per_page, 10) : 25;
    if (!perPage || perPage < 0) return next(new HttpError(400, 'per_page query param has to be a positive number'));

    const [error, result] = await safe(backupTargets.list(page, perPage));
    if (error) return next(BoxError.toHttpError(error));

    next(new HttpSuccess(200, { backupTargets: result.map(backupTargets.removePrivateFields) }));
}
// Target has three parts. these fields are merged into one top level object
// 1. format. rsync or tgz
// 2. config. the 'provider' (see api() function in src/storage.js) differentiates further options
// s3 providers - accessKeyId, secretAccessKey, bucket, prefix etc . see s3.js
// gcs - bucket, prefix, projectId, credentials . see gcs.js
// ext4/xfs/disk (managed providers) - mountOptions (diskPath), prefix, noHardlinks. disk is legacy.
// nfs/cifs/sshfs (managed providers) - mountOptions (host/username/password/seal/privateKey etc), prefix, noHardlinks
// filesystem - backupFolder, noHardlinks
// mountpoint - mountPoint, prefix, noHardlinks
// 3. encryption. password and encryptedFilenames
// creates a new backup target from { label, format, config, schedule, retention, limits?, encryption* }
async function add(req, res, next) {
    assert.strictEqual(typeof req.body, 'object');

    const { label, format, config } = req.body;

    if (typeof format !== 'string') return next(new HttpError(400, 'format must be a string'));
    if (typeof label !== 'string') return next(new HttpError(400, 'label must be a string'));

    // provider specific options are validated by provider backends
    if (!config || typeof config !== 'object') return next(new HttpError(400, 'config is required'));
    // BUGFIX: the provider lives inside 'config' (see comment above this function); the previous
    // check read an undeclared 'provider' variable, whose typeof is always 'undefined', so the
    // route unconditionally failed with 400
    if (typeof config.provider !== 'string') return next(new HttpError(400, 'provider is required'));

    if (typeof req.body.schedule !== 'string') return next(new HttpError(400, 'schedule is required'));
    if (!req.body.retention || typeof req.body.retention !== 'object') return next(new HttpError(400, 'retention is required'));
    if ('limits' in req.body && typeof req.body.limits !== 'object') return next(new HttpError(400, 'limits must be an object'));
    if ('encryptionPassword' in req.body && typeof req.body.encryptionPassword !== 'string') return next(new HttpError(400, 'encryptionPassword must be a string'));
    if ('encryptedFilenames' in req.body && typeof req.body.encryptedFilenames !== 'boolean') return next(new HttpError(400, 'encryptedFilenames must be a boolean'));

    // testing the backup using put/del takes a bit of time at times
    req.clearTimeout();

    const [error, id] = await safe(backupTargets.add(req.body));
    if (error) return next(BoxError.toHttpError(error));

    next(new HttpSuccess(200, { id }));
}
// deletes the backup target loaded by load(); records who did it in the audit log
async function del(req, res, next) {
    assert.strictEqual(typeof req.params.id, 'string');
    assert.strictEqual(typeof req.resources.backupTarget, 'object');

    const target = req.resources.backupTarget;
    const [error] = await safe(backupTargets.del(target, AuditSource.fromRequest(req)));
    if (error) return next(BoxError.toHttpError(error));

    next(new HttpSuccess(204));
}
async function create(req, res, next) {
const [error, taskId] = await safe(backupTargets.startBackupTask(AuditSource.fromRequest(req)));
assert.strictEqual(typeof req.resources.backupTarget, 'object');
const [error, taskId] = await safe(backupTargets.startBackupTask(req.resources.backupTarget, AuditSource.fromRequest(req)));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(202, { taskId }));
}
async function cleanup(req, res, next) {
const [error, taskId] = await safe(backupTargets.startCleanupTask(AuditSource.fromRequest(req)));
assert.strictEqual(typeof req.resources.backupTarget, 'object');
const [error, taskId] = await safe(backupTargets.startCleanupTask(req.resources.backupTarget, AuditSource.fromRequest(req)));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(202, { taskId }));
}
async function remount(req, res, next) {
const [error] = await safe(backupTargets.remount());
assert.strictEqual(typeof req.resources.backupTarget, 'object');
const [error] = await safe(backupTargets.remount(req.resources.backupTarget));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(202, {}));
}
async function getMountStatus(req, res, next) {
const [error, mountStatus] = await safe(backupTargets.getMountStatus());
assert.strictEqual(typeof req.resources.backupTarget, 'object');
const [error, mountStatus] = await safe(backupTargets.getMountStatus(req.resources.backupTarget));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, mountStatus));
}
// returns the backup configuration with secret fields removed
async function getConfig(req, res, next) {
    const [loadError, config] = await safe(backupTargets.getConfig());
    if (loadError) return next(BoxError.toHttpError(loadError));

    const sanitized = backupTargets.removePrivateFields(config);
    next(new HttpSuccess(200, sanitized));
}
async function setLimits(req, res, next) {
assert.strictEqual(typeof req.body, 'object');
@@ -84,55 +172,44 @@ async function setLimits(req, res, next) {
if ('memoryLimit' in limits && typeof limits.memoryLimit !== 'number') return next(new HttpError(400, 'memoryLimit must be a positive integer'));
const [error] = await safe(backupTargets.setLimits(req.body));
const [error] = await safe(backupTargets.setLimits(req.resources.backupTarget, req.body));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, {}));
}
// storage has three parts. these fields are merged into one top level object
// 1. format. rsync or tgz
// 2. config. the 'provider' (see api() function in src/storage.js) differentiates further options
// s3 providers - accessKeyId, secretAccessKey, bucket, prefix etc . see s3.js
// gcs - bucket, prefix, projectId, credentials . see gcs.js
// ext4/xfs/disk (managed providers) - mountOptions (diskPath), prefix, noHardlinks. disk is legacy.
// nfs/cifs/sshfs (managed providers) - mountOptions (host/username/password/seal/privateKey etc), prefix, noHardlinks
// filesystem - backupFolder, noHardlinks
// mountpoint - mountPoint, prefix, noHardlinks
// 3. encryption. password and encryptedFilenames
async function setStorage(req, res, next) {
async function setConfig(req, res, next) {
assert.strictEqual(typeof req.body, 'object');
// provider specific options are validated by provider backends
if (typeof req.body.provider !== 'string') return next(new HttpError(400, 'provider is required'));
if (typeof req.body.format !== 'string') return next(new HttpError(400, 'format must be a string'));
if ('password' in req.body && typeof req.body.password !== 'string') return next(new HttpError(400, 'password must be a string'));
if ('encryptedFilenames' in req.body && typeof req.body.encryptedFilenames !== 'boolean') return next(new HttpError(400, 'encryptedFilenames must be a boolean'));
// testing the backup using put/del takes a bit of time at times
req.clearTimeout();
const [error] = await safe(backupTargets.setStorage(req.body));
const [error] = await safe(backupTargets.setConfig(req.resources.backupTarget, req.body));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, {}));
}
// returns the backup policy (schedule + retention) wrapped as { policy }
async function getPolicy(req, res, next) {
    const [loadError, policy] = await safe(backupTargets.getPolicy());
    if (loadError) return next(BoxError.toHttpError(loadError));

    next(new HttpSuccess(200, { policy }));
}
async function setPolicy(req, res, next) {
async function setSchedule(req, res, next) {
assert.strictEqual(typeof req.body, 'object');
if (typeof req.body.schedule !== 'string') return next(new HttpError(400, 'schedule is required'));
if (!req.body.retention || typeof req.body.retention !== 'object') return next(new HttpError(400, 'retention is required'));
const [error] = await safe(backupTargets.setPolicy(req.body));
const [error] = await safe(backupTargets.setSchedule(req.resources.backupTarget, req.body.schedule));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, {}));
}
// updates the retention rules of the backup target loaded by load()
async function setRetention(req, res, next) {
    assert.strictEqual(typeof req.body, 'object');
    // retention is an object (e.g. keepWithinSecs/keepYearly — see tests); field-level validation happens downstream
    if (!req.body.retention || typeof req.body.retention !== 'object') return next(new HttpError(400, 'retention is required'));
    const [error] = await safe(backupTargets.setRetention(req.resources.backupTarget, req.body.retention));
    if (error) return next(BoxError.toHttpError(error));
    next(new HttpSuccess(200, {}));
+3 -2
View File
@@ -119,11 +119,12 @@ describe('Backups API', function () {
};
it('can get backup_config (default)', async function () {
const response = await superagent.get(`${serverUrl}/api/v1/backups/config`)
const response = await superagent.get(`${serverUrl}/api/v1/backup_targets`)
.query({ access_token: owner.token });
expect(response.status).to.equal(200);
expect(response.body).to.eql(defaultConfig);
expect(response.body.config).to.eql(defaultConfig);
expect(response.body.config).to.eql(defaultConfig);
});
it('cannot set backup_config without provider', async function () {
+1 -1
View File
@@ -115,7 +115,7 @@ async function setupServer() {
await database.initialize();
await database._clear();
await appstore._setApiServerOrigin(exports.mockApiServerOrigin);
await backupTargets._addDefaultTarget();
await backupTargets._addDefault();
await oidcServer.stop();
await server.start();
debug('Set up server complete');
+13 -9
View File
@@ -155,15 +155,19 @@ async function initializeExpressSync() {
router.get ('/api/v1/backups', token, authorizeAdmin, routes.backupListing.list);
router.post('/api/v1/backups/:backupId', json, token, authorizeAdmin, routes.backupListing.update);
router.get ('/api/v1/backups/mount_status', token, authorizeAdmin, routes.backupTargets.getMountStatus);
router.post('/api/v1/backups/create', token, authorizeAdmin, routes.backupTargets.create);
router.post('/api/v1/backups/cleanup', json, token, authorizeAdmin, routes.backupTargets.cleanup);
router.post('/api/v1/backups/remount', json, token, authorizeAdmin, routes.backupTargets.remount);
router.get ('/api/v1/backups/config', token, authorizeAdmin, routes.backupTargets.getConfig);
router.post('/api/v1/backups/config/storage', json, token, authorizeOwner, routes.backupTargets.setStorage);
router.post('/api/v1/backups/config/limits', json, token, authorizeOwner, routes.backupTargets.setLimits);
router.get ('/api/v1/backups/policy', token, authorizeAdmin, routes.backupTargets.getPolicy);
router.post('/api/v1/backups/policy', json, token, authorizeOwner, routes.backupTargets.setPolicy);
router.get ('/api/v1/backup_targets/', token, authorizeAdmin, routes.backupTargets.list);
router.get ('/api/v1/backup_targets/:id', token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.get);
router.post('/api/v1/backup_targets/:id', token, authorizeOwner, routes.backupTargets.add);
router.del ('/api/v1/backup_targets/:id', token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.del);
router.get ('/api/v1/backup_targets/:id/mount_status', token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.getMountStatus);
router.post('/api/v1/backup_targets/:id/create', token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.create);
router.post('/api/v1/backup_targets/:id/cleanup', json, token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.cleanup);
router.post('/api/v1/backup_targets/:id/remount', json, token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.remount);
router.post('/api/v1/backup_targets/:id/configure/config', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setConfig);
router.post('/api/v1/backup_targets/:id/configure/limits', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setLimits);
router.post('/api/v1/backup_targets/:id/configure/policy', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setPolicy);
router.post('/api/v1/backup_targets/:id/configure/schedule', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setSchedule);
router.post('/api/v1/backup_targets/:id/configure/retention', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setRetention);
// app archive routes
router.get ('/api/v1/archives', token, authorizeAdmin, routes.archives.list);
-3
View File
@@ -13,9 +13,6 @@ exports = module.exports = {
APPSTORE_API_TOKEN_KEY: 'appstore_api_token',
API_SERVER_ORIGIN_KEY: 'api_server_origin',
AUTOUPDATE_PATTERN_KEY: 'autoupdate_pattern',
BACKUP_STORAGE_KEY: 'backup_storage',
BACKUP_LIMITS_KEY: 'backup_limits',
BACKUP_POLICY_KEY: 'backup_policy',
CLOUDRON_AVATAR_KEY: 'cloudron_avatar',
CLOUDRON_BACKGROUND_KEY: 'cloudron_background',
CLOUDRON_ID_KEY: 'cloudron_id',
+51
View File
@@ -2,8 +2,17 @@
exports = module.exports = {
api,
testMount,
setupManagedMount
};
const assert = require('assert'),
BoxError = require('./boxerror.js'),
debug = require('debug')('box:storage'),
mounts = require('./mounts.js'),
path = require('path'),
safe = require('safetydance');
// choose which storage backend we use for test purpose we use s3
function api(provider) {
switch (provider) {
@@ -36,3 +45,45 @@ function api(provider) {
default: return null;
}
}
// mounts a managed storage provider (nfs/cifs/sshfs/ext4/...) at hostPath; returns the
// mount descriptor, or null when the provider does not need a managed mount
async function setupManagedMount(provider, backupConfig, hostPath) {
    assert.strictEqual(typeof provider, 'string');
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof hostPath, 'string');

    if (!mounts.isManagedProvider(provider)) return null; // nothing to do for non-managed providers

    const { mountOptions } = backupConfig;
    if (!mountOptions || typeof mountOptions !== 'object') throw new BoxError(BoxError.BAD_FIELD, 'mountOptions must be an object');

    const validationError = mounts.validateMountOptions(provider, mountOptions);
    if (validationError) throw validationError;

    debug(`setupManagedMount: setting up mount at ${hostPath} with ${provider}`);

    const mount = {
        name: path.basename(hostPath),
        hostPath,
        mountType: provider,
        mountOptions
    };
    await mounts.tryAddMount(mount, { timeout: 10 }); // 10 seconds

    return mount;
}
// validates a storage configuration end-to-end: sets up a temporary managed mount (if any),
// runs the provider's testConfig, then tears the mount down again. Throws on any failure.
async function testMount(provider, backupConfig, mountPath) {
    assert.strictEqual(typeof provider, 'string');
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof mountPath, 'string');

    const func = api(provider);
    // BUGFIX: this used to 'return' the BoxError — an async function resolving with an Error
    // object looks like success to safe()-style callers. Every other error path here throws.
    if (!func) throw new BoxError(BoxError.BAD_FIELD, 'unknown storage provider');

    const testMountObject = await setupManagedMount(provider, backupConfig, mountPath); // this validates mountOptions

    // filesystem backend uses mountPath
    // NOTE(review): the mountPath param is only forwarded to setupManagedMount; testConfig gets a
    // fixed validation path — confirm this is intentional
    const [error] = await safe(func.testConfig(Object.assign({ mountPath: '/mnt/backup-storage-validation', provider }, backupConfig)));

    if (testMountObject) await mounts.removeMount(testMountObject); // always unmount, even on error

    if (error) throw error;
}
+5 -2
View File
@@ -21,13 +21,16 @@ exports = module.exports = {
// task types. if you add a task here, fill up the function table in taskworker and dashboard constants.js
// '_' prefix is removed for lookup
TASK_APP: 'app',
// "prefix" allows us to locate the tasks of a specific app or backup target
TASK_APP_BACKUP_PREFIX: 'appBackup_',
TASK_BACKUP: 'backup', // full backup
TASK_FULL_BACKUP_PREFIX: 'backup_', // full backup
TASK_CLEAN_BACKUPS_PREFIX: 'cleanBackups_',
TASK_BOX_UPDATE: 'boxUpdate',
TASK_CHECK_CERTS: 'checkCerts',
TASK_SYNC_DYNDNS: 'syncDyndns',
TASK_PREPARE_DASHBOARD_LOCATION: 'prepareDashboardLocation',
TASK_CLEAN_BACKUPS: 'cleanBackups',
TASK_SYNC_EXTERNAL_LDAP: 'syncExternalLdap',
TASK_CHANGE_MAIL_LOCATION: 'changeMailLocation',
TASK_SYNC_DNS_RECORDS: 'syncDnsRecords',
+13 -12
View File
@@ -13,7 +13,6 @@ const archives = require('../archives.js'),
common = require('./common.js'),
expect = require('expect.js'),
moment = require('moment'),
settings = require('../settings.js'),
tasks = require('../tasks.js'),
timers = require('timers/promises');
@@ -127,6 +126,8 @@ describe('backup cleaner', function () {
});
describe('task', function () {
let target;
const BACKUP_0_BOX = {
id: null,
remotePath: 'backup-box-0',
@@ -226,17 +227,17 @@ describe('backup cleaner', function () {
};
before(async function () {
await settings._set(settings.BACKUP_STORAGE_KEY, JSON.stringify({
target = await backupTargets._getDefault();
await backupTargets.setConfig(target, {
provider: 'filesystem',
password: 'supersecret',
backupFolder: '/tmp/someplace',
format: 'tgz'
}));
await backupTargets.setPolicy({ retention: { keepWithinSecs: 1 }, schedule: '00 00 23 * * *' });
});
await backupTargets.setRetention(target, { keepWithinSecs: 1 });
await backupTargets.setSchedule(target, '00 00 23 * * *');
});
async function cleanupBackups() {
const taskId = await backupTargets.startCleanupTask({ username: 'test' });
async function cleanupBackups(target) {
const taskId = await backupTargets.startCleanupTask(target, { username: 'test' });
console.log('started task', taskId);
@@ -253,7 +254,7 @@ describe('backup cleaner', function () {
}
it('succeeds without backups', async function () {
await cleanupBackups();
await cleanupBackups(target);
});
it('add the backups', async function () {
@@ -274,7 +275,7 @@ describe('backup cleaner', function () {
});
it('succeeds with box backups, keeps latest', async function () {
await cleanupBackups();
await cleanupBackups(target);
const results = await backupListing.getByTypePaged(backupListing.BACKUP_TYPE_BOX, 1, 1000);
expect(results.length).to.equal(1);
@@ -286,7 +287,7 @@ describe('backup cleaner', function () {
});
it('does not remove expired backups if only one left', async function () {
await cleanupBackups();
await cleanupBackups(target);
const results = await backupListing.getByTypePaged(backupListing.BACKUP_TYPE_BOX, 1, 1000);
expect(results[0].id).to.equal(BACKUP_1_BOX.id);
@@ -304,7 +305,7 @@ describe('backup cleaner', function () {
await timers.setTimeout(2000); // wait for expiration
await cleanupBackups();
await cleanupBackups(target);
let result = await backupListing.getByTypePaged(backupListing.BACKUP_TYPE_APP, 1, 1000);
expect(result.length).to.equal(4);
+4 -37
View File
@@ -14,7 +14,7 @@ const backupListing = require('../backuplisting.js'),
safe = require('safetydance');
describe('backups', function () {
const { setup, cleanup, defaultBackupTarget } = common;
const { setup, cleanup } = common;
const boxBackup = {
id: null,
@@ -48,8 +48,11 @@ describe('backups', function () {
targetId: null
};
let defaultBackupTarget;
before(async function () {
await setup();
defaultBackupTarget = await backupTargets._getDefault();
boxBackup.targetId = defaultBackupTarget.id;
appBackup.targetId = defaultBackupTarget.id;
});
@@ -120,40 +123,4 @@ describe('backups', function () {
expect(result).to.be(null);
});
});
describe('config and policy', function () {
it('can get backup config', async function () {
const backupConfig = await backupTargets.getConfig();
expect(backupConfig.provider).to.be('filesystem');
expect(backupConfig.backupFolder).to.be('/var/backups');
});
it('can set backup config', async function () {
let backupConfig = await backupTargets.getConfig();
backupConfig = Object.assign({}, backupConfig, { backupFolder: '/tmp/backups' });
await backupTargets.setConfig(backupConfig);
const newBackupConfig = await backupTargets.getConfig();
expect(newBackupConfig.backupFolder).to.be('/tmp/backups');
});
it('cannot set backup policy with invalid schedule', async function () {
const [error] = await safe(backupTargets.setPolicy({ schedule: '', retention: { keepWithinSecs: 1 }}));
expect(error.reason).to.be(BoxError.BAD_FIELD);
});
it('cannot set backup policy with missing retention', async function () {
const [error] = await safe(backupTargets.setPolicy({ schedule: '00 * * * * *'}));
expect(error.reason).to.be(BoxError.BAD_FIELD);
});
it('cannot set backup policy with invalid retention', async function () {
const [error] = await safe(backupTargets.setPolicy({ schedule: '00 * * * * *', retention: { keepWhenever: 4 }}));
expect(error.reason).to.be(BoxError.BAD_FIELD);
});
it('can set valid backup policy', async function () {
await backupTargets.setPolicy({ schedule: '00 00 2,23 * * 0,1,2', retention: { keepWithinSecs: 1 }});
});
});
});
+78
View File
@@ -0,0 +1,78 @@
/* jslint node:true */
/* global it:false */
/* global describe:false */
/* global before:false */
/* global after:false */
'use strict';
const backupTargets = require('../backuptargets.js'),
BoxError = require('../boxerror.js'),
common = require('./common.js'),
constants = require('../constants.js'),
expect = require('expect.js'),
safe = require('safetydance');
describe('backups', function () {
    const { setup, cleanup } = common;

    // fresh server/database state for the whole suite
    before(async function () {
        await setup();
    });

    after(cleanup);

    // populated by the first test below; later tests are order-dependent and mutate this same target
    let defaultBackupTarget = null;

    it('can list backup targets', async function () {
        const result = await backupTargets.list(1, 5);
        expect(result.length).to.be(1); // a fresh install has exactly the default target
        defaultBackupTarget = result[0];
    });

    it('can get backup target', async function () {
        const backupTarget = await backupTargets.get(defaultBackupTarget.id);
        expect(backupTarget.config.provider).to.be('filesystem');
        expect(backupTarget.config.backupFolder).to.be('/var/backups');
        expect(backupTarget.format).to.be('tgz');
        expect(backupTarget.encryption).to.be(null);
    });

    it('cannot get random backup target', async function () {
        // unknown ids resolve to null rather than throwing
        const backupTarget = await backupTargets.get('random');
        expect(backupTarget).to.be(null);
    });

    it('can set backup config', async function () {
        const newConfig = Object.assign({}, defaultBackupTarget.config, { backupFolder: '/tmp/backups' });
        await backupTargets.setConfig(defaultBackupTarget, newConfig);
        const result = await backupTargets.get(defaultBackupTarget.id);
        expect(result.config.backupFolder).to.be('/tmp/backups');
    });

    it('cannot set invalid schedule', async function () {
        const [error] = await safe(backupTargets.setSchedule(defaultBackupTarget, ''));
        expect(error.reason).to.be(BoxError.BAD_FIELD);
    });

    it('can set valid schedule', async function () {
        // CRON_PATTERN_NEVER is accepted as a way to disable the schedule
        for (const pattern of [ '00 * * * * *', constants.CRON_PATTERN_NEVER ]) {
            await backupTargets.setSchedule(defaultBackupTarget, pattern);
            const backupTarget = await backupTargets.get(defaultBackupTarget.id);
            expect(backupTarget.schedule).to.be(pattern);
        }
    });

    it('cannot set invalid retention', async function () {
        const [error] = await safe(backupTargets.setRetention(defaultBackupTarget, { keepWhenever: 4 }));
        expect(error.reason).to.be(BoxError.BAD_FIELD);
    });

    it('can set valid retention', async function () {
        for (const retention of [ { keepWithinSecs: 1 }, { keepYearly: 3 }, { keepMonthly: 14 } ]) {
            await backupTargets.setRetention(defaultBackupTarget, retention);
            const backupTarget = await backupTargets.get(defaultBackupTarget.id);
            expect(backupTarget.retention).to.eql(retention);
        }
    });
});
+8 -7
View File
@@ -28,17 +28,18 @@ describe('backuptask', function () {
const backupConfig = {
provider: 'filesystem',
backupFolder: path.join(os.tmpdir(), 'backupstask-test-filesystem'),
format: 'tgz',
};
let defaultBackupTarget;
before(async function () {
fs.rmSync(backupConfig.backupFolder, { recursive: true, force: true });
await backupTargets.setStorage(backupConfig);
defaultBackupTarget = await backupTargets._getDefault();
await backupTargets.setConfig(defaultBackupTarget, backupConfig);
});
async function createBackup() {
const taskId = await backupTargets.startBackupTask({ username: 'test' });
async function createBackup(target) {
const taskId = await backupTargets.startBackupTask(target, { username: 'test' });
while (true) {
await timers.setTimeout(1000);
@@ -67,7 +68,7 @@ describe('backuptask', function () {
return;
}
const result = await createBackup();
const result = await createBackup(defaultBackupTarget);
expect(fs.statSync(path.join(backupConfig.backupFolder, 'snapshot/box.tar.gz')).nlink).to.be(2); // hard linked to a rotated backup
expect(fs.statSync(path.join(backupConfig.backupFolder, `${result.remotePath}.tar.gz`)).nlink).to.be(2);
@@ -81,7 +82,7 @@ describe('backuptask', function () {
return;
}
const result = await createBackup();
const result = await createBackup(defaultBackupTarget);
expect(fs.statSync(path.join(backupConfig.backupFolder, 'snapshot/box.tar.gz')).nlink).to.be(2); // hard linked to a rotated backup
expect(fs.statSync(path.join(backupConfig.backupFolder, `${result.remotePath}.tar.gz`)).nlink).to.be(2); // hard linked to new backup
expect(fs.statSync(path.join(backupConfig.backupFolder, `${backupInfo1.remotePath}.tar.gz`)).nlink).to.be(1); // not hard linked anymore
+1 -3
View File
@@ -186,8 +186,6 @@ exports = module.exports = {
user,
appstoreToken: 'atoken',
defaultBackupTarget: { id: null },
serverUrl: `http://localhost:${constants.PORT}`,
};
@@ -220,7 +218,7 @@ async function databaseSetup() {
await database._clear();
await appstore._setApiServerOrigin(exports.mockApiServerOrigin);
await dashboard._setLocation(constants.DASHBOARD_SUBDOMAIN, exports.dashboardDomain);
exports.defaultBackupTarget.id = await backupTargets._addDefaultTarget();
await backupTargets._addDefault();
}
async function domainSetup() {
+7 -7
View File
@@ -33,18 +33,18 @@ describe('Storage', function () {
let gTmpFolder;
const gBackupConfig = {
provider: 'filesystem',
key: 'key',
backupFolder: null,
format: 'tgz',
};
before(function (done) {
let defaultBackupTarget;
before(async function () {
gTmpFolder = fs.mkdtempSync(path.join(os.tmpdir(), 'filesystem-storage-test_'));
gBackupConfig.backupFolder = path.join(gTmpFolder, 'backups/');
defaultBackupTarget = await backupTargets._getDefault();
done();
gBackupConfig.backupFolder = path.join(gTmpFolder, 'backups/');
});
after(function (done) {
@@ -54,12 +54,12 @@ describe('Storage', function () {
it('fails to set backup storage for bad folder', async function () {
const tmp = Object.assign({}, gBackupConfig, { backupFolder: '/root/oof' });
const [error] = await safe(backupTargets.setStorage(tmp));
const [error] = await safe(backupTargets.setConfig(defaultBackupTarget, tmp));
expect(error.reason).to.equal(BoxError.BAD_FIELD);
});
it('succeeds to set backup storage', async function () {
await backupTargets.setStorage(gBackupConfig);
await backupTargets.setConfig(defaultBackupTarget, gBackupConfig);
expect(fs.existsSync(path.join(gBackupConfig.backupFolder, 'snapshot'))).to.be(true); // auto-created
});