'use strict';

// Backup management module: CRUD for backup records in the database,
// backup/cleanup task orchestration, storage provider configuration,
// backup policy and encryption key handling.
exports = module.exports = {
    get,
    getByIdentifierAndStatePaged,
    getByTypePaged,
    add,
    update,
    setState,
    list,
    del,

    startBackupTask,

    startCleanupTask,
    cleanupCacheFilesSync,

    removePrivateFields,

    generateEncryptionKeysSync,

    getSnapshotInfo,
    setSnapshotInfo,

    validatePolicy,
    validateEncryptionPassword,
    testStorage,
    validateFormat,

    getPolicy,
    setPolicy,

    getConfig,
    setConfig,
    setStorage,
    setLimits,

    getRootPath,
    setupStorage,

    remount,
    getMountStatus,

    // well-known backup identifiers (apps use their app id instead)
    BACKUP_IDENTIFIER_BOX: 'box',
    BACKUP_IDENTIFIER_MAIL: 'mail',

    // backup types stored in the `type` column
    BACKUP_TYPE_APP: 'app',
    BACKUP_TYPE_BOX: 'box',
    BACKUP_TYPE_MAIL: 'mail',

    // backup lifecycle states stored in the `state` column
    BACKUP_STATE_NORMAL: 'normal', // should rename to created to avoid listing in UI?
    BACKUP_STATE_CREATING: 'creating',
    BACKUP_STATE_ERROR: 'error',
};
|
|
|
|
const assert = require('assert'),
|
|
BoxError = require('./boxerror.js'),
|
|
constants = require('./constants.js'),
|
|
cron = require('./cron.js'),
|
|
CronJob = require('cron').CronJob,
|
|
crypto = require('crypto'),
|
|
database = require('./database.js'),
|
|
debug = require('debug')('box:backups'),
|
|
eventlog = require('./eventlog.js'),
|
|
hat = require('./hat.js'),
|
|
locker = require('./locker.js'),
|
|
mounts = require('./mounts.js'),
|
|
path = require('path'),
|
|
paths = require('./paths.js'),
|
|
safe = require('safetydance'),
|
|
settings = require('./settings.js'),
|
|
storage = require('./storage.js'),
|
|
tasks = require('./tasks.js'),
|
|
_ = require('underscore');
|
|
|
|
// columns selected by all backup queries; the *Json columns are parsed and renamed by postProcess()
const BACKUPS_FIELDS = [ 'id', 'remotePath', 'label', 'identifier', 'creationTime', 'packageVersion', 'type', 'dependsOnJson', 'state', 'manifestJson', 'format', 'preserveSecs', 'encryptionVersion' ];
|
|
|
|
// Expands the JSON columns of a raw database row in place: dependsOnJson
// becomes dependsOn (array) and manifestJson becomes manifest (object or null).
// Returns the mutated row.
function postProcess(result) {
    assert.strictEqual(typeof result, 'object');

    const { dependsOnJson, manifestJson } = result;
    delete result.dependsOnJson;
    delete result.manifestJson;

    result.dependsOn = dependsOnJson ? safe.JSON.parse(dependsOnJson) : [];
    result.manifest = manifestJson ? safe.JSON.parse(manifestJson) : null;

    return result;
}
|
|
|
|
// Strips secrets from a backup config before sending it to clients. Mutates
// and returns the argument; provider specific secrets are removed by the
// storage backend itself.
function removePrivateFields(backupConfig) {
    assert.strictEqual(typeof backupConfig, 'object');

    // encryption keys being set implies a password was configured; show a placeholder instead
    if (backupConfig.encryption) {
        backupConfig.password = constants.SECRET_PLACEHOLDER;
        delete backupConfig.encryption;
    }

    delete backupConfig.rootPath; // internal, computed in getConfig()

    return storage.api(backupConfig.provider).removePrivateFields(backupConfig);
}
|
|
|
|
// this function is used in migrations - 20200512172301-settings-backup-encryption.js
// Derives four independent 32-byte keys (hex encoded) from the password via
// scrypt with a fixed salt: data/filename encryption keys and their HMAC keys.
function generateEncryptionKeysSync(password) {
    assert.strictEqual(typeof password, 'string');

    const salt = Buffer.from('CLOUDRONSCRYPTSALT', 'utf8');
    const keyMaterial = crypto.scryptSync(password, salt, 128); // 4 keys x 32 bytes

    const hexChunk = (start, end) => keyMaterial.slice(start, end).toString('hex');

    return {
        dataKey: hexChunk(0, 32),
        dataHmacKey: hexChunk(32, 64),
        filenameKey: hexChunk(64, 96),
        filenameHmacKey: hexChunk(96, 128)
    };
}
|
|
|
|
// Inserts a new backup record and returns its generated id.
// Throws BoxError.ALREADY_EXISTS when a record with the same id exists.
async function add(data) {
    assert(data && typeof data === 'object');
    assert.strictEqual(typeof data.remotePath, 'string');
    assert(data.encryptionVersion === null || typeof data.encryptionVersion === 'number');
    assert.strictEqual(typeof data.packageVersion, 'string');
    assert.strictEqual(typeof data.type, 'string');
    assert.strictEqual(typeof data.identifier, 'string');
    assert.strictEqual(typeof data.state, 'string');
    assert(Array.isArray(data.dependsOn));
    assert.strictEqual(typeof data.manifest, 'object');
    assert.strictEqual(typeof data.format, 'string');
    assert.strictEqual(typeof data.preserveSecs, 'number');

    const creationTime = data.creationTime || new Date(); // allow tests to set the time

    // app backups carry the app identifier in the id; type and identifier are same for other types
    const prefixId = data.type === exports.BACKUP_TYPE_APP ? `${data.type}_${data.identifier}` : data.type;
    // id is used by the UI to derive dependent packages. making this a UUID will require a lot of db querying
    const id = `${prefixId}_v${data.packageVersion}_${hat(32)}`;

    const sql = 'INSERT INTO backups (id, remotePath, identifier, encryptionVersion, packageVersion, type, creationTime, state, dependsOnJson, manifestJson, format, preserveSecs) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
    const args = [ id, data.remotePath, data.identifier, data.encryptionVersion, data.packageVersion, data.type, creationTime, data.state, JSON.stringify(data.dependsOn), JSON.stringify(data.manifest), data.format, data.preserveSecs ];

    const [error] = await safe(database.query(sql, args));
    if (error && error.code === 'ER_DUP_ENTRY') throw new BoxError(BoxError.ALREADY_EXISTS, 'Backup already exists');
    if (error) throw error;

    return id;
}
|
|
|
|
// Lists backups for an identifier in a given state, newest first, paginated
// (page is 1-based).
async function getByIdentifierAndStatePaged(identifier, state, page, perPage) {
    assert.strictEqual(typeof identifier, 'string');
    assert.strictEqual(typeof state, 'string');
    assert(typeof page === 'number' && page > 0);
    assert(typeof perPage === 'number' && perPage > 0);

    const offset = (page-1)*perPage;
    const results = await database.query(`SELECT ${BACKUPS_FIELDS} FROM backups WHERE identifier = ? AND state = ? ORDER BY creationTime DESC LIMIT ?,?`, [ identifier, state, offset, perPage ]);

    for (const row of results) postProcess(row);

    return results;
}
|
|
|
|
// Fetches a single backup record by id. Returns null when not found.
async function get(id) {
    assert.strictEqual(typeof id, 'string');

    const rows = await database.query('SELECT ' + BACKUPS_FIELDS + ' FROM backups WHERE id = ? ORDER BY creationTime DESC', [ id ]);

    return rows.length === 0 ? null : postProcess(rows[0]);
}
|
|
|
|
// Lists backups of a given type, newest first, paginated (page is 1-based).
async function getByTypePaged(type, page, perPage) {
    assert.strictEqual(typeof type, 'string');
    assert(typeof page === 'number' && page > 0);
    assert(typeof perPage === 'number' && perPage > 0);

    const offset = (page-1)*perPage;
    const results = await database.query(`SELECT ${BACKUPS_FIELDS} FROM backups WHERE type = ? ORDER BY creationTime DESC LIMIT ?,?`, [ type, offset, perPage ]);

    for (const row of results) postProcess(row);

    return results;
}
|
|
|
|
// Checks a user supplied backup label. Returns null when valid, a BoxError
// (BAD_FIELD) describing the problem otherwise.
function validateLabel(label) {
    assert.strictEqual(typeof label, 'string');

    if (label.length >= 200) return new BoxError(BoxError.BAD_FIELD, 'label too long');

    const forbidden = /[^a-zA-Z0-9._() -]/;
    if (forbidden.test(label)) return new BoxError(BoxError.BAD_FIELD, 'label can only contain alphanumerals, space, dot, hyphen, brackets or underscore');

    return null;
}
|
|
|
|
// Validates a backup policy object ({ schedule, retention }).
// Returns null when valid, a BoxError (BAD_FIELD) otherwise — consistent with
// validateLabel/validateFormat (previously this returned undefined on success).
async function validatePolicy(policy) {
    assert.strictEqual(typeof policy, 'object');

    // schedule must be a valid cron pattern
    const job = safe.safeCall(function () { return new CronJob(policy.schedule); });
    if (!job) return new BoxError(BoxError.BAD_FIELD, 'Invalid schedule pattern');

    const retention = policy.retention;
    if (!retention) return new BoxError(BoxError.BAD_FIELD, 'retention is required');

    const RETENTION_KEYS = [ 'keepWithinSecs', 'keepDaily', 'keepWeekly', 'keepMonthly', 'keepYearly' ];

    // at least one retention property must be set to a truthy (non-zero) value
    if (!RETENTION_KEYS.find(k => !!retention[k])) return new BoxError(BoxError.BAD_FIELD, 'retention properties missing');

    // any retention property that is present must be numeric
    for (const key of RETENTION_KEYS) {
        if (key in retention && typeof retention[key] !== 'number') return new BoxError(BoxError.BAD_FIELD, `retention.${key} must be a number`);
    }

    return null;
}
|
|
|
|
// this is called by REST API
// Updates the mutable fields of a backup record ('label' and 'preserveSecs'
// only; other keys in data are ignored). preserveSecs changes are propagated
// to all backups this one depends on.
// Throws BoxError.NOT_FOUND when the backup does not exist.
async function update(id, data) {
    assert.strictEqual(typeof id, 'string');
    assert.strictEqual(typeof data, 'object');

    if ('label' in data) {
        const error = validateLabel(data.label);
        if (error) throw error;
    }

    // only allow-listed columns can be updated via this API
    const fields = [], values = [];
    for (const p in data) {
        if (p === 'label' || p === 'preserveSecs') {
            fields.push(p + ' = ?');
            values.push(data[p]);
        }
    }

    const backup = await get(id);
    if (backup === null) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');

    // fix: without this guard an empty field list generated invalid SQL ('UPDATE backups SET  WHERE ...')
    if (fields.length === 0) return;

    values.push(id);

    const result = await database.query('UPDATE backups SET ' + fields.join(', ') + ' WHERE id = ?', values);
    if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');

    if ('preserveSecs' in data) {
        // propagate the preservation window to dependent backups
        for (const depId of backup.dependsOn) {
            await database.query('UPDATE backups SET preserveSecs=? WHERE id = ?', [ data.preserveSecs, depId]);
        }
    }
}
|
|
|
|
// Sets the lifecycle state of a backup record.
// Throws BoxError.NOT_FOUND when the backup does not exist.
async function setState(id, state) {
    assert.strictEqual(typeof id, 'string');
    assert.strictEqual(typeof state, 'string');

    const { affectedRows } = await database.query('UPDATE backups SET state = ? WHERE id = ?', [state, id]);
    if (affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
}
|
|
|
|
// Acquires the full-backup lock, creates and starts the backup task and logs
// start/finish events. Returns the task id.
// fix: the lock is now released if any setup step throws (previously a failure
// in getConfig/tasks.add/eventlog.add left the lock held forever).
async function startBackupTask(auditSource) {
    const lockError = locker.lock(locker.OP_FULL_BACKUP);
    if (lockError) throw new BoxError(BoxError.BAD_STATE, `Cannot backup now: ${lockError.message}`);

    let taskId;
    try {
        const backupConfig = await getConfig();

        // presumably limits.memoryLimit is in bytes and the task option in MB, with a 1024 floor — TODO confirm units
        const memoryLimit = backupConfig.limits?.memoryLimit ? Math.max(backupConfig.limits.memoryLimit/1024/1024, 1024) : 1024;

        taskId = await tasks.add(tasks.TASK_BACKUP, [ { /* options */ } ]);

        await eventlog.add(eventlog.ACTION_BACKUP_START, auditSource, { taskId });

        tasks.startTask(taskId, { timeout: 24 * 60 * 60 * 1000 /* 24 hours */, nice: 15, memoryLimit }, async function (error, backupId) {
            locker.unlock(locker.OP_FULL_BACKUP); // task finished, release the lock

            const errorMessage = error ? error.message : '';
            const timedOut = error ? error.code === tasks.ETIMEOUT : false;

            const backup = backupId ? await get(backupId) : null;
            await safe(eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, errorMessage, timedOut, backupId, remotePath: backup?.remotePath }), { debug });
        });
    } catch (error) {
        locker.unlock(locker.OP_FULL_BACKUP); // do not leave the lock held on setup failure
        throw error;
    }

    return taskId;
}
|
|
|
|
// Lists all backups, newest first, paginated (page is 1-based).
async function list(page, perPage) {
    assert(typeof page === 'number' && page > 0);
    assert(typeof perPage === 'number' && perPage > 0);

    const rows = await database.query('SELECT ' + BACKUPS_FIELDS + ' FROM backups ORDER BY creationTime DESC LIMIT ?,?', [ (page-1)*perPage, perPage ]);

    for (const row of rows) postProcess(row);

    return rows;
}
|
|
|
|
// Deletes a backup record. Throws BoxError.NOT_FOUND when it does not exist.
async function del(id) {
    assert.strictEqual(typeof id, 'string');

    const { affectedRows } = await database.query('DELETE FROM backups WHERE id=?', [ id ]);
    if (affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
}
|
|
|
|
// this function is used in migrations - 20200512172301-settings-backup-encryption.js
// Removes all *.sync.cache files from the backup info directory. Best effort:
// read/unlink failures are swallowed by the safe.fs wrappers.
function cleanupCacheFilesSync() {
    const entries = safe.fs.readdirSync(path.join(paths.BACKUP_INFO_DIR));
    if (!entries) return; // directory unreadable or missing

    for (const entry of entries) {
        if (!entry.endsWith('.sync.cache')) continue;
        safe.fs.unlinkSync(path.join(paths.BACKUP_INFO_DIR, entry));
    }
}
|
|
|
|
// Returns the snapshot info entry for the given id, or an empty object when
// the snapshot info file is missing, unparsable or has no entry for id.
function getSnapshotInfo(id) {
    assert.strictEqual(typeof id, 'string');

    const info = safe.JSON.parse(safe.fs.readFileSync(paths.SNAPSHOT_INFO_FILE, 'utf8'));

    return info ? (info[id] || { }) : { };
}
|
|
|
|
// keeps track of contents of the snapshot directory. this provides a way to clean up backups of uninstalled apps
// Passing a falsy info removes the entry for id. Throws BoxError.FS_ERROR when
// the info file cannot be written.
async function setSnapshotInfo(id, info) {
    assert.strictEqual(typeof id, 'string');
    assert.strictEqual(typeof info, 'object');

    const allInfo = safe.JSON.parse(safe.fs.readFileSync(paths.SNAPSHOT_INFO_FILE, 'utf8')) || { };

    if (!info) delete allInfo[id];
    else allInfo[id] = info;

    const written = safe.fs.writeFileSync(paths.SNAPSHOT_INFO_FILE, JSON.stringify(allInfo, null, 4), 'utf8');
    if (!written) throw new BoxError(BoxError.FS_ERROR, safe.error.message);
}
|
|
|
|
// Creates and starts the backup cleanup task and logs a finish event with the
// cleanup results. Returns the task id.
async function startCleanupTask(auditSource) {
    assert.strictEqual(typeof auditSource, 'object');

    const taskId = await tasks.add(tasks.TASK_CLEAN_BACKUPS, []);

    // result is { removedBoxBackupPaths, removedAppBackupPaths, removedMailBackupPaths, missingBackupPaths }
    tasks.startTask(taskId, {}, async (error, result) => {
        const details = {
            taskId,
            errorMessage: error ? error.message : null,
            removedBoxBackupPaths: result ? result.removedBoxBackupPaths : [],
            removedMailBackupPaths: result ? result.removedMailBackupPaths : [],
            removedAppBackupPaths: result ? result.removedAppBackupPaths : [],
            missingBackupPaths: result ? result.missingBackupPaths : []
        };

        await safe(eventlog.add(eventlog.ACTION_BACKUP_CLEANUP_FINISH, auditSource, details), { debug });
    });

    return taskId;
}
|
|
|
|
// Verifies the storage configuration against the provider backend.
// Returns a BoxError for an unknown provider; testConfig throws on failure.
// fix: reuse the provider lookup instead of calling storage.api() twice.
async function testStorage(storageConfig) {
    assert.strictEqual(typeof storageConfig, 'object');

    const api = storage.api(storageConfig.provider);
    if (!api) return new BoxError(BoxError.BAD_FIELD, 'unknown storage provider');

    await api.testConfig(storageConfig);
}
|
|
|
|
// Checks the backup encryption password. Returns null when acceptable, a
// BoxError (BAD_FIELD) otherwise.
async function validateEncryptionPassword(password) {
    assert.strictEqual(typeof password, 'string');

    // fix: message typo "atleast" -> "at least"
    if (password.length < 8) return new BoxError(BoxError.BAD_FIELD, 'password must be at least 8 characters');

    return null;
}
|
|
|
|
// Builds the mount descriptor for a managed backup provider; the storage is
// always mounted at MANAGED_BACKUP_MOUNT_DIR under the fixed name 'backup'.
function managedBackupMountObject(backupConfig) {
    assert(mounts.isManagedProvider(backupConfig.provider));

    const { provider, mountOptions } = backupConfig;

    return {
        name: 'backup',
        hostPath: paths.MANAGED_BACKUP_MOUNT_DIR,
        mountType: provider,
        mountOptions
    };
}
|
|
|
|
// Remounts the backup storage. Only managed providers have a mount; anything
// else is a no-op.
async function remount(auditSource) {
    assert.strictEqual(typeof auditSource, 'object');

    const backupConfig = await getConfig();

    if (!mounts.isManagedProvider(backupConfig.provider)) return;

    await mounts.remount(managedBackupMountObject(backupConfig));
}
|
|
|
|
// Resolves the host path of the backup mount based on the provider and asks
// the mounts module for its status. Throws BAD_STATE when the configured
// backup location is not mount-based.
async function getMountStatus() {
    const backupConfig = await getConfig();
    const { provider } = backupConfig;

    let hostPath;
    if (mounts.isManagedProvider(provider)) hostPath = paths.MANAGED_BACKUP_MOUNT_DIR;
    else if (provider === 'mountpoint') hostPath = backupConfig.mountPoint;
    else if (provider === 'filesystem') hostPath = backupConfig.backupFolder;
    else throw new BoxError(BoxError.BAD_STATE, 'Backup location is not a mount');

    return await mounts.getStatus(provider, hostPath); // { state, message }
}
|
|
|
|
// Returns the stored backup policy, falling back to the default of a daily
// backup at 11pm keeping 2 days worth of backups.
async function getPolicy() {
    const policy = await settings.getJson(settings.BACKUP_POLICY_KEY);
    if (policy) return policy;

    return {
        retention: { keepWithinSecs: 2 * 24 * 60 * 60 }, // 2 days
        schedule: '00 00 23 * * *' // every day at 11pm
    };
}
|
|
|
|
// Validates, persists and activates a new backup policy.
// Throws the validation BoxError when the policy is invalid.
async function setPolicy(policy) {
    assert.strictEqual(typeof policy, 'object');

    const validationError = await validatePolicy(policy);
    if (validationError) throw validationError;

    await settings.setJson(settings.BACKUP_POLICY_KEY, policy);
    await cron.handleBackupPolicyChanged(policy); // reschedule the backup cron job
}
|
|
|
|
// Computes the root path under which backups are stored for the given storage
// configuration. mountPath is only used for managed (mounted) providers.
function getRootPath(storageConfig, mountPath) {
    assert.strictEqual(typeof storageConfig, 'object');
    assert.strictEqual(typeof mountPath, 'string');

    if (mounts.isManagedProvider(storageConfig.provider)) return path.join(mountPath, storageConfig.prefix);

    switch (storageConfig.provider) {
    case 'mountpoint': return path.join(storageConfig.mountPoint, storageConfig.prefix);
    case 'filesystem': return storageConfig.backupFolder;
    default: return storageConfig.prefix; // cloud providers use the prefix as-is
    }
}
|
|
|
|
// Returns the backup storage configuration (with defaults), augmented with
// limits (when set) and the computed rootPath.
async function getConfig() {
    const defaults = { provider: 'filesystem', backupFolder: paths.DEFAULT_BACKUP_DIR, format: 'tgz', encryption: null };
    const result = await settings.getJson(settings.BACKUP_STORAGE_KEY) || defaults;

    const limits = await settings.getJson(settings.BACKUP_LIMITS_KEY);
    if (limits) result.limits = limits;

    result.rootPath = getRootPath(result, paths.MANAGED_BACKUP_MOUNT_DIR); // note: rootPath will be dynamic for managed mount providers during app import

    return result;
}
|
|
|
|
// Persists the backup configuration; limits are stored under their own key.
async function setConfig(backupConfig) {
    assert.strictEqual(typeof backupConfig, 'object');

    const storageConfig = _.omit(backupConfig, 'limits');
    await settings.setJson(settings.BACKUP_STORAGE_KEY, storageConfig);
    await settings.setJson(settings.BACKUP_LIMITS_KEY, backupConfig.limits || null);
}
|
|
|
|
// Persists the backup task resource limits.
async function setLimits(limits) {
    assert.strictEqual(typeof limits, 'object');

    await settings.setJson(settings.BACKUP_LIMITS_KEY, limits);
}
|
|
|
|
// Checks the backup format. Returns null for the supported formats ('tgz',
// 'rsync'), a BoxError (BAD_FIELD) otherwise.
// fix: 'rsync' was compared with loose == instead of ===.
function validateFormat(format) {
    assert.strictEqual(typeof format, 'string');

    if (format === 'tgz' || format === 'rsync') return null;

    return new BoxError(BoxError.BAD_FIELD, 'Invalid backup format');
}
|
|
|
|
// For managed providers, mounts the remote storage at hostPath and returns
// the mount object; returns null for providers that need no mount. Throws on
// invalid mount options or on mount failure.
async function setupStorage(storageConfig, hostPath) {
    assert.strictEqual(typeof storageConfig, 'object');
    assert.strictEqual(typeof hostPath, 'string');

    if (!mounts.isManagedProvider(storageConfig.provider)) return null; // nothing to mount

    const optionsError = mounts.validateMountOptions(storageConfig.provider, storageConfig.mountOptions);
    if (optionsError) throw optionsError;

    debug(`setupStorage: setting up mount at ${hostPath} with ${storageConfig.provider}`);

    const mountObject = {
        name: path.basename(hostPath),
        hostPath: hostPath,
        mountType: storageConfig.provider,
        mountOptions: storageConfig.mountOptions
    };

    await mounts.tryAddMount(mountObject, { timeout: 10 }); // 10 seconds

    return mountObject;
}
|
|
|
|
// Switches the backup storage configuration: validates the new config against
// a temporary mount, tears down the old mount, sets up the new one, derives
// encryption keys from the password (if any) and persists the result.
// fixes: local variable typo 'foratmError' -> 'formatError'; cache-clearing
// debug line said 'setBackupConfig' instead of 'setStorage'.
async function setStorage(storageConfig) {
    assert.strictEqual(typeof storageConfig, 'object');

    const oldConfig = await getConfig();

    // same provider: the client may have sent placeholder secrets, restore the real ones
    if (storageConfig.provider === oldConfig.provider) storage.api(storageConfig.provider).injectPrivateFields(storageConfig, oldConfig);

    const formatError = validateFormat(storageConfig.format);
    if (formatError) throw formatError;

    debug('setStorage: validating new storage configuration');
    const rootPath = getRootPath(storageConfig, '/mnt/backup-storage-validation');
    const testStorageConfig = Object.assign({ rootPath }, storageConfig);
    const testMountObject = await setupStorage(testStorageConfig, '/mnt/backup-storage-validation');
    const testStorageError = await testStorage(testStorageConfig);
    if (testMountObject) await mounts.removeMount(testMountObject); // always clean up the validation mount
    if (testStorageError) throw testStorageError;

    debug('setStorage: removing old storage configuration');
    if (mounts.isManagedProvider(oldConfig.provider)) await safe(mounts.removeMount(managedBackupMountObject(oldConfig)));

    debug('setStorage: setting up new storage configuration');
    await setupStorage(storageConfig, paths.MANAGED_BACKUP_MOUNT_DIR);

    storageConfig.encryption = null;
    if ('password' in storageConfig) { // user set password
        if (storageConfig.password === constants.SECRET_PLACEHOLDER) {
            storageConfig.encryption = oldConfig.encryption || null; // password unchanged, keep existing keys
        } else {
            const encryptionPasswordError = await validateEncryptionPassword(storageConfig.password);
            if (encryptionPasswordError) throw encryptionPasswordError;

            storageConfig.encryption = generateEncryptionKeysSync(storageConfig.password);
        }
        delete storageConfig.password; // never persist the raw password
    }

    debug('setStorage: clearing backup cache');
    cleanupCacheFilesSync();

    await settings.setJson(settings.BACKUP_STORAGE_KEY, storageConfig);
}
|