// NOTE: One idea was to compute this at cleanup time, but that has two problems:
//   * the UI would not reflect the value (which may or may not matter)
//   * the cleaner has no easy way to find the "parent" backup
// If we want to go down this route, we would have to change our data structure.
'use strict';

// Public interface of the backups module: CRUD over backup records,
// backup/cleanup task orchestration, backup-config validation and helpers.
exports = module.exports = {
    get,
    getByIdentifierAndStatePaged,
    getByTypePaged,
    add,
    update,
    setState,
    list,
    del,

    startBackupTask,

    startCleanupTask,
    cleanupCacheFilesSync,

    injectPrivateFields,
    removePrivateFields,

    configureCollectd,

    generateEncryptionKeysSync,

    getSnapshotInfo,
    setSnapshotInfo,

    testConfig,
    testProviderConfig,

    remount,

    BACKUP_IDENTIFIER_BOX: 'box',
    BACKUP_IDENTIFIER_MAIL: 'mail',

    BACKUP_TYPE_APP: 'app',
    BACKUP_TYPE_BOX: 'box',
    BACKUP_TYPE_MAIL: 'mail',

    BACKUP_STATE_NORMAL: 'normal', // should rename to created to avoid listing in UI?
    BACKUP_STATE_CREATING: 'creating',
    BACKUP_STATE_ERROR: 'error',
};
|
|
|
|
const assert = require('assert'),
|
|
BoxError = require('./boxerror.js'),
|
|
collectd = require('./collectd.js'),
|
|
constants = require('./constants.js'),
|
|
CronJob = require('cron').CronJob,
|
|
crypto = require('crypto'),
|
|
database = require('./database.js'),
|
|
debug = require('debug')('box:backups'),
|
|
ejs = require('ejs'),
|
|
eventlog = require('./eventlog.js'),
|
|
fs = require('fs'),
|
|
locker = require('./locker.js'),
|
|
path = require('path'),
|
|
paths = require('./paths.js'),
|
|
safe = require('safetydance'),
|
|
settings = require('./settings.js'),
|
|
storage = require('./storage.js'),
|
|
tasks = require('./tasks.js'),
|
|
util = require('util'),
|
|
uuid = require('uuid');
|
|
|
|
// collectd plugin template, rendered with the backup directory when the
// 'filesystem' provider is active (see configureCollectd)
const COLLECTD_CONFIG_EJS = fs.readFileSync(`${__dirname}/collectd/cloudron-backup.ejs`, { encoding: 'utf8' });

// column list for SELECT queries; interpolated into SQL strings, where the
// array stringifies to a comma separated list
const BACKUPS_FIELDS = [ 'id', 'remotePath', 'label', 'identifier', 'creationTime', 'packageVersion', 'type', 'dependsOnJson', 'state', 'manifestJson', 'format', 'preserveSecs', 'encryptionVersion' ];
|
|
|
|
// helper until all storage providers have been ported
// Returns func unchanged when it is already an async function; otherwise
// wraps the node-style callback function into a promise-returning one.
function maybePromisify(func) {
    return util.types.isAsyncFunction(func) ? func : util.promisify(func);
}
|
|
|
|
// Expands the *Json columns of a raw database row in place (dependsOnJson ->
// dependsOn array, manifestJson -> manifest object) and returns the row.
function postProcess(result) {
    assert.strictEqual(typeof result, 'object');

    const { dependsOnJson, manifestJson } = result;

    result.dependsOn = dependsOnJson ? safe.JSON.parse(dependsOnJson) : [];
    delete result.dependsOnJson;

    result.manifest = manifestJson ? safe.JSON.parse(manifestJson) : null;
    delete result.manifestJson;

    return result;
}
|
|
|
|
// Merges secrets the client does not resend back into the new backup config.
// A placeholder password means "unchanged" and is dropped; whenever a password
// field is present the stored encryption keys are carried over (presumably a
// new password re-derives keys elsewhere — TODO confirm); no password field
// at all disables encryption.
function injectPrivateFields(newConfig, currentConfig) {
    if (!('password' in newConfig)) {
        newConfig.encryption = null;
    } else {
        if (newConfig.password === constants.SECRET_PLACEHOLDER) delete newConfig.password;
        newConfig.encryption = currentConfig.encryption || null;
    }

    // provider specific secrets can only be carried over within the same provider
    if (newConfig.provider === currentConfig.provider) storage.api(newConfig.provider).injectPrivateFields(newConfig, currentConfig);
}
|
|
|
|
// Strips secrets from a backup config before it is sent to a client: the
// encryption keys are removed (replaced by a placeholder password so the UI
// shows that one is set) and the provider gets to scrub its own fields.
function removePrivateFields(backupConfig) {
    assert.strictEqual(typeof backupConfig, 'object');

    if (backupConfig.encryption) {
        delete backupConfig.encryption;
        backupConfig.password = constants.SECRET_PLACEHOLDER;
    }

    return storage.api(backupConfig.provider).removePrivateFields(backupConfig);
}
|
|
|
|
// this function is used in migrations - 20200512172301-settings-backup-encryption.js
// Derives the four 32-byte backup encryption keys from a password via scrypt
// (fixed salt, 128 bytes of key material) and returns them hex encoded.
function generateEncryptionKeysSync(password) {
    assert.strictEqual(typeof password, 'string');

    const keyMaterial = crypto.scryptSync(password, Buffer.from('CLOUDRONSCRYPTSALT', 'utf8'), 128);
    const hexChunk = (begin, end) => keyMaterial.subarray(begin, end).toString('hex');

    return {
        dataKey: hexChunk(0, 32),
        dataHmacKey: hexChunk(32, 64),
        filenameKey: hexChunk(64, 96),
        filenameHmacKey: hexChunk(96, 128)
    };
}
|
|
|
|
// Inserts a new backup record and returns its generated 'bid-' id.
// Throws BoxError.ALREADY_EXISTS on a duplicate key; rethrows other DB errors.
async function add(data) {
    assert(data && typeof data === 'object');
    assert.strictEqual(typeof data.remotePath, 'string');
    assert(data.encryptionVersion === null || typeof data.encryptionVersion === 'number');
    assert.strictEqual(typeof data.packageVersion, 'string');
    assert.strictEqual(typeof data.type, 'string');
    assert.strictEqual(typeof data.identifier, 'string');
    assert.strictEqual(typeof data.state, 'string');
    assert(Array.isArray(data.dependsOn));
    assert.strictEqual(typeof data.manifest, 'object');
    assert.strictEqual(typeof data.format, 'string');
    assert.strictEqual(typeof data.preserveSecs, 'number');

    const creationTime = data.creationTime || new Date(); // allow tests to set the time
    const backupId = 'bid-' + uuid.v4();

    const [error] = await safe(database.query('INSERT INTO backups (id, remotePath, identifier, encryptionVersion, packageVersion, type, creationTime, state, dependsOnJson, manifestJson, format, preserveSecs) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
        [ backupId, data.remotePath, data.identifier, data.encryptionVersion, data.packageVersion, data.type, creationTime, data.state, JSON.stringify(data.dependsOn), JSON.stringify(data.manifest), data.format, data.preserveSecs ]));

    if (error && error.code === 'ER_DUP_ENTRY') throw new BoxError(BoxError.ALREADY_EXISTS, 'Backup already exists');
    if (error) throw error;

    return backupId;
}
|
|
|
|
// Returns a page of backups matching identifier and state, newest first.
// page is 1-based; perPage is the page size.
async function getByIdentifierAndStatePaged(identifier, state, page, perPage) {
    assert.strictEqual(typeof identifier, 'string');
    assert.strictEqual(typeof state, 'string');
    assert(typeof page === 'number' && page > 0);
    assert(typeof perPage === 'number' && perPage > 0);

    const offset = (page - 1) * perPage;
    const rows = await database.query(`SELECT ${BACKUPS_FIELDS} FROM backups WHERE identifier = ? AND state = ? ORDER BY creationTime DESC LIMIT ?,?`, [ identifier, state, offset, perPage ]);

    for (const row of rows) postProcess(row);

    return rows;
}
|
|
|
|
// Fetches a single backup record by id. Returns null when it does not exist.
async function get(id) {
    assert.strictEqual(typeof id, 'string');

    const rows = await database.query(`SELECT ${BACKUPS_FIELDS} FROM backups WHERE id = ? ORDER BY creationTime DESC`, [ id ]);

    return rows.length === 0 ? null : postProcess(rows[0]);
}
|
|
|
|
// Returns a page of backups of the given type ('app'/'box'/'mail'), newest
// first. page is 1-based; perPage is the page size.
async function getByTypePaged(type, page, perPage) {
    assert.strictEqual(typeof type, 'string');
    assert(typeof page === 'number' && page > 0);
    assert(typeof perPage === 'number' && perPage > 0);

    const offset = (page - 1) * perPage;
    const rows = await database.query(`SELECT ${BACKUPS_FIELDS} FROM backups WHERE type = ? ORDER BY creationTime DESC LIMIT ?,?`, [ type, offset, perPage ]);

    for (const row of rows) postProcess(row);

    return rows;
}
|
|
|
|
// Validates a user supplied backup label. Returns null when valid, otherwise
// a BAD_FIELD BoxError describing the problem (not thrown).
function validateLabel(label) {
    assert.strictEqual(typeof label, 'string');

    if (label.length >= 200) return new BoxError(BoxError.BAD_FIELD, 'label too long');

    const invalidChar = /[^a-zA-Z0-9._()-]/;
    if (invalidChar.test(label)) return new BoxError(BoxError.BAD_FIELD, 'label can only contain alphanumerals, dot, hyphen, brackets or underscore');

    return null;
}
|
|
|
|
// this is called by REST API
// Updates the mutable fields of a backup (label, preserveSecs); other keys in
// data are ignored. A preserveSecs change is propagated to all backups this
// one depends on, so they expire together.
// Throws BAD_FIELD for an invalid label and NOT_FOUND for an unknown id.
async function update(id, data) {
    assert.strictEqual(typeof id, 'string');
    assert.strictEqual(typeof data, 'object');

    if ('label' in data) {
        const error = validateLabel(data.label);
        if (error) throw error;
    }

    // whitelist of updatable columns
    const fields = [], values = [];
    for (const p in data) {
        if (p === 'label' || p === 'preserveSecs') {
            fields.push(p + ' = ?');
            values.push(data[p]);
        }
    }
    values.push(id);

    const backup = await get(id);
    if (backup === null) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');

    if (fields.length === 0) return; // fix: previously generated invalid SQL ('UPDATE backups SET  WHERE id = ?') when data had no updatable fields

    const result = await database.query('UPDATE backups SET ' + fields.join(', ') + ' WHERE id = ?', values);
    if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');

    if ('preserveSecs' in data) {
        // update the dependancies
        for (const depId of backup.dependsOn) {
            await database.query('UPDATE backups SET preserveSecs=? WHERE id = ?', [ data.preserveSecs, depId]);
        }
    }
}
|
|
|
|
// Sets the state column of a backup ('normal'/'creating'/'error').
// Throws NOT_FOUND when the id does not exist.
async function setState(id, state) {
    assert.strictEqual(typeof id, 'string');
    assert.strictEqual(typeof state, 'string');

    const result = await database.query('UPDATE backups SET state = ? WHERE id = ?', [ state, id ]);

    if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
}
|
|
|
|
// Acquires the full-backup lock, records a start event and launches the
// backup task in the background. Resolves with the task id; the lock is
// released and a finish event is logged when the task completes.
// Throws BAD_STATE when another operation holds the lock.
async function startBackupTask(auditSource) {
    const lockError = locker.lock(locker.OP_FULL_BACKUP);
    if (lockError) throw new BoxError(BoxError.BAD_STATE, `Cannot backup now: ${lockError.message}`);

    const backupConfig = await settings.getBackupConfig();

    // at least 800 for the task; the configured value looks like bytes
    // converted to MB here — TODO confirm the unit
    const memoryLimit = 'memoryLimit' in backupConfig ? Math.max(backupConfig.memoryLimit/1024/1024, 800) : 800;

    const taskId = await tasks.add(tasks.TASK_BACKUP, [ { /* options */ } ]);

    await eventlog.add(eventlog.ACTION_BACKUP_START, auditSource, { taskId });

    tasks.startTask(taskId, { timeout: 24 * 60 * 60 * 1000 /* 24 hours */, nice: 15, memoryLimit }, async function (taskError, backupId) {
        locker.unlock(locker.OP_FULL_BACKUP);

        const errorMessage = taskError ? taskError.message : '';
        const timedOut = taskError ? taskError.code === tasks.ETIMEOUT : false;

        // best-effort: a failed eventlog write must not crash the process
        await safe(eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, errorMessage, timedOut, backupId }), { debug });
    });

    return taskId;
}
|
|
|
|
// Returns a page of all backups, newest first. page is 1-based.
async function list(page, perPage) {
    assert(typeof page === 'number' && page > 0);
    assert(typeof perPage === 'number' && perPage > 0);

    const offset = (page - 1) * perPage;
    const rows = await database.query('SELECT ' + BACKUPS_FIELDS + ' FROM backups ORDER BY creationTime DESC LIMIT ?,?', [ offset, perPage ]);

    for (const row of rows) postProcess(row);

    return rows;
}
|
|
|
|
// Deletes a backup record by id. Throws NOT_FOUND when it does not exist.
// Only removes the DB row; the remote artifacts are handled by the cleaner.
async function del(id) {
    assert.strictEqual(typeof id, 'string');

    const result = await database.query('DELETE FROM backups WHERE id=?', [ id ]);

    if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
}
|
|
|
|
// this function is used in migrations - 20200512172301-settings-backup-encryption.js
// Removes all '*.sync.cache' files from the backup info directory.
// Best-effort: missing directory or failed unlinks are silently ignored.
function cleanupCacheFilesSync() {
    const entries = safe.fs.readdirSync(path.join(paths.BACKUP_INFO_DIR));
    if (!entries) return;

    for (const entry of entries) {
        if (!entry.endsWith('.sync.cache')) continue;
        safe.fs.unlinkSync(path.join(paths.BACKUP_INFO_DIR, entry));
    }
}
|
|
|
|
// Returns the snapshot info recorded for id from the snapshot info file,
// or an empty object when the file is missing/unparseable or has no entry.
function getSnapshotInfo(id) {
    assert.strictEqual(typeof id, 'string');

    const contents = safe.fs.readFileSync(paths.SNAPSHOT_INFO_FILE, 'utf8');
    const allInfo = safe.JSON.parse(contents);

    return (allInfo && allInfo[id]) || { };
}
|
|
|
|
// keeps track of contents of the snapshot directory. this provides a way to clean up backups of uninstalled apps
// Stores info under id in the snapshot info file; a null info removes the
// entry (typeof null === 'object', so it passes the assert).
// Throws FS_ERROR when the file cannot be written.
async function setSnapshotInfo(id, info) {
    assert.strictEqual(typeof id, 'string');
    assert.strictEqual(typeof info, 'object');

    const contents = safe.fs.readFileSync(paths.SNAPSHOT_INFO_FILE, 'utf8');
    const data = safe.JSON.parse(contents) || { };

    if (info) {
        data[id] = info;
    } else {
        delete data[id];
    }

    const written = safe.fs.writeFileSync(paths.SNAPSHOT_INFO_FILE, JSON.stringify(data, null, 4), 'utf8');
    if (!written) throw new BoxError(BoxError.FS_ERROR, safe.error.message);
}
|
|
|
|
// Launches the backup cleanup task in the background and resolves with its
// task id. A finish event (with the removed/missing path lists) is logged
// best-effort when the task completes.
async function startCleanupTask(auditSource) {
    assert.strictEqual(typeof auditSource, 'object');

    const taskId = await tasks.add(tasks.TASK_CLEAN_BACKUPS, []);

    tasks.startTask(taskId, {}, async (error, result) => { // result is { removedBoxBackupPaths, removedAppBackupPaths, removedMailBackupPaths, missingBackupPaths }
        const pathsOf = (key) => result ? result[key] : [];

        await safe(eventlog.add(eventlog.ACTION_BACKUP_CLEANUP_FINISH, auditSource, {
            taskId,
            errorMessage: error ? error.message : null,
            removedBoxBackupPaths: pathsOf('removedBoxBackupPaths'),
            removedMailBackupPaths: pathsOf('removedMailBackupPaths'),
            removedAppBackupPaths: pathsOf('removedAppBackupPaths'),
            missingBackupPaths: pathsOf('missingBackupPaths')
        }), { debug });
    });

    return taskId;
}
|
|
|
|
// Installs the collectd backup-disk profile when the 'filesystem' provider is
// configured (it monitors the local backup folder); removes it otherwise.
async function configureCollectd(backupConfig) {
    assert.strictEqual(typeof backupConfig, 'object');

    if (backupConfig.provider !== 'filesystem') {
        await collectd.removeProfile('cloudron-backup');
        return;
    }

    const collectdConf = ejs.render(COLLECTD_CONFIG_EJS, { backupDir: backupConfig.backupFolder });
    await collectd.addProfile('cloudron-backup', collectdConf);
}
|
|
|
|
// Validates a full backup config (provider, format, schedule, password policy,
// retention policy) and then asks the provider to test its own settings.
// Returns null/undefined on success, or a BoxError (not thrown) on failure.
async function testConfig(backupConfig) {
    assert.strictEqual(typeof backupConfig, 'object');

    const func = storage.api(backupConfig.provider);
    if (!func) return new BoxError(BoxError.BAD_FIELD, 'unknown storage provider');

    if (backupConfig.format !== 'tgz' && backupConfig.format !== 'rsync') return new BoxError(BoxError.BAD_FIELD, 'unknown format');

    // CronJob throws on a bad pattern; safeCall turns that into null
    const job = safe.safeCall(function () { return new CronJob(backupConfig.schedulePattern); });
    if (!job) return new BoxError(BoxError.BAD_FIELD, 'Invalid schedule pattern');

    if ('password' in backupConfig) {
        if (typeof backupConfig.password !== 'string') return new BoxError(BoxError.BAD_FIELD, 'password must be a string');
        if (backupConfig.password.length < 8) return new BoxError(BoxError.BAD_FIELD, 'password must be atleast 8 characters');
    }

    const policy = backupConfig.retentionPolicy;
    if (!policy) return new BoxError(BoxError.BAD_FIELD, 'retentionPolicy is required');
    if (!['keepWithinSecs','keepDaily','keepWeekly','keepMonthly','keepYearly'].find(k => !!policy[k])) return new BoxError(BoxError.BAD_FIELD, 'properties missing');
    if ('keepWithinSecs' in policy && typeof policy.keepWithinSecs !== 'number') return new BoxError(BoxError.BAD_FIELD, 'keepWithinSecs must be a number');
    if ('keepDaily' in policy && typeof policy.keepDaily !== 'number') return new BoxError(BoxError.BAD_FIELD, 'keepDaily must be a number');
    if ('keepWeekly' in policy && typeof policy.keepWeekly !== 'number') return new BoxError(BoxError.BAD_FIELD, 'keepWeekly must be a number');
    if ('keepMonthly' in policy && typeof policy.keepMonthly !== 'number') return new BoxError(BoxError.BAD_FIELD, 'keepMonthly must be a number');
    if ('keepYearly' in policy && typeof policy.keepYearly !== 'number') return new BoxError(BoxError.BAD_FIELD, 'keepYearly must be a number');

    // fix: use maybePromisify (like remount does) instead of util.promisify —
    // promisify-ing a provider whose testConfig is already async would wait
    // forever for a callback that is never invoked; also reuse func instead of
    // calling storage.api a second time
    const [error] = await safe(maybePromisify(func.testConfig)(backupConfig));
    return error;
}
|
|
|
|
// this skips password check since that policy is only at creation time
// Asks the storage provider to validate its own settings.
// Returns null/undefined on success, or a BoxError (not thrown) on failure.
async function testProviderConfig(backupConfig) {
    assert.strictEqual(typeof backupConfig, 'object');

    const func = storage.api(backupConfig.provider);
    if (!func) return new BoxError(BoxError.BAD_FIELD, 'unknown storage provider');

    // fix: use maybePromisify (like remount does) instead of util.promisify —
    // promisify-ing a provider whose testConfig is already async would wait
    // forever for a callback that is never invoked; also reuse func instead of
    // calling storage.api a second time
    const [error] = await safe(maybePromisify(func.testConfig)(backupConfig));
    return error;
}
|
|
|
|
// Asks the configured storage provider to remount its backing store (e.g.
// after credential or mount-point changes). auditSource is currently unused
// beyond the type assert. Throws BAD_FIELD for an unknown provider.
async function remount(auditSource) {
    assert.strictEqual(typeof auditSource, 'object');

    const backupConfig = await settings.getBackupConfig();

    const func = storage.api(backupConfig.provider);
    if (!func) throw new BoxError(BoxError.BAD_FIELD, 'unknown storage provider');

    await maybePromisify(func.remount)(backupConfig);
}
|