'use strict';

exports = module.exports = {
    testConfig,
    testProviderConfig,
    getByIdentifierAndStatePaged,
    get,
    startBackupTask,
    restore,
    backupApp,
    downloadApp,
    backupBoxAndApps,
    upload,
    startCleanupTask,
    cleanup,
    cleanupCacheFilesSync,
    injectPrivateFields,
    removePrivateFields,
    checkConfiguration,
    configureCollectd,
    generateEncryptionKeysSync,

    BACKUP_IDENTIFIER_BOX: 'box',

    BACKUP_TYPE_APP: 'app',
    BACKUP_TYPE_BOX: 'box',

    BACKUP_STATE_NORMAL: 'normal', // should rename to created to avoid listing in UI?
    BACKUP_STATE_CREATING: 'creating',
    BACKUP_STATE_ERROR: 'error',

    // for testing
    _getBackupFilePath: getBackupFilePath,
    _restoreFsMetadata: restoreFsMetadata,
    _saveFsMetadata: saveFsMetadata,
    _applyBackupRetentionPolicy: applyBackupRetentionPolicy
};

var addons = require('./addons.js'),
    apps = require('./apps.js'),
    async = require('async'),
    assert = require('assert'),
    backupdb = require('./backupdb.js'),
    BoxError = require('./boxerror.js'),
    collectd = require('./collectd.js'),
    constants = require('./constants.js'),
    CronJob = require('cron').CronJob,
    crypto = require('crypto'),
    database = require('./database.js'),
    DataLayout = require('./datalayout.js'),
    debug = require('debug')('box:backups'),
    ejs = require('ejs'),
    eventlog = require('./eventlog.js'),
    fs = require('fs'),
    locker = require('./locker.js'),
    moment = require('moment'),
    once = require('once'),
    path = require('path'),
    paths = require('./paths.js'),
    progressStream = require('progress-stream'),
    safe = require('safetydance'),
    shell = require('./shell.js'),
    settings = require('./settings.js'),
    syncer = require('./syncer.js'),
    tar = require('tar-fs'),
    tasks = require('./tasks.js'),
    TransformStream = require('stream').Transform,
    util = require('util'),
    zlib = require('zlib'),
    _ = require('underscore');

const BACKUP_UPLOAD_CMD = path.join(__dirname, 'scripts/backupupload.js');
const COLLECTD_CONFIG_EJS = fs.readFileSync(__dirname + '/collectd/cloudron-backup.ejs', { encoding: 'utf8' });

// debug() with the app's fqdn prefixed, so interleaved per-app logs are attributable
function debugApp(app) {
    assert(typeof app === 'object');

    debug(app.fqdn + ' ' + util.format.apply(util, Array.prototype.slice.call(arguments, 1)));
}

// module path of the storage backend implementing each provider. many providers
// are S3-compatible and share the s3 backend; the mount-style providers share
// the filesystem backend
const STORAGE_BACKENDS = {
    'nfs': './storage/filesystem.js',
    'cifs': './storage/filesystem.js',
    'sshfs': './storage/filesystem.js',
    'filesystem': './storage/filesystem.js',
    's3': './storage/s3.js',
    'minio': './storage/s3.js',
    's3-v4-compat': './storage/s3.js',
    'digitalocean-spaces': './storage/s3.js',
    'exoscale-sos': './storage/s3.js',
    'wasabi': './storage/s3.js',
    'scaleway-objectstorage': './storage/s3.js',
    'backblaze-b2': './storage/s3.js',
    'linode-objectstorage': './storage/s3.js',
    'ovh-objectstorage': './storage/s3.js',
    'gcs': './storage/gcs.js',
    'noop': './storage/noop.js'
};

// choose which storage backend we use. for test purpose we use s3.
// returns null for an unknown provider
function api(provider) {
    const backendPath = STORAGE_BACKENDS[provider];
    return backendPath ? require(backendPath) : null;
}

// merge secrets (password-derived encryption keys, provider credentials) from the
// stored config into a config submitted by a client, which carries placeholders
function injectPrivateFields(newConfig, currentConfig) {
    if (!('password' in newConfig)) {
        newConfig.encryption = null;
    } else {
        // placeholder means "keep the existing password"
        if (newConfig.password === constants.SECRET_PLACEHOLDER) delete newConfig.password;
        newConfig.encryption = currentConfig.encryption || null;
    }

    if (newConfig.provider === currentConfig.provider) api(newConfig.provider).injectPrivateFields(newConfig, currentConfig);
}

// strip secrets before the config is returned to a client; the encryption keys
// are replaced by a password placeholder
function removePrivateFields(backupConfig) {
    assert.strictEqual(typeof backupConfig, 'object');

    if (backupConfig.encryption) {
        delete backupConfig.encryption;
        backupConfig.password = constants.SECRET_PLACEHOLDER;
    }

    return api(backupConfig.provider).removePrivateFields(backupConfig);
}
// validates a full backup configuration (provider, format, schedule, password,
// retention policy) and then lets the storage backend verify its own fields.
// on failure the callback gets a BoxError(BAD_FIELD) naming the offending field
function testConfig(backupConfig, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof callback, 'function');

    // FIX: the result of api() was previously computed into an unused variable
    // and then looked up a second time at the end; reuse it
    var storage = api(backupConfig.provider);
    if (!storage) return callback(new BoxError(BoxError.BAD_FIELD, 'unknown storage provider', { field: 'provider' }));

    if (backupConfig.format !== 'tgz' && backupConfig.format !== 'rsync') return callback(new BoxError(BoxError.BAD_FIELD, 'unknown format', { field: 'format' }));

    // CronJob throws on a malformed pattern; safeCall turns that into null
    const job = safe.safeCall(function () { return new CronJob(backupConfig.schedulePattern); });
    if (!job) return callback(new BoxError(BoxError.BAD_FIELD, 'Invalid schedule pattern', { field: 'schedulePattern' }));

    if ('password' in backupConfig) {
        if (typeof backupConfig.password !== 'string') return callback(new BoxError(BoxError.BAD_FIELD, 'password must be a string', { field: 'password' }));
        // FIX: typo "atleast" in the user-facing message
        if (backupConfig.password.length < 8) return callback(new BoxError(BoxError.BAD_FIELD, 'password must be at least 8 characters', { field: 'password' }));
    }

    const policy = backupConfig.retentionPolicy;
    if (!policy) return callback(new BoxError(BoxError.BAD_FIELD, 'retentionPolicy is required', { field: 'retentionPolicy' }));

    const RETENTION_KEYS = ['keepWithinSecs', 'keepDaily', 'keepWeekly', 'keepMonthly', 'keepYearly'];
    // at least one retention property must be set. NOTE: a key explicitly set to 0
    // still counts as "missing" here (truthy check) - preserved existing behavior.
    // (was .find(), which is an any-check in disguise; .some() states the intent)
    if (!RETENTION_KEYS.some(k => !!policy[k])) return callback(new BoxError(BoxError.BAD_FIELD, 'properties missing', { field: 'retentionPolicy' }));

    for (const key of RETENTION_KEYS) {
        if (key in policy && typeof policy[key] !== 'number') return callback(new BoxError(BoxError.BAD_FIELD, `${key} must be a number`, { field: 'retentionPolicy' }));
    }

    storage.testConfig(backupConfig, callback);
}

// like testConfig() but only validates provider-specific fields.
// this skips password check since that policy is only at creation time
function testProviderConfig(backupConfig, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof callback, 'function');

    var storage = api(backupConfig.provider);
    if (!storage) return callback(new BoxError(BoxError.BAD_FIELD, 'unknown storage provider', { field: 'provider' }));

    storage.testConfig(backupConfig, callback);
}

// derives the four 32-byte AES/HMAC keys (for file data and for filenames) from
// the user password. the scrypt salt is fixed so the same password always yields
// the same keys (required to re-open existing backups)
function generateEncryptionKeysSync(password) {
    assert.strictEqual(typeof password, 'string');

    const aesKeys = crypto.scryptSync(password, Buffer.from('CLOUDRONSCRYPTSALT', 'utf8'), 128); // 4 keys of 32 bytes each

    return {
        dataKey: aesKeys.slice(0, 32).toString('hex'),
        dataHmacKey: aesKeys.slice(32, 64).toString('hex'),
        filenameKey: aesKeys.slice(64, 96).toString('hex'),
        filenameHmacKey: aesKeys.slice(96).toString('hex')
    };
}

// lists backup records of an app or of the box (identifier) in the given state, paged
function getByIdentifierAndStatePaged(identifier, state, page, perPage, callback) {
    assert.strictEqual(typeof identifier, 'string');
    assert.strictEqual(typeof state, 'string');
    assert(typeof page === 'number' && page > 0);
    assert(typeof perPage === 'number' && perPage > 0);
    assert.strictEqual(typeof callback, 'function');

    backupdb.getByIdentifierAndStatePaged(identifier, state, page, perPage, function (error, results) {
        if (error) return callback(error);

        callback(null, results);
    });
}
// fetches a single backup record by id
function get(backupId, callback) {
    assert.strictEqual(typeof backupId, 'string');
    assert.strictEqual(typeof callback, 'function');

    backupdb.get(backupId, function (error, result) {
        if (error) return callback(error);

        callback(null, result);
    });
}

// This is not part of the storage api, since we don't want to pull the "format" logistics into that.
// tgz backups are a single tarball (with .enc suffix when encrypted); rsync backups are a directory
function getBackupFilePath(backupConfig, backupId, format) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof backupId, 'string');
    assert.strictEqual(typeof format, 'string');

    const backupPath = api(backupConfig.provider).getBackupPath(backupConfig);

    if (format === 'tgz') {
        const fileType = backupConfig.encryption ? '.tar.gz.enc' : '.tar.gz';
        return path.join(backupPath, backupId + fileType);
    } else {
        return path.join(backupPath, backupId);
    }
}

// encrypts each path component separately so the directory structure is preserved remotely
function encryptFilePath(filePath, encryption) {
    assert.strictEqual(typeof filePath, 'string');
    assert.strictEqual(typeof encryption, 'object');

    var encryptedParts = filePath.split('/').map(function (part) {
        let hmac = crypto.createHmac('sha256', Buffer.from(encryption.filenameHmacKey, 'hex'));
        const iv = hmac.update(part).digest().slice(0, 16); // iv has to be deterministic, for our sync (copy) logic to work
        const cipher = crypto.createCipheriv('aes-256-cbc', Buffer.from(encryption.filenameKey, 'hex'), iv);
        let crypt = cipher.update(part);
        crypt = Buffer.concat([ iv, crypt, cipher.final() ]);

        return crypt.toString('base64') // ensures path is valid
            .replace(/\//g, '-') // replace '/' of base64 since it conflicts with path separator
            .replace(/=/g,''); // strip trailing = padding. this is only needed if we concat base64 strings, which we don't
    });

    return encryptedParts.join('/');
}

// inverse of encryptFilePath(). returns { result } on success or { error } (BoxError) on failure
function decryptFilePath(filePath, encryption) {
    assert.strictEqual(typeof filePath, 'string');
    assert.strictEqual(typeof encryption, 'object');

    let decryptedParts = [];
    for (let part of filePath.split('/')) {
        // FIX: re-add the '=' padding stripped by encryptFilePath. the old formula
        // Array(len % 4).join('=') produced one pad too few (len%4==2) or too many
        // (len%4==3); Node's base64 decoder is lenient so decoding was unaffected,
        // but the correct pad count is (4 - len % 4) % 4
        part = part + '='.repeat((4 - part.length % 4) % 4);
        part = part.replace(/-/g, '/'); // replace with '/'

        try {
            const buffer = Buffer.from(part, 'base64');
            const iv = buffer.slice(0, 16);
            let decrypt = crypto.createDecipheriv('aes-256-cbc', Buffer.from(encryption.filenameKey, 'hex'), iv);
            const plainText = decrypt.update(buffer.slice(16));
            const plainTextString = Buffer.concat([ plainText, decrypt.final() ]).toString('utf8');

            // the iv is the HMAC of the plaintext component; verifying it detects a wrong key or tampering
            const hmac = crypto.createHmac('sha256', Buffer.from(encryption.filenameHmacKey, 'hex'));
            if (!hmac.update(plainTextString).digest().slice(0, 16).equals(iv)) return { error: new BoxError(BoxError.CRYPTO_ERROR, `mac error decrypting part ${part} of path ${filePath}`) };

            decryptedParts.push(plainTextString);
        } catch (error) {
            debug(`Error decrypting part ${part} of path ${filePath}:`, error);
            return { error: new BoxError(BoxError.CRYPTO_ERROR, `Error decrypting part ${part} of path ${filePath}: ${error.message}`) };
        }
    }

    return { result: decryptedParts.join('/') };
}
// encrypted file format: 'CBV2' magic | 16-byte random IV | AES-256-CBC ciphertext | 32-byte HMAC-SHA256 trailer.
// the HMAC covers magic, IV and ciphertext (encrypt-then-mac)
class EncryptStream extends TransformStream {
    constructor(encryption) {
        super();

        this._headerPushed = false;
        this._iv = crypto.randomBytes(16);
        this._cipher = crypto.createCipheriv('aes-256-cbc', Buffer.from(encryption.dataKey, 'hex'), this._iv);
        this._hmac = crypto.createHmac('sha256', Buffer.from(encryption.dataHmacKey, 'hex'));
    }

    pushHeaderIfNeeded() {
        if (this._headerPushed) return;

        const magic = Buffer.from('CBV2');
        this.push(magic);
        this._hmac.update(magic);
        this.push(this._iv);
        this._hmac.update(this._iv);
        this._headerPushed = true;
    }

    _transform(chunk, ignoredEncoding, callback) {
        this.pushHeaderIfNeeded();

        try {
            const crypt = this._cipher.update(chunk);
            this._hmac.update(crypt);
            callback(null, crypt);
        } catch (error) {
            callback(error);
        }
    }

    _flush(callback) {
        try {
            this.pushHeaderIfNeeded(); // for 0-length files

            const crypt = this._cipher.final();
            this.push(crypt);
            this._hmac.update(crypt);
            callback(null, this._hmac.digest()); // +32 bytes
        } catch (error) {
            callback(error);
        }
    }
}

// inverse of EncryptStream. always holds back the last 32 bytes seen, because the
// HMAC trailer can only be identified once the input ends
class DecryptStream extends TransformStream {
    constructor(encryption) {
        super();

        this._key = Buffer.from(encryption.dataKey, 'hex');
        this._header = Buffer.alloc(0); // accumulates magic + iv
        this._decipher = null;
        this._hmac = crypto.createHmac('sha256', Buffer.from(encryption.dataHmacKey, 'hex'));
        this._buffer = Buffer.alloc(0); // tail bytes that may turn out to be the hmac trailer
    }

    _transform(chunk, ignoredEncoding, callback) {
        const needed = 20 - this._header.length; // 4 for magic, 16 for iv

        if (this._header.length !== 20) { // not gotten header yet
            this._header = Buffer.concat([this._header, chunk.slice(0, needed)]);
            if (this._header.length !== 20) return callback();

            // FIX: was `new Buffer.from('CBV2')` - invoking the factory with `new`
            // (deprecated Buffer-constructor pattern); Buffer.from() is the correct call
            if (!this._header.slice(0, 4).equals(Buffer.from('CBV2'))) return callback(new BoxError(BoxError.CRYPTO_ERROR, 'Invalid magic in header'));

            const iv = this._header.slice(4);
            this._decipher = crypto.createDecipheriv('aes-256-cbc', this._key, iv);
            this._hmac.update(this._header);
        }

        this._buffer = Buffer.concat([ this._buffer, chunk.slice(needed) ]);
        if (this._buffer.length < 32) return callback(); // hmac trailer length is 32

        try {
            const cipherText = this._buffer.slice(0, -32);
            this._hmac.update(cipherText);
            const plainText = this._decipher.update(cipherText);
            this._buffer = this._buffer.slice(-32);
            callback(null, plainText);
        } catch (error) {
            callback(error);
        }
    }

    _flush(callback) {
        if (this._buffer.length !== 32) return callback(new BoxError(BoxError.CRYPTO_ERROR, 'Invalid password or tampered file (not enough data)'));

        try {
            if (!this._hmac.digest().equals(this._buffer)) return callback(new BoxError(BoxError.CRYPTO_ERROR, 'Invalid password or tampered file (mac mismatch)'));

            const plainText = this._decipher.final();
            callback(null, plainText);
        } catch (error) {
            callback(error);
        }
    }
}

// returns a progress-stream wrapping sourceFile, optionally encrypting it.
// fs/crypto errors are re-emitted as BoxError on the returned stream
function createReadStream(sourceFile, encryption) {
    assert.strictEqual(typeof sourceFile, 'string');
    assert.strictEqual(typeof encryption, 'object'); // note: null also passes (typeof null === 'object') and means "no encryption"

    var stream = fs.createReadStream(sourceFile);
    var ps = progressStream({ time: 10000 }); // display a progress every 10 seconds

    stream.on('error', function (error) {
        debug(`createReadStream: read stream error at ${sourceFile}`, error);
        ps.emit('error', new BoxError(BoxError.FS_ERROR, `Error reading ${sourceFile}: ${error.message}`));
    });

    if (encryption) {
        let encryptStream = new EncryptStream(encryption);
        encryptStream.on('error', function (error) {
            debug(`createReadStream: encrypt stream error ${sourceFile}`, error);
            ps.emit('error', new BoxError(BoxError.CRYPTO_ERROR, `Encryption error at ${sourceFile}: ${error.message}`));
        });

        return stream.pipe(encryptStream).pipe(ps);
    } else {
        return stream.pipe(ps);
    }
}

// returns a progress-stream that writes (and optionally decrypts) into destFile.
// emits 'done' when the underlying file write has fully finished
function createWriteStream(destFile, encryption) {
    assert.strictEqual(typeof destFile, 'string');
    assert.strictEqual(typeof encryption, 'object'); // note: null also passes and means "no encryption"

    var stream = fs.createWriteStream(destFile);
    var ps = progressStream({ time: 10000 }); // display a progress every 10 seconds

    stream.on('error', function (error) {
        debug(`createWriteStream: write stream error ${destFile}`, error);
        ps.emit('error', new BoxError(BoxError.FS_ERROR, `Write error ${destFile}: ${error.message}`));
    });

    stream.on('finish', function () {
        debug('createWriteStream: done.');
        // we use a separate event because ps is a through2 stream which emits 'finish' event indicating end of inStream and not write
        ps.emit('done');
    });

    if (encryption) {
        let decrypt = new DecryptStream(encryption);
        decrypt.on('error', function (error) {
            debug(`createWriteStream: decrypt stream error ${destFile}`, error);
            ps.emit('error', new BoxError(BoxError.CRYPTO_ERROR, `Decryption error at ${destFile}: ${error.message}`));
        });

        ps.pipe(decrypt).pipe(stream);
    } else {
        ps.pipe(stream);
    }

    return ps;
}
// builds a gzipped (and optionally encrypted) tar stream of all local paths in the
// data layout. returns (via callback) a progress-stream wrapping the final output
function tarPack(dataLayout, encryption, callback) {
    assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
    assert.strictEqual(typeof encryption, 'object'); // null means "no encryption"
    assert.strictEqual(typeof callback, 'function');

    var pack = tar.pack('/', {
        dereference: false, // pack the symlink and not what it points to
        entries: dataLayout.localPaths(),
        ignoreStatError: (path, err) => {
            debug(`tarPack: error stat'ing ${path} - ${err.code}`);
            return err.code === 'ENOENT'; // ignore if file or dir got removed (probably some temporary file)
        },
        map: function(header) {
            header.name = dataLayout.toRemotePath(header.name);
            // the tar pax format allows us to encode filenames > 100 and size > 8GB (see #640)
            // https://www.systutorials.com/docs/linux/man/5-star/
            // FIX: was `header.name > 99` - a string/number comparison that coerces the
            // name to NaN and is therefore always false, so long names never got a pax header
            if (header.size > 8589934590 || header.name.length > 99) header.pax = { size: header.size };
            return header;
        },
        strict: false // do not error for unknown types (skip fifo, char/block devices)
    });

    var gzip = zlib.createGzip({});
    var ps = progressStream({ time: 10000 }); // emit 'progress' every 10 seconds

    pack.on('error', function (error) {
        debug('tarPack: tar stream error.', error);
        ps.emit('error', new BoxError(BoxError.EXTERNAL_ERROR, error.message));
    });

    gzip.on('error', function (error) {
        debug('tarPack: gzip stream error.', error);
        ps.emit('error', new BoxError(BoxError.EXTERNAL_ERROR, error.message));
    });

    if (encryption) {
        const encryptStream = new EncryptStream(encryption);
        encryptStream.on('error', function (error) {
            debug('tarPack: encrypt stream error.', error);
            ps.emit('error', new BoxError(BoxError.EXTERNAL_ERROR, error.message));
        });

        pack.pipe(gzip).pipe(encryptStream).pipe(ps);
    } else {
        pack.pipe(gzip).pipe(ps);
    }

    return callback(null, ps);
}

// mirrors the data layout to remote storage (rsync format): adds/removes files and
// directories as computed by syncer, with retries and encrypted remote paths
function sync(backupConfig, backupId, dataLayout, progressCallback, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof backupId, 'string');
    assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    // the number here has to take into account the s3.upload partSize (which is 10MB). So 20=200MB
    const concurrency = backupConfig.syncConcurrency || (backupConfig.provider === 's3' ? 20 : 10);

    syncer.sync(dataLayout, function processTask(task, iteratorCallback) {
        debug('sync: processing task: %j', task);
        // the empty task.path is special to signify the directory
        const destPath = task.path && backupConfig.encryption ? encryptFilePath(task.path, backupConfig.encryption) : task.path;
        const backupFilePath = path.join(getBackupFilePath(backupConfig, backupId, backupConfig.format), destPath);

        if (task.operation === 'removedir') {
            debug(`Removing directory ${backupFilePath}`);
            return api(backupConfig.provider).removeDir(backupConfig, backupFilePath)
                .on('progress', (message) => progressCallback({ message }))
                .on('done', iteratorCallback);
        } else if (task.operation === 'remove') {
            debug(`Removing ${backupFilePath}`);
            return api(backupConfig.provider).remove(backupConfig, backupFilePath, iteratorCallback);
        }

        var retryCount = 0;
        async.retry({ times: 5, interval: 20000 }, function (retryCallback) {
            retryCallback = once(retryCallback); // protect again upload() erroring much later after read stream error
            ++retryCount;
            if (task.operation === 'add') {
                progressCallback({ message: `Adding ${task.path}` + (retryCount > 1 ? ` (Try ${retryCount})` : '') });
                debug(`Adding ${task.path} position ${task.position} try ${retryCount}`);
                var stream = createReadStream(dataLayout.toLocalPath('./' + task.path), backupConfig.encryption);
                stream.on('error', function (error) {
                    debug(`read stream error for ${task.path}: ${error.message}`);
                    retryCallback();
                }); // ignore error if file disappears
                stream.on('progress', function (progress) {
                    const transferred = Math.round(progress.transferred/1024/1024), speed = Math.round(progress.speed/1024/1024);
                    if (!transferred && !speed) return progressCallback({ message: `Uploading ${task.path}` }); // 0M@0MBps looks wrong
                    progressCallback({ message: `Uploading ${task.path}: ${transferred}M@${speed}MBps` });
                });
                api(backupConfig.provider).upload(backupConfig, backupFilePath, stream, function (error) {
                    debug(error ? `Error uploading ${task.path} try ${retryCount}: ${error.message}` : `Uploaded ${task.path}`);
                    retryCallback(error);
                });
            }
        }, iteratorCallback);
    }, concurrency, function (error) {
        if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, error.message));

        callback();
    });
}

// records empty directories and executable files in fsmetadata.json; per-file sync
// cannot represent these remotely. contains paths prefixed with './'
// this is not part of 'snapshotting' because we need root access to traverse
function saveFsMetadata(dataLayout, metadataFile, callback) {
    assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
    assert.strictEqual(typeof metadataFile, 'string');
    assert.strictEqual(typeof callback, 'function');

    let metadata = {
        emptyDirs: [],
        execFiles: []
    };

    for (let lp of dataLayout.localPaths()) {
        // FIX: quote the path so whitespace/shell metacharacters in it cannot break
        // (or be interpreted by) the shell command
        var emptyDirs = safe.child_process.execSync(`find '${lp}' -type d -empty`, { encoding: 'utf8' });
        if (emptyDirs === null) return callback(safe.error);
        if (emptyDirs.length) metadata.emptyDirs = metadata.emptyDirs.concat(emptyDirs.trim().split('\n').map((ed) => dataLayout.toRemotePath(ed)));

        var execFiles = safe.child_process.execSync(`find '${lp}' -type f -executable`, { encoding: 'utf8' });
        if (execFiles === null) return callback(safe.error);
        if (execFiles.length) metadata.execFiles = metadata.execFiles.concat(execFiles.trim().split('\n').map((ef) => dataLayout.toRemotePath(ef)));
    }

    if (!safe.fs.writeFileSync(metadataFile, JSON.stringify(metadata, null, 4))) return callback(safe.error);

    callback();
}
// uploads a snapshot to storage. tgz format streams a single tarball (with retries);
// rsync format first saves fs metadata and then syncs file-by-file.
// this function is called via backupupload (since it needs root to traverse app's directory)
function upload(backupId, format, dataLayoutString, progressCallback, callback) {
    assert.strictEqual(typeof backupId, 'string');
    assert.strictEqual(typeof format, 'string');
    assert.strictEqual(typeof dataLayoutString, 'string');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    debug(`upload: id ${backupId} format ${format} dataLayout ${dataLayoutString}`);

    const dataLayout = DataLayout.fromString(dataLayoutString);

    settings.getBackupConfig(function (error, backupConfig) {
        if (error) return callback(error);

        api(backupConfig.provider).checkPreconditions(backupConfig, dataLayout, function (error) {
            if (error) return callback(error);

            if (format !== 'tgz') { // rsync format
                async.series([
                    saveFsMetadata.bind(null, dataLayout, `${dataLayout.localRoot()}/fsmetadata.json`),
                    sync.bind(null, backupConfig, backupId, dataLayout, progressCallback)
                ], callback);
                return;
            }

            async.retry({ times: 5, interval: 20000 }, function (retryCallback) {
                retryCallback = once(retryCallback); // protect again upload() erroring much later after tar stream error

                tarPack(dataLayout, backupConfig.encryption, function (error, tarStream) {
                    if (error) return retryCallback(error);

                    tarStream.on('progress', function (progress) {
                        const transferredMb = Math.round(progress.transferred/1024/1024);
                        const speedMbps = Math.round(progress.speed/1024/1024);
                        if (!transferredMb && !speedMbps) return progressCallback({ message: 'Uploading backup' }); // 0M@0MBps looks wrong
                        progressCallback({ message: `Uploading backup ${transferredMb}M@${speedMbps}MBps` });
                    });
                    tarStream.on('error', retryCallback); // already returns BoxError

                    api(backupConfig.provider).upload(backupConfig, getBackupFilePath(backupConfig, backupId, format), tarStream, retryCallback);
                });
            }, callback);
        });
    });
}

// extracts a (possibly encrypted) gzipped tarball stream into the data layout.
// returns (via callback) a progress-stream that emits 'done' when extraction finished
function tarExtract(inStream, dataLayout, encryption, callback) {
    assert.strictEqual(typeof inStream, 'object');
    assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
    assert.strictEqual(typeof encryption, 'object'); // null means "no encryption"
    assert.strictEqual(typeof callback, 'function');

    var gunzip = zlib.createGunzip({});
    var ps = progressStream({ time: 10000 }); // display a progress every 10 seconds
    var extract = tar.extract('/', {
        map: function (header) {
            header.name = dataLayout.toLocalPath(header.name);
            return header;
        },
        dmode: 500 // NOTE(review): decimal 500 == 0o764; comment said "ensure directory is writable" - confirm whether octal 0o500 was intended
    });

    // destroy the input and surface exactly one error on the returned stream
    const emitError = once((error) => {
        inStream.destroy();
        ps.emit('error', error);
    });

    inStream.on('error', function (error) {
        debug('tarExtract: input stream error.', error);
        emitError(new BoxError(BoxError.EXTERNAL_ERROR, error.message));
    });

    gunzip.on('error', function (error) {
        debug('tarExtract: gunzip stream error.', error);
        emitError(new BoxError(BoxError.EXTERNAL_ERROR, error.message));
    });

    extract.on('error', function (error) {
        debug('tarExtract: extract stream error.', error);
        emitError(new BoxError(BoxError.EXTERNAL_ERROR, error.message));
    });

    extract.on('finish', function () {
        debug('tarExtract: done.');
        // we use a separate event because ps is a through2 stream which emits 'finish' event indicating end of inStream and not extract
        ps.emit('done');
    });

    if (encryption) {
        let decrypt = new DecryptStream(encryption);
        decrypt.on('error', function (error) {
            debug('tarExtract: decrypt stream error.', error);
            emitError(new BoxError(BoxError.EXTERNAL_ERROR, `Failed to decrypt: ${error.message}`));
        });

        inStream.pipe(ps).pipe(decrypt).pipe(gunzip).pipe(extract);
    } else {
        inStream.pipe(ps).pipe(gunzip).pipe(extract);
    }

    callback(null, ps);
}
// re-applies fsmetadata.json after an rsync-format download: recreates empty
// directories and restores the executable bit (0755) on recorded files
function restoreFsMetadata(dataLayout, metadataFile, callback) {
    assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
    assert.strictEqual(typeof metadataFile, 'string');
    assert.strictEqual(typeof callback, 'function');

    debug(`Recreating empty directories in ${dataLayout.toString()}`);

    var metadataJson = safe.fs.readFileSync(metadataFile, 'utf8');
    if (metadataJson === null) return callback(new BoxError(BoxError.EXTERNAL_ERROR, 'Error loading fsmetadata.json:' + safe.error.message));

    var metadata = safe.JSON.parse(metadataJson);
    if (metadata === null) return callback(new BoxError(BoxError.EXTERNAL_ERROR, 'Error parsing fsmetadata.json:' + safe.error.message));

    async.eachSeries(metadata.emptyDirs, function createPath(emptyDir, iteratorDone) {
        fs.mkdir(dataLayout.toLocalPath(emptyDir), { recursive: true }, iteratorDone);
    }, function (error) {
        if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, `unable to create path: ${error.message}`));

        async.eachSeries(metadata.execFiles, function createPath(execFile, iteratorDone) {
            fs.chmod(dataLayout.toLocalPath(execFile), parseInt('0755', 8), iteratorDone);
        }, function (error) {
            if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, `unable to chmod: ${error.message}`));

            callback();
        });
    });
}

// downloads an rsync-format backup directory file-by-file into the data layout,
// decrypting remote file names and contents when encryption is configured
function downloadDir(backupConfig, backupFilePath, dataLayout, progressCallback, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof backupFilePath, 'string');
    assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    debug(`downloadDir: ${backupFilePath} to ${dataLayout.toString()}`);

    function downloadFile(entry, done) {
        let relativePath = path.relative(backupFilePath, entry.fullPath);
        if (backupConfig.encryption) {
            const { error, result } = decryptFilePath(relativePath, backupConfig.encryption);
            if (error) return done(new BoxError(BoxError.CRYPTO_ERROR, 'Unable to decrypt file'));
            relativePath = result;
        }

        const destFilePath = dataLayout.toLocalPath('./' + relativePath);

        fs.mkdir(path.dirname(destFilePath), { recursive: true }, function (error) {
            if (error) return done(new BoxError(BoxError.FS_ERROR, error.message));

            async.retry({ times: 5, interval: 20000 }, function (retryCallback) {
                api(backupConfig.provider).download(backupConfig, entry.fullPath, function (error, sourceStream) {
                    if (error) {
                        progressCallback({ message: `Download ${entry.fullPath} to ${destFilePath} errored: ${error.message}` });
                        return retryCallback(error);
                    }

                    let destStream = createWriteStream(destFilePath, backupConfig.encryption);

                    // protect against multiple errors. must destroy the write stream so that a previous retry does not write
                    let closeAndRetry = once((error) => {
                        if (error) progressCallback({ message: `Download ${entry.fullPath} to ${destFilePath} errored: ${error.message}` });
                        else progressCallback({ message: `Download ${entry.fullPath} to ${destFilePath} finished` });

                        sourceStream.destroy();
                        destStream.destroy();
                        retryCallback(error);
                    });

                    destStream.on('progress', function (progress) {
                        const transferredMb = Math.round(progress.transferred/1024/1024);
                        const speedMbps = Math.round(progress.speed/1024/1024);
                        if (!transferredMb && !speedMbps) return progressCallback({ message: `Downloading ${entry.fullPath}` }); // 0M@0MBps looks wrong
                        progressCallback({ message: `Downloading ${entry.fullPath}: ${transferredMb}M@${speedMbps}MBps` });
                    });
                    destStream.on('error', closeAndRetry);
                    sourceStream.on('error', closeAndRetry);

                    progressCallback({ message: `Downloading ${entry.fullPath} to ${destFilePath}` });

                    sourceStream.pipe(destStream, { end: true }).on('done', closeAndRetry);
                });
            }, done);
        });
    }

    api(backupConfig.provider).listDir(backupConfig, backupFilePath, 1000, function (entries, iteratorDone) {
        // https://www.digitalocean.com/community/questions/rate-limiting-on-spaces?answer=40441
        const concurrency = backupConfig.downloadConcurrency || (backupConfig.provider === 's3' ? 30 : 10);
        async.eachLimit(entries, concurrency, downloadFile, iteratorDone);
    }, callback);
}

// downloads a backup into the data layout, dispatching on format: tgz extracts a
// tarball (with retries), rsync downloads per-file and restores fs metadata
function download(backupConfig, backupId, format, dataLayout, progressCallback, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof backupId, 'string');
    assert.strictEqual(typeof format, 'string');
    assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    debug(`download: Downloading ${backupId} of format ${format} to ${dataLayout.toString()}`);

    const backupFilePath = getBackupFilePath(backupConfig, backupId, format);

    if (format !== 'tgz') { // rsync format
        downloadDir(backupConfig, backupFilePath, dataLayout, progressCallback, function (error) {
            if (error) return callback(error);

            restoreFsMetadata(dataLayout, `${dataLayout.localRoot()}/fsmetadata.json`, callback);
        });
        return;
    }

    async.retry({ times: 5, interval: 20000 }, function (retryCallback) {
        api(backupConfig.provider).download(backupConfig, backupFilePath, function (error, sourceStream) {
            if (error) return retryCallback(error);

            tarExtract(sourceStream, dataLayout, backupConfig.encryption, function (error, ps) {
                if (error) return retryCallback(error);

                ps.on('progress', function (progress) {
                    const transferredMb = Math.round(progress.transferred/1024/1024);
                    const speedMbps = Math.round(progress.speed/1024/1024);
                    if (!transferredMb && !speedMbps) return progressCallback({ message: 'Downloading backup' }); // 0M@0MBps looks wrong
                    progressCallback({ message: `Downloading ${transferredMb}M@${speedMbps}MBps` });
                });
                ps.on('error', retryCallback);
                ps.on('done', retryCallback);
            });
        });
    }, callback);
}
// restores the box itself: downloads the backup into BOX_DATA_DIR, imports the
// database dump and reinitializes the settings cache
function restore(backupConfig, backupId, progressCallback, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof backupId, 'string');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    const dataLayout = new DataLayout(paths.BOX_DATA_DIR, []);

    download(backupConfig, backupId, backupConfig.format, dataLayout, progressCallback, function (error) {
        if (error) return callback(error);

        debug('restore: download completed, importing database');

        database.importFromFile(`${dataLayout.localRoot()}/box.mysqldump`, function (error) {
            if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));

            debug('restore: database imported');

            settings.initCache(callback);
        });
    });
}

// downloads an app backup into the app's data directory. restoreConfig may carry
// its own backupConfig (e.g. when importing); otherwise the current settings are used
function downloadApp(app, restoreConfig, progressCallback, callback) {
    assert.strictEqual(typeof app, 'object');
    assert.strictEqual(typeof restoreConfig, 'object');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    const appDataDir = safe.fs.realpathSync(path.join(paths.APPS_DATA_DIR, app.id));
    if (!appDataDir) return callback(safe.error);

    const dataLayout = new DataLayout(appDataDir, app.dataDir ? [{ localDir: app.dataDir, remoteDir: 'data' }] : []);
    const startTime = new Date();

    const getBackupConfigFunc = restoreConfig.backupConfig ? (next) => next(null, restoreConfig.backupConfig) : settings.getBackupConfig;

    getBackupConfigFunc(function (error, backupConfig) {
        if (error) return callback(error);

        download(backupConfig, restoreConfig.backupId, restoreConfig.backupFormat, dataLayout, progressCallback, function (error) {
            debug('downloadApp: time: %s', (new Date() - startTime)/1000);
            callback(error);
        });
    });
}

// runs the privileged backupupload script via sudo and relays its IPC progress
// messages. exit code 50 means "exited with an error string"; any other non-zero
// exit (or a signal) means the task crashed
function runBackupUpload(uploadConfig, progressCallback, callback) {
    assert.strictEqual(typeof uploadConfig, 'object');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    const { backupId, format, dataLayout, progressTag } = uploadConfig;
    assert.strictEqual(typeof backupId, 'string');
    assert.strictEqual(typeof format, 'string');
    assert.strictEqual(typeof progressTag, 'string');
    assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');

    let result = ''; // the script communicates error result as a string

    shell.sudo(`backup-${backupId}`, [ BACKUP_UPLOAD_CMD, backupId, format, dataLayout.toString() ], { preserveEnv: true, ipc: true }, function (error) {
        if (error && (error.code === null /* signal */ || (error.code !== 0 && error.code !== 50))) { // backuptask crashed
            return callback(new BoxError(BoxError.INTERNAL_ERROR, 'Backuptask crashed'));
        } else if (error && error.code === 50) { // exited with error
            return callback(new BoxError(BoxError.EXTERNAL_ERROR, result));
        }
        callback();
    }).on('message', function (progress) { // this is { message } or { result }
        if ('message' in progress) return progressCallback({ message: `${progress.message} (${progressTag})` });

        debug(`runBackupUpload: result - ${JSON.stringify(progress)}`);
        result = progress.result;
    });
}

// reads the snapshot info (timestamp, format) recorded for an app id or 'box'.
// returns {} when the file is missing/unparseable or the id has no entry
function getSnapshotInfo(id) {
    assert.strictEqual(typeof id, 'string');

    var contents = safe.fs.readFileSync(paths.SNAPSHOT_INFO_FILE, 'utf8');
    var info = safe.JSON.parse(contents);
    if (!info) return { };
    return info[id] || { };
}

// records (or, when info is null, removes) the snapshot info entry for an id.
// note: assert passes for null info since typeof null === 'object'
function setSnapshotInfo(id, info, callback) {
    assert.strictEqual(typeof id, 'string');
    assert.strictEqual(typeof info, 'object');
    assert.strictEqual(typeof callback, 'function');

    var contents = safe.fs.readFileSync(paths.SNAPSHOT_INFO_FILE, 'utf8');
    var data = safe.JSON.parse(contents) || { };

    if (info) data[id] = info; else delete data[id];

    if (!safe.fs.writeFileSync(paths.SNAPSHOT_INFO_FILE, JSON.stringify(data, null, 4), 'utf8')) {
        return callback(new BoxError(BoxError.FS_ERROR, safe.error.message));
    }

    callback();
}
assert.strictEqual(typeof id, 'string'); assert.strictEqual(typeof info, 'object'); assert.strictEqual(typeof callback, 'function'); var contents = safe.fs.readFileSync(paths.SNAPSHOT_INFO_FILE, 'utf8'); var data = safe.JSON.parse(contents) || { }; if (info) data[id] = info; else delete data[id]; if (!safe.fs.writeFileSync(paths.SNAPSHOT_INFO_FILE, JSON.stringify(data, null, 4), 'utf8')) { return callback(new BoxError(BoxError.FS_ERROR, safe.error.message)); } callback(); } function snapshotBox(progressCallback, callback) { assert.strictEqual(typeof progressCallback, 'function'); assert.strictEqual(typeof callback, 'function'); progressCallback({ message: 'Snapshotting box' }); const startTime = new Date(); database.exportToFile(`${paths.BOX_DATA_DIR}/box.mysqldump`, function (error) { if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error)); debug(`snapshotBox: took ${(new Date() - startTime)/1000} seconds`); return callback(); }); } function uploadBoxSnapshot(backupConfig, progressCallback, callback) { assert.strictEqual(typeof backupConfig, 'object'); assert.strictEqual(typeof progressCallback, 'function'); assert.strictEqual(typeof callback, 'function'); snapshotBox(progressCallback, function (error) { if (error) return callback(error); const boxDataDir = safe.fs.realpathSync(paths.BOX_DATA_DIR); if (!boxDataDir) return callback(safe.error); const uploadConfig = { backupId: 'snapshot/box', format: backupConfig.format, dataLayout: new DataLayout(boxDataDir, []), progressTag: 'box' }; progressCallback({ message: 'Uploading box snapshot' }); const startTime = new Date(); runBackupUpload(uploadConfig, progressCallback, function (error) { if (error) return callback(error); debug(`uploadBoxSnapshot: took ${(new Date() - startTime)/1000} seconds`); setSnapshotInfo('box', { timestamp: new Date().toISOString(), format: backupConfig.format }, callback); }); }); } function rotateBoxBackup(backupConfig, tag, appBackupIds, progressCallback, callback) { 
assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof tag, 'string');
    assert(Array.isArray(appBackupIds));
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    var snapshotInfo = getSnapshotInfo('box');
    // NOTE(review): assumes a box snapshot was uploaded before (snapshotInfo.timestamp
    // would be undefined otherwise) — confirm all callers run uploadBoxSnapshot first
    const snapshotTime = snapshotInfo.timestamp.replace(/[T.]/g, '-').replace(/[:Z]/g,''); // add this to filename to make it unique, so it's easy to download them
    const backupId = util.format('%s/box_%s_v%s', tag, snapshotTime, constants.VERSION);
    const format = backupConfig.format;

    debug(`Rotating box backup to id ${backupId}`);

    const data = {
        encryptionVersion: backupConfig.encryption ? 2 : null,
        packageVersion: constants.VERSION,
        type: exports.BACKUP_TYPE_BOX,
        state: exports.BACKUP_STATE_CREATING,
        identifier: 'box',
        dependsOn: appBackupIds,
        manifest: null,
        format: format
    };

    backupdb.add(backupId, data, function (error) {
        if (error) return callback(error);

        var copy = api(backupConfig.provider).copy(backupConfig, getBackupFilePath(backupConfig, 'snapshot/box', format), getBackupFilePath(backupConfig, backupId, format));
        copy.on('progress', (message) => progressCallback({ message: `box: ${message}` }));
        copy.on('done', function (copyBackupError) {
            const state = copyBackupError ? exports.BACKUP_STATE_ERROR : exports.BACKUP_STATE_NORMAL;

            backupdb.update(backupId, { state }, function (error) {
                if (copyBackupError) return callback(copyBackupError);
                if (error) return callback(error);

                debug(`Rotated box backup successfully as id ${backupId}`);

                callback(null, backupId);
            });
        });
    });
}

// Uploads a fresh box snapshot and rotates it into a tagged backup that records
// the given app backup ids as dependencies.
function backupBoxWithAppBackupIds(appBackupIds, tag, progressCallback, callback) {
    assert(Array.isArray(appBackupIds));
    assert.strictEqual(typeof tag, 'string');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    settings.getBackupConfig(function (error, backupConfig) {
        if (error) return callback(error);

        uploadBoxSnapshot(backupConfig, progressCallback, function (error) {
            if (error) return callback(error);

            rotateBoxBackup(backupConfig, tag, appBackupIds, progressCallback, callback);
        });
    });
}

// Returns whether the app is in a state where its data/addons can be backed up.
function canBackupApp(app) {
    // only backup apps that are installed or specific pending states
    // stopped apps cannot be backed up because addons might be down (redis)
    if (app.runState === apps.RSTATE_STOPPED) return false;

    // we used to check the health here but that doesn't work for stopped apps. it's better to just fail
    // and inform the user if the backup fails and the app addons have not been setup yet.
    return app.installationState === apps.ISTATE_INSTALLED ||
        app.installationState === apps.ISTATE_PENDING_CONFIGURE ||
        app.installationState === apps.ISTATE_PENDING_BACKUP || // called from apptask
        app.installationState === apps.ISTATE_PENDING_UPDATE; // called from apptask
}

// Writes the app's config.json into its data directory and dumps addon data, so
// both are included in the subsequent upload of the app data directory.
function snapshotApp(app, progressCallback, callback) {
    assert.strictEqual(typeof app, 'object');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    const startTime = new Date();

    progressCallback({ message: `Snapshotting app ${app.fqdn}` });

    if (!safe.fs.writeFileSync(path.join(paths.APPS_DATA_DIR, app.id + '/config.json'), JSON.stringify(app))) {
        return callback(new BoxError(BoxError.FS_ERROR, 'Error creating config.json: ' + safe.error.message));
    }

    addons.backupAddons(app, app.manifest.addons, function (error) {
        if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, error.message));

        debugApp(app, `snapshotApp: took ${(new Date() - startTime)/1000} seconds`);

        return callback(null);
    });
}

// Copies 'snapshot/app_<id>' into a timestamped, tagged backup id and tracks it
// in the backup database. options.preserveSecs protects the backup from cleanup.
function rotateAppBackup(backupConfig, app, tag, options, progressCallback, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof app, 'object');
    assert.strictEqual(typeof tag, 'string');
    assert.strictEqual(typeof options, 'object');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    const startTime = new Date();
    var snapshotInfo = getSnapshotInfo(app.id);
    var manifest = snapshotInfo.restoreConfig ?
snapshotInfo.restoreConfig.manifest : snapshotInfo.manifest; // compat const snapshotTime = snapshotInfo.timestamp.replace(/[T.]/g, '-').replace(/[:Z]/g,''); // add this for unique filename which helps when downloading them const backupId = util.format('%s/app_%s_%s_v%s', tag, app.id, snapshotTime, manifest.version); const format = backupConfig.format; debug(`Rotating app backup of ${app.id} to id ${backupId}`); const data = { encryptionVersion: backupConfig.encryption ? 2 : null, packageVersion: manifest.version, type: exports.BACKUP_TYPE_APP, state: exports.BACKUP_STATE_CREATING, identifier: app.id, dependsOn: [ ], manifest, format: format }; backupdb.add(backupId, data, function (error) { if (error) return callback(error); var copy = api(backupConfig.provider).copy(backupConfig, getBackupFilePath(backupConfig, `snapshot/app_${app.id}`, format), getBackupFilePath(backupConfig, backupId, format)); copy.on('progress', (message) => progressCallback({ message: `${message} (${app.fqdn})` })); copy.on('done', function (copyBackupError) { const state = copyBackupError ? exports.BACKUP_STATE_ERROR : exports.BACKUP_STATE_NORMAL; backupdb.update(backupId, { preserveSecs: options.preserveSecs || 0, state }, function (error) { if (copyBackupError) return callback(copyBackupError); if (error) return callback(error); debug(`Rotated app backup of ${app.id} successfully to id ${backupId}. 
Took ${(new Date() - startTime)/1000} seconds`); callback(null, backupId); }); }); }); } function uploadAppSnapshot(backupConfig, app, progressCallback, callback) { assert.strictEqual(typeof backupConfig, 'object'); assert.strictEqual(typeof app, 'object'); assert.strictEqual(typeof progressCallback, 'function'); assert.strictEqual(typeof callback, 'function'); snapshotApp(app, progressCallback, function (error) { if (error) return callback(error); const backupId = util.format('snapshot/app_%s', app.id); const appDataDir = safe.fs.realpathSync(path.join(paths.APPS_DATA_DIR, app.id)); if (!appDataDir) return callback(safe.error); const dataLayout = new DataLayout(appDataDir, app.dataDir ? [{ localDir: app.dataDir, remoteDir: 'data' }] : []); progressCallback({ message: `Uploading app snapshot ${app.fqdn}`}); const uploadConfig = { backupId, format: backupConfig.format, dataLayout, progressTag: app.fqdn }; const startTime = new Date(); runBackupUpload(uploadConfig, progressCallback, function (error) { if (error) return callback(error); debugApp(app, `uploadAppSnapshot: ${backupId} done. 
${(new Date() - startTime)/1000} seconds`); setSnapshotInfo(app.id, { timestamp: new Date().toISOString(), manifest: app.manifest, format: backupConfig.format }, callback); }); }); } function backupAppWithTag(app, tag, options, progressCallback, callback) { assert.strictEqual(typeof app, 'object'); assert.strictEqual(typeof tag, 'string'); assert.strictEqual(typeof options, 'object'); assert.strictEqual(typeof progressCallback, 'function'); assert.strictEqual(typeof callback, 'function'); if (!canBackupApp(app)) { // if we cannot backup, reuse it's most recent backup getByIdentifierAndStatePaged(app.id, exports.BACKUP_STATE_NORMAL, 1, 1, function (error, results) { if (error) return callback(error); if (results.length === 0) return callback(null, null); // no backup to re-use callback(null, results[0].id); }); return; } settings.getBackupConfig(function (error, backupConfig) { if (error) return callback(error); uploadAppSnapshot(backupConfig, app, progressCallback, function (error) { if (error) return callback(error); rotateAppBackup(backupConfig, app, tag, options, progressCallback, callback); }); }); } function backupApp(app, options, progressCallback, callback) { assert.strictEqual(typeof app, 'object'); assert.strictEqual(typeof options, 'object'); assert.strictEqual(typeof progressCallback, 'function'); assert.strictEqual(typeof callback, 'function'); const tag = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,''); debug(`backupApp - Backing up ${app.fqdn} with tag ${tag}`); backupAppWithTag(app, tag, options, progressCallback, callback); } // this function expects you to have a lock. 
Unlike other progressCallback this also has a progress field function backupBoxAndApps(progressCallback, callback) { assert.strictEqual(typeof progressCallback, 'function'); assert.strictEqual(typeof callback, 'function'); const tag = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,''); apps.getAll(function (error, allApps) { if (error) return callback(error); let percent = 1; let step = 100/(allApps.length+2); async.mapSeries(allApps, function iterator(app, iteratorCallback) { progressCallback({ percent: percent, message: `Backing up ${app.fqdn}` }); percent += step; if (!app.enableBackup) { debug(`Skipped backup ${app.fqdn}`); return iteratorCallback(null, null); // nothing to backup } const startTime = new Date(); backupAppWithTag(app, tag, { /* options */ }, (progress) => progressCallback({ percent: percent, message: progress.message }), function (error, backupId) { if (error) { debugApp(app, 'Unable to backup', error); return iteratorCallback(error); } debugApp(app, `Backed up. Took ${(new Date() - startTime)/1000} seconds`); iteratorCallback(null, backupId || null); // clear backupId if is in BAD_STATE and never backed up }); }, function appsBackedUp(error, backupIds) { if (error) return callback(error); backupIds = backupIds.filter(function (id) { return id !== null; }); // remove apps in bad state that were never backed up progressCallback({ percent: percent, message: 'Backing up system data' }); percent += step; backupBoxWithAppBackupIds(backupIds, tag, (progress) => progressCallback({ percent: percent, message: progress.message }), callback); }); }); } function startBackupTask(auditSource, callback) { let error = locker.lock(locker.OP_FULL_BACKUP); if (error) return callback(new BoxError(BoxError.BAD_STATE, `Cannot backup now: ${error.message}`)); settings.getBackupConfig(function (error, backupConfig) { if (error) return callback(error); const memoryLimit = 'memoryLimit' in backupConfig ? 
Math.max(backupConfig.memoryLimit/1024/1024, 400) : 400; tasks.add(tasks.TASK_BACKUP, [ ], function (error, taskId) { if (error) return callback(error); eventlog.add(eventlog.ACTION_BACKUP_START, auditSource, { taskId }); tasks.startTask(taskId, { timeout: 12 * 60 * 60 * 1000 /* 12 hours */, nice: 15, memoryLimit }, function (error, backupId) { locker.unlock(locker.OP_FULL_BACKUP); const errorMessage = error ? error.message : ''; const timedOut = error ? error.code === tasks.ETIMEOUT : false; eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, errorMessage, timedOut, backupId }); }); callback(null, taskId); }); }); } function applyBackupRetentionPolicy(backups, policy, referencedBackupIds) { assert(Array.isArray(backups)); assert.strictEqual(typeof policy, 'object'); assert(Array.isArray(referencedBackupIds)); const now = new Date(); for (const backup of backups) { if (backup.state === exports.BACKUP_STATE_ERROR) { backup.discardReason = 'error'; } else if (backup.state === exports.BACKUP_STATE_CREATING) { if ((now - backup.creationTime) < 48*60*60*1000) backup.keepReason = 'creating'; else backup.discardReason = 'creating-too-long'; } else if (referencedBackupIds.includes(backup.id)) { backup.keepReason = 'reference'; } else if ((now - backup.creationTime) < (backup.preserveSecs * 1000)) { backup.keepReason = 'preserveSecs'; } else if ((now - backup.creationTime < policy.keepWithinSecs * 1000) || policy.keepWithinSecs < 0) { backup.keepReason = 'keepWithinSecs'; } } const KEEP_FORMATS = { keepDaily: 'Y-M-D', keepWeekly: 'Y-W', keepMonthly: 'Y-M', keepYearly: 'Y' }; for (const format of [ 'keepDaily', 'keepWeekly', 'keepMonthly', 'keepYearly' ]) { if (!(format in policy)) continue; const n = policy[format]; // we want to keep "n" backups of format if (!n) continue; // disabled rule let lastPeriod = null, keptSoFar = 0; for (const backup of backups) { if (backup.discardReason) continue; // already discarded for some reason if (backup.keepReason && 
backup.keepReason !== 'reference') continue; // kept for some other reason

            const period = moment(backup.creationTime).format(KEEP_FORMATS[format]);
            if (period === lastPeriod) continue; // already kept for this period
            lastPeriod = period;

            backup.keepReason = backup.keepReason ? `${backup.keepReason}+${format}` : format;

            if (++keptSoFar === n) break;
        }
    }

    if (policy.keepLatest) {
        // keep the first normal backup in the list as a safety net
        let latestNormalBackup = backups.find(b => b.state === exports.BACKUP_STATE_NORMAL);
        if (latestNormalBackup && !latestNormalBackup.keepReason) latestNormalBackup.keepReason = 'latest';
    }

    for (const backup of backups) {
        debug(`applyBackupRetentionPolicy: ${backup.id} ${backup.type} ${backup.keepReason || backup.discardReason || 'unprocessed'}`);
    }
}

// Removes a single backup from storage and from the backup database. All removal
// errors are logged and swallowed — cleanup is deliberately best-effort.
function cleanupBackup(backupConfig, backup, progressCallback, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof backup, 'object');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    var backupFilePath = getBackupFilePath(backupConfig, backup.id, backup.format);

    function done(error) {
        if (error) {
            debug('cleanupBackup: error removing backup %j : %s', backup, error.message);
            return callback();
        }

        // prune empty directory if possible
        api(backupConfig.provider).remove(backupConfig, path.dirname(backupFilePath), function (error) {
            if (error) debug('cleanupBackup: unable to prune backup directory %s : %s', path.dirname(backupFilePath), error.message);

            backupdb.del(backup.id, function (error) {
                if (error) debug('cleanupBackup: error removing from database', error);
                else debug('cleanupBackup: removed %s', backup.id);

                callback();
            });
        });
    }

    // tgz backups are a single file; other formats are a directory tree
    if (backup.format ==='tgz') {
        progressCallback({ message: `${backup.id}: Removing ${backupFilePath}`});
        api(backupConfig.provider).remove(backupConfig, backupFilePath, done);
    } else {
        var events = api(backupConfig.provider).removeDir(backupConfig, backupFilePath);
        events.on('progress', (message) => progressCallback({ message: `${backup.id}: ${message}` }));
        events.on('done', done);
    }
}

// Applies the retention policy per app (keepLatest only for apps still installed)
// and removes expired app backups. Yields the removed app backup ids.
function cleanupAppBackups(backupConfig, referencedAppBackupIds, progressCallback, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert(Array.isArray(referencedAppBackupIds));
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    let removedAppBackupIds = [];

    apps.getAll(function (error, allApps) {
        if (error) return callback(error);

        const allAppIds = allApps.map(a => a.id);

        backupdb.getByTypePaged(exports.BACKUP_TYPE_APP, 1, 1000, function (error, appBackups) {
            if (error) return callback(error);

            // collate the backups by app id. note that the app could already have been uninstalled
            let appBackupsById = {};
            for (const appBackup of appBackups) {
                if (!appBackupsById[appBackup.identifier]) appBackupsById[appBackup.identifier] = [];
                appBackupsById[appBackup.identifier].push(appBackup);
            }

            // apply backup policy per app. keep latest backup only for existing apps
            let appBackupsToRemove = [];
            for (const appId of Object.keys(appBackupsById)) {
                applyBackupRetentionPolicy(appBackupsById[appId], _.extend({ keepLatest: allAppIds.includes(appId) }, backupConfig.retentionPolicy), referencedAppBackupIds);
                appBackupsToRemove = appBackupsToRemove.concat(appBackupsById[appId].filter(b => !b.keepReason));
            }

            async.eachSeries(appBackupsToRemove, function iterator(appBackup, iteratorDone) {
                progressCallback({ message: `Removing app backup (${appBackup.identifier}): ${appBackup.id}`});
                removedAppBackupIds.push(appBackup.id);
                cleanupBackup(backupConfig, appBackup, progressCallback, iteratorDone);
            }, function () {
                debug('cleanupAppBackups: done');

                callback(null, removedAppBackupIds);
            });
        });
    });
}

// Applies the retention policy to box backups (always keeping the latest) and
// removes expired ones. Yields { removedBoxBackupIds, referencedAppBackupIds },
// the latter being app backups still referenced by kept box backups.
function cleanupBoxBackups(backupConfig, progressCallback, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    let referencedAppBackupIds = [],
removedBoxBackupIds = []; backupdb.getByTypePaged(exports.BACKUP_TYPE_BOX, 1, 1000, function (error, boxBackups) { if (error) return callback(error); applyBackupRetentionPolicy(boxBackups, _.extend({ keepLatest: true }, backupConfig.retentionPolicy), [] /* references */); async.eachSeries(boxBackups, function iterator(boxBackup, iteratorNext) { if (boxBackup.keepReason) { referencedAppBackupIds = referencedAppBackupIds.concat(boxBackup.dependsOn); return iteratorNext(); } progressCallback({ message: `Removing box backup ${boxBackup.id}`}); removedBoxBackupIds.push(boxBackup.id); cleanupBackup(backupConfig, boxBackup, progressCallback, iteratorNext); }, function () { debug('cleanupBoxBackups: done'); callback(null, { removedBoxBackupIds, referencedAppBackupIds }); }); }); } function cleanupCacheFilesSync() { var files = safe.fs.readdirSync(path.join(paths.BACKUP_INFO_DIR)); if (!files) return; files.filter(function (f) { return f.endsWith('.sync.cache'); }).forEach(function (f) { safe.fs.unlinkSync(path.join(paths.BACKUP_INFO_DIR, f)); }); } // removes the snapshots of apps that have been uninstalled function cleanupSnapshots(backupConfig, callback) { assert.strictEqual(typeof backupConfig, 'object'); assert.strictEqual(typeof callback, 'function'); var contents = safe.fs.readFileSync(paths.SNAPSHOT_INFO_FILE, 'utf8'); var info = safe.JSON.parse(contents); if (!info) return callback(); delete info.box; async.eachSeries(Object.keys(info), function (appId, iteratorDone) { apps.get(appId, function (error /*, app */) { if (!error || error.reason !== BoxError.NOT_FOUND) return iteratorDone(); function done(/* ignoredError */) { safe.fs.unlinkSync(path.join(paths.BACKUP_INFO_DIR, `${appId}.sync.cache`)); safe.fs.unlinkSync(path.join(paths.BACKUP_INFO_DIR, `${appId}.sync.cache.new`)); setSnapshotInfo(appId, null, function (/* ignoredError */) { debug('cleanupSnapshots: cleaned up snapshot of app id %s', appId); iteratorDone(); }); } if (info[appId].format ==='tgz') { 
api(backupConfig.provider).remove(backupConfig, getBackupFilePath(backupConfig, `snapshot/app_${appId}`, info[appId].format), done); } else { var events = api(backupConfig.provider).removeDir(backupConfig, getBackupFilePath(backupConfig, `snapshot/app_${appId}`, info[appId].format)); events.on('progress', function (detail) { debug(`cleanupSnapshots: ${detail}`); }); events.on('done', done); } }); }, function () { debug('cleanupSnapshots: done'); callback(); }); } function cleanup(progressCallback, callback) { assert.strictEqual(typeof progressCallback, 'function'); assert.strictEqual(typeof callback, 'function'); settings.getBackupConfig(function (error, backupConfig) { if (error) return callback(error); if (backupConfig.retentionPolicy.keepWithinSecs < 0) { debug('cleanup: keeping all backups'); return callback(null, {}); } progressCallback({ percent: 10, message: 'Cleaning box backups' }); cleanupBoxBackups(backupConfig, progressCallback, function (error, { removedBoxBackupIds, referencedAppBackupIds }) { if (error) return callback(error); progressCallback({ percent: 40, message: 'Cleaning app backups' }); cleanupAppBackups(backupConfig, referencedAppBackupIds, progressCallback, function (error, removedAppBackupIds) { if (error) return callback(error); progressCallback({ percent: 90, message: 'Cleaning snapshots' }); cleanupSnapshots(backupConfig, function (error) { if (error) return callback(error); callback(null, { removedBoxBackupIds, removedAppBackupIds }); }); }); }); }); } function startCleanupTask(auditSource, callback) { tasks.add(tasks.TASK_CLEAN_BACKUPS, [], function (error, taskId) { if (error) return callback(error); tasks.startTask(taskId, {}, (error, result) => { // result is { removedBoxBackups, removedAppBackups } eventlog.add(eventlog.ACTION_BACKUP_CLEANUP_FINISH, auditSource, { taskId, errorMessage: error ? error.message : null, removedBoxBackups: result ? result.removedBoxBackups : [], removedAppBackups: result ? 
result.removedAppBackups : [] }); }); callback(null, taskId); }); } function checkConfiguration(callback) { assert.strictEqual(typeof callback, 'function'); settings.getBackupConfig(function (error, backupConfig) { if (error) return callback(error); let message = ''; if (backupConfig.provider === 'noop') { message = 'Cloudron backups are disabled. Please ensure this server is backed up using alternate means. See https://cloudron.io/documentation/backups/#storage-providers for more information.'; } else if (backupConfig.provider === 'filesystem' && !backupConfig.externalDisk) { message = 'Cloudron backups are currently on the same disk as the Cloudron server instance. This is dangerous and can lead to complete data loss if the disk fails. See https://cloudron.io/documentation/backups/#storage-providers for storing backups in an external location.'; } callback(null, message); }); } function configureCollectd(backupConfig, callback) { assert.strictEqual(typeof backupConfig, 'object'); assert.strictEqual(typeof callback, 'function'); if (backupConfig.provider === 'filesystem') { const collectdConf = ejs.render(COLLECTD_CONFIG_EJS, { backupDir: backupConfig.backupFolder }); collectd.addProfile('cloudron-backup', collectdConf, callback); } else { collectd.removeProfile('cloudron-backup', callback); } }