diff --git a/migrations/20200512172301-settings-backup-encryption.js b/migrations/20200512172301-settings-backup-encryption.js index 8dc263b2d..e25d2c6ad 100644 --- a/migrations/20200512172301-settings-backup-encryption.js +++ b/migrations/20200512172301-settings-backup-encryption.js @@ -10,7 +10,7 @@ exports.up = function(db, callback) { var backupConfig = JSON.parse(results[0].value); if (backupConfig.key) { backupConfig.encryption = hush.generateEncryptionKeysSync(backupConfig.key); - // backupTargets.cleanupCacheFilesSync(); + // backupSites.cleanupCacheFilesSync(); fs.writeFileSync('/home/yellowtent/platformdata/BACKUP_PASSWORD', 'This file contains your Cloudron backup password.\nBefore Cloudron v5.2, this was saved in the database.' + diff --git a/migrations/20250724102340-backupTargets-create-table.js b/migrations/20250724102340-backupSites-create-table.js similarity index 93% rename from migrations/20250724102340-backupTargets-create-table.js rename to migrations/20250724102340-backupSites-create-table.js index e5e6c857c..695d2e74a 100644 --- a/migrations/20250724102340-backupTargets-create-table.js +++ b/migrations/20250724102340-backupSites-create-table.js @@ -7,7 +7,7 @@ const child_process = require('node:child_process'), paths = require('../src/paths.js'); exports.up = async function (db) { - const cmd = 'CREATE TABLE IF NOT EXISTS backupTargets(' + + const cmd = 'CREATE TABLE IF NOT EXISTS backupSites(' + 'id VARCHAR(128) NOT NULL UNIQUE,' + 'name VARCHAR(128) NOT NULL,' + 'provider VARCHAR(32) NOT NULL,' + @@ -93,12 +93,12 @@ exports.up = async function (db) { child_process.execSync(`rm -f ${targetInfoDir}/*.cache`); await db.runSql('START TRANSACTION'); - await db.runSql('INSERT INTO backupTargets (id, name, provider, configJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', + await db.runSql('INSERT INTO backupSites (id, name, provider, configJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [ id, name, provider, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(integrityKeyPair), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, main ]); await db.runSql('DELETE FROM settings WHERE name=? OR name=? 
OR name=?', [ 'backup_storage', 'backup_limits', 'backup_policy' ]); await db.runSql('COMMIT'); }; exports.down = async function (db) { - await db.runSql('DROP TABLE backupTargets'); + await db.runSql('DROP TABLE backupSites'); }; diff --git a/migrations/20250724141339-backups-add-siteId.js b/migrations/20250724141339-backups-add-siteId.js new file mode 100644 index 000000000..92ce12f10 --- /dev/null +++ b/migrations/20250724141339-backups-add-siteId.js @@ -0,0 +1,55 @@ +'use strict'; + +const crypto = require('node:crypto'), + path = require('node:path'), + paths = require('../src/paths.js'); + +exports.up = async function(db) { + const backups = await db.runSql('SELECT format, COUNT(*) AS count FROM backups GROUP BY format WITH ROLLUP', []); // https://dev.mysql.com/doc/refman/8.4/en/group-by-modifiers.html + + let tgzCount = 0, rsyncCount = 0, totalCount = 0; + for (const r of backups) { + if (r.format === 'tgz') tgzCount = r.count; + else if (r.format === 'rsync') rsyncCount = r.count; + else if (r.format === null) totalCount = r.count; + } + let theOneFormat = null; + if (tgzCount === totalCount) theOneFormat = 'tgz'; + else if (rsyncCount === totalCount) theOneFormat = 'rsync'; + console.log(`Backup counts. rsync: ${rsyncCount} tgz: ${tgzCount} total: ${totalCount} . theOneFormat: ${theOneFormat}`); + + const backupSites = await db.runSql('SELECT * FROM backupSites'); + const currentBackupSite = backupSites[0]; + let cloneBackupSite = null; + if (totalCount && currentBackupSite.format !== theOneFormat) { + const cloneId = `bc-${crypto.randomUUID()}`; + cloneBackupSite = Object.assign({}, backupSites[0], { id: cloneId }); + cloneBackupSite.format = currentBackupSite.format === 'rsync' ? 'tgz' : 'rsync'; + cloneBackupSite.priority = false; + cloneBackupSite.name = 'Copy of Default'; + cloneBackupSite.schedule = 'never'; + cloneBackupSite._managedMountPath = path.join(paths.MANAGED_BACKUP_MOUNT_DIR, cloneId); // this won't work until the user remounts + console.log(`Existing format is ${currentBackupSite.format} . Adding clone backup site for ${cloneBackupSite.format}`); + + await db.runSql('INSERT INTO backupSites (id, name, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, priority) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)', + [ cloneBackupSite.id, cloneBackupSite.name, cloneBackupSite.configJson, cloneBackupSite.limitsJson, cloneBackupSite.retentionJson, cloneBackupSite.schedule, + cloneBackupSite.encryptionJson, cloneBackupSite.format, cloneBackupSite.priority ]); + } + + await db.runSql('ALTER TABLE backups ADD siteId VARCHAR(128)'); + if (totalCount) { + if (currentBackupSite.format === 'tgz') { + const ext = currentBackupSite.encryptionJson ? '.tar.gz.enc' : '.tar.gz'; + console.log(`Adjusting remotePath of existing tgz backups with ${ext}`); + await db.runSql('UPDATE backups SET remotePath=CONCAT(remotePath, ?) WHERE format=?', [ ext, 'tgz' ]); + } + await db.runSql('UPDATE backups SET siteId=? WHERE format=?', [ currentBackupSite.id, currentBackupSite.format ]); + if (cloneBackupSite) await db.runSql('UPDATE backups SET siteId=? 
WHERE format=?', [ cloneBackupSite.id, cloneBackupSite.format ]); + } + await db.runSql('ALTER TABLE backups MODIFY siteId VARCHAR(128) NOT NULL'); + await db.runSql('ALTER TABLE backups ADD FOREIGN KEY(siteId) REFERENCES backupSites(id)'); + await db.runSql('ALTER TABLE backups DROP COLUMN format'); +}; + +exports.down = async function(/*db*/) { +}; diff --git a/migrations/20250724141339-backups-add-targetId.js b/migrations/20250724141339-backups-add-targetId.js deleted file mode 100644 index 380b08a0a..000000000 --- a/migrations/20250724141339-backups-add-targetId.js +++ /dev/null @@ -1,55 +0,0 @@ -'use strict'; - -const crypto = require('node:crypto'), - path = require('node:path'), - paths = require('../src/paths.js'); - -exports.up = async function(db) { - const backups = await db.runSql('SELECT format, COUNT(*) AS count FROM backups GROUP BY format WITH ROLLUP', []); // https://dev.mysql.com/doc/refman/8.4/en/group-by-modifiers.html - - let tgzCount = 0, rsyncCount = 0, totalCount = 0; - for (const r of backups) { - if (r.format === 'tgz') tgzCount = r.count; - else if (r.format === 'rsync') rsyncCount = r.count; - else if (r.format === null) totalCount = r.count; - } - let theOneFormat = null; - if (tgzCount === totalCount) theOneFormat = 'tgz'; - else if (rsyncCount === totalCount) theOneFormat = 'rsync'; - console.log(`Backup counts. rsync: ${rsyncCount} tgz: ${tgzCount} total: ${totalCount} . theOneFormat: ${theOneFormat}`); - - const backupTargets = await db.runSql('SELECT * FROM backupTargets'); - const currentBackupTarget = backupTargets[0]; - let cloneBackupTarget = null; - if (totalCount && currentBackupTarget.format !== theOneFormat) { - const cloneId = `bc-${crypto.randomUUID()}`; - cloneBackupTarget = Object.assign({}, backupTargets[0], { id: cloneId }); - cloneBackupTarget.format = currentBackupTarget.format === 'rsync' ? 'tgz' : 'rsync'; - cloneBackupTarget.priority = false; - cloneBackupTarget.name = 'Copy of Default'; - cloneBackupTarget.schedule = 'never'; - cloneBackupTarget._managedMountPath = path.join(paths.MANAGED_BACKUP_MOUNT_DIR, cloneId); // this won't work until the user remounts - console.log(`Existing format is ${currentBackupTarget.format} . Adding clone backup target for ${cloneBackupTarget.format}`); - - await db.runSql('INSERT INTO backupTargets (id, name, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, priority) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)', - [ cloneBackupTarget.id, cloneBackupTarget.name, cloneBackupTarget.configJson, cloneBackupTarget.limitsJson, cloneBackupTarget.retentionJson, cloneBackupTarget.schedule, - cloneBackupTarget.encryptionJson, cloneBackupTarget.format, cloneBackupTarget.priority ]); - } - - await db.runSql('ALTER TABLE backups ADD targetId VARCHAR(128)'); - if (totalCount) { - if (currentBackupTarget.format === 'tgz') { - const ext = currentBackupTarget.encryptionJson ? '.tar.gz.enc' : '.tar.gz'; - console.log(`Adjusting remotePath of existing tgz backups with ${ext}`); - await db.runSql('UPDATE backups SET remotePath=CONCAT(remotePath, ?) WHERE format=?', [ ext, 'tgz' ]); - } - await db.runSql('UPDATE backups SET targetId=? WHERE format=?', [ currentBackupTarget.id, currentBackupTarget.format ]); - if (cloneBackupTarget) await db.runSql('UPDATE backups SET targetId=? 
WHERE format=?', [ cloneBackupTarget.id, cloneBackupTarget.format ]); - } - await db.runSql('ALTER TABLE backups MODIFY targetId VARCHAR(128) NOT NULL'); - await db.runSql('ALTER TABLE backups ADD FOREIGN KEY(targetId) REFERENCES backupTargets(id)'); - await db.runSql('ALTER TABLE backups DROP COLUMN format'); -}; - -exports.down = async function(/*db*/) { -}; diff --git a/migrations/schema.sql b/migrations/schema.sql index 4d856dc58..f115a4c58 100644 --- a/migrations/schema.sql +++ b/migrations/schema.sql @@ -158,11 +158,11 @@ CREATE TABLE IF NOT EXISTS backups( manifestJson TEXT, /* to validate if the app can be installed in this version of box */ preserveSecs INTEGER DEFAULT 0, appConfigJson TEXT, /* useful for clone and archive */ - targetId VARCHAR(128) NOT NULL, + siteId VARCHAR(128) NOT NULL, integrityJson TEXT, /* { signature } */ statsJSON TEXT, - FOREIGN KEY(targetId) REFERENCES backupTargets(id), + FOREIGN KEY(siteId) REFERENCES backupSites(id), INDEX creationTime_index (creationTime), PRIMARY KEY (id)); @@ -304,7 +304,7 @@ CREATE TABLE IF NOT EXISTS dockerRegistries( PRIMARY KEY (id) ); -CREATE TABLE IF NOT EXISTS backupTargets( +CREATE TABLE IF NOT EXISTS backupSites( id VARCHAR(128) NOT NULL UNIQUE, name VARCHAR(128) NOT NULL UNIQUE, provider VARCHAR(32) NOT NULL, diff --git a/src/apps.js b/src/apps.js index 08d3cecd9..784860fae 100644 --- a/src/apps.js +++ b/src/apps.js @@ -153,7 +153,7 @@ const appTaskManager = require('./apptaskmanager.js'), archives = require('./archives.js'), assert = require('node:assert'), backups = require('./backups.js'), - backupTargets = require('./backuptargets.js'), + backupSites = require('./backupsites.js'), BoxError = require('./boxerror.js'), constants = require('./constants.js'), crypto = require('node:crypto'), @@ -1258,12 +1258,12 @@ async function scheduleTask(appId, installationState, taskId, auditSource) { assert.strictEqual(typeof taskId, 'string'); assert.strictEqual(typeof auditSource, 'object'); - const backupTarget = await backupTargets.getPrimary(); + const backupSite = await backupSites.getPrimary(); let memoryLimit = 400; if (installationState === exports.ISTATE_PENDING_CLONE || installationState === exports.ISTATE_PENDING_RESTORE || installationState === exports.ISTATE_PENDING_IMPORT || installationState === exports.ISTATE_PENDING_UPDATE) { - memoryLimit = backupTarget.limits?.memoryLimit ? Math.max(backupTarget.limits.memoryLimit/1024/1024, 400) : 400; + memoryLimit = backupSite.limits?.memoryLimit ? Math.max(backupSite.limits.memoryLimit/1024/1024, 400) : 400; } else if (installationState === exports.ISTATE_PENDING_DATA_DIR_MIGRATION) { memoryLimit = 1024; // cp takes more memory than we think } @@ -2360,7 +2360,7 @@ async function importApp(app, data, auditSource) { let restoreConfig; if (data.remotePath) { // if not provided, we import in-place - const backupTarget = await backupTargets.createPseudo({ + const backupSite = await backupSites.createPseudo({ id: `appimport-${app.id}`, provider: data.provider, config: data.config, @@ -2369,7 +2369,7 @@ async function importApp(app, data, auditSource) { encryptedFilenames: data.encryptedFilenames ?? 
false }); - restoreConfig = { remotePath: data.remotePath, backupTarget }; + restoreConfig = { remotePath: data.remotePath, backupSite }; } else { // inPlace restoreConfig = { inPlace: true }; } @@ -2399,8 +2399,8 @@ async function exportApp(app, data, auditSource) { if (!canBackupApp(app)) throw new BoxError(BoxError.BAD_STATE, 'App cannot be backed up in this state'); - const backupTarget = await backupTargets.getPrimary(); - const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ appId, backupTarget.id, { snapshotOnly: true } ]); + const backupSite = await backupSites.getPrimary(); + const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ appId, backupSite.id, { snapshotOnly: true } ]); safe(tasks.startTask(taskId, {}), { debug }); // background return { taskId }; } @@ -2774,11 +2774,11 @@ async function backup(app, auditSource) { if (!canBackupApp(app)) throw new BoxError(BoxError.BAD_STATE, 'App cannot be backed up in this state'); - const backupTarget = await backupTargets.getPrimary(); + const backupSite = await backupSites.getPrimary(); - const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ app.id, backupTarget.id, { snapshotOnly: false } ]); + const taskId = await tasks.add(`${tasks.TASK_APP_BACKUP_PREFIX}${app.id}`, [ app.id, backupSite.id, { snapshotOnly: false } ]); - const memoryLimit = backupTarget.limits?.memoryLimit ? Math.max(backupTarget.limits.memoryLimit/1024/1024, 1024) : 1024; + const memoryLimit = backupSite.limits?.memoryLimit ? Math.max(backupSite.limits.memoryLimit/1024/1024, 1024) : 1024; // background tasks.startTask(taskId, { timeout: 24 * 60 * 60 * 1000 /* 24 hours */, nice: 15, memoryLimit, oomScoreAdjust: -999 }) @@ -2827,12 +2827,12 @@ async function getBackupDownloadStream(app, backupId) { if (backup.identifier !== app.id) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found'); // some other app's backup if (backup.format !== 'tgz') throw new BoxError(BoxError.BAD_STATE, 'only tgz backups can be downloaded'); - const backupTarget = await backupTargets.get(backup.targetId); - if (!backupTarget) throw new BoxError(BoxError.NOT_FOUND, 'Backup target not found'); // not possible + const backupSite = await backupSites.get(backup.siteId); + if (!backupSite) throw new BoxError(BoxError.NOT_FOUND, 'Backup site not found'); // not possible const ps = new PassThrough(); - const stream = await backupTargets.storageApi(backupTarget).download(backupTarget.config, backup.remotePath); + const stream = await backupSites.storageApi(backupSite).download(backupSite.config, backup.remotePath); stream.on('error', function(error) { debug(`getBackupDownloadStream: read stream error: ${error.message}`); ps.emit('error', new BoxError(BoxError.EXTERNAL_ERROR, error)); @@ -2859,7 +2859,7 @@ async function restoreApps(apps, options, auditSource) { let installationState, restoreConfig; if (!error && result.length) { installationState = exports.ISTATE_PENDING_RESTORE; - restoreConfig = { remotePath: result[0].remotePath, backupTarget: options.backupTarget }; + restoreConfig = { remotePath: result[0].remotePath, backupSite: options.backupSite }; } else { installationState = exports.ISTATE_PENDING_INSTALL; restoreConfig = null; diff --git a/src/apptask.js b/src/apptask.js index 9e6a91d06..9ec060599 100644 --- a/src/apptask.js +++ b/src/apptask.js @@ -15,7 +15,7 @@ const apps = require('./apps.js'), appstore = require('./appstore.js'), assert = require('node:assert'), AuditSource = require('./auditsource.js'), - 
backupTargets = require('./backuptargets.js'), + backupSites = require('./backupsites.js'), backuptask = require('./backuptask.js'), BoxError = require('./boxerror.js'), constants = require('./constants.js'), @@ -271,7 +271,7 @@ async function installCommand(app, args, progressCallback) { assert.strictEqual(typeof args, 'object'); assert.strictEqual(typeof progressCallback, 'function'); - // restoreConfig is one of null (install) OR { backupId } (restore) OR { remotePath, backupTarget } (import) OR { inPlace } (import) + // restoreConfig is one of null (install) OR { backupId } (restore) OR { remotePath, backupSite } (import) OR { inPlace } (import) const { restoreConfig, overwriteDns, skipDnsSetup, oldManifest } = args; // this protects against the theoretical possibility of an app being marked for install/restore from @@ -334,11 +334,11 @@ async function installCommand(app, args, progressCallback) { await progressCallback({ percent: 65, message: 'Downloading backup and restoring addons' }); await services.setupAddons(app, app.manifest.addons); await services.clearAddons(app, app.manifest.addons); - const backupTarget = restoreConfig.backupTarget; - await backupTargets.storageApi(backupTarget).setup(backupTarget.config); + const backupSite = restoreConfig.backupSite; + await backupSites.storageApi(backupSite).setup(backupSite.config); await backuptask.downloadApp(app, restoreConfig, (progress) => { progressCallback({ percent: 75, message: progress.message }); }); await apps.loadConfig(app); - await backupTargets.storageApi(backupTarget).teardown(backupTarget.config); + await backupSites.storageApi(backupSite).teardown(backupSite.config); await progressCallback({ percent: 75, message: 'Restoring addons' }); await services.restoreAddons(app, app.manifest.addons); } else { // clone and restore @@ -566,9 +566,9 @@ async function updateCommand(app, args, progressCallback) { if (!updateConfig.skipBackup) { await progressCallback({ percent: 15, message: 'Backing up app' }); - const backupTarget = await backupTargets.getPrimary(); + const backupSite = await backupSites.getPrimary(); // preserve update backups for 3 weeks - const [error] = await safe(backuptask.backupApp(app, backupTarget, { preserveSecs: 3*7*24*60*60 }, (progress) => { + const [error] = await safe(backuptask.backupApp(app, backupSite, { preserveSecs: 3*7*24*60*60 }, (progress) => { progressCallback({ percent: 15, message: `Backup - ${progress.message}` }); })); if (error) { diff --git a/src/backupcleaner.js b/src/backupcleaner.js index a347b9644..9bc11bb2b 100644 --- a/src/backupcleaner.js +++ b/src/backupcleaner.js @@ -13,7 +13,7 @@ const apps = require('./apps.js'), assert = require('node:assert'), backups = require('./backups.js'), backupFormats = require('./backupformats.js'), - backupTargets = require('./backuptargets.js'), + backupSites = require('./backupsites.js'), constants = require('./constants.js'), debug = require('debug')('box:backupcleaner'), moment = require('moment'), @@ -78,20 +78,20 @@ function applyBackupRetention(allBackups, retention, referencedBackupIds) { } } -async function removeBackup(target, backup, progressCallback) { - assert.strictEqual(typeof target, 'object'); +async function removeBackup(site, backup, progressCallback) { + assert.strictEqual(typeof site, 'object'); assert.strictEqual(typeof backup, 'object'); assert.strictEqual(typeof progressCallback, 'function'); const remotePath = backup.remotePath; let removeError; - if (target.format ==='tgz') { + if (site.format ==='tgz') { 
progressCallback({ message: `${backup.remotePath}: Removing ${remotePath}`}); - [removeError] = await safe(backupTargets.storageApi(target).remove(target.config, remotePath)); + [removeError] = await safe(backupSites.storageApi(site).remove(site.config, remotePath)); } else { progressCallback({ message: `${backup.remotePath}: Removing directory ${remotePath}`}); - [removeError] = await safe(backupTargets.storageApi(target).removeDir(target.config, remotePath, progressCallback)); + [removeError] = await safe(backupSites.storageApi(site).removeDir(site.config, remotePath, progressCallback)); } if (removeError) { @@ -100,7 +100,7 @@ async function removeBackup(target, backup, progressCallback) { } // prune empty directory if possible - const [pruneError] = await safe(backupTargets.storageApi(target).remove(target.config, path.dirname(remotePath))); + const [pruneError] = await safe(backupSites.storageApi(site).remove(site.config, path.dirname(remotePath))); if (pruneError) debug(`removeBackup: unable to prune backup directory ${path.dirname(remotePath)}: ${pruneError.message}`); const [delError] = await safe(backups.del(backup.id)); @@ -108,8 +108,8 @@ async function removeBackup(target, backup, progressCallback) { else debug(`removeBackup: removed ${backup.remotePath}`); } -async function cleanupAppBackups(target, referencedBackupIds, progressCallback) { - assert.strictEqual(typeof target, 'object'); +async function cleanupAppBackups(site, referencedBackupIds, progressCallback) { + assert.strictEqual(typeof site, 'object'); assert(Array.isArray(referencedBackupIds)); assert.strictEqual(typeof progressCallback, 'function'); @@ -131,7 +131,7 @@ async function cleanupAppBackups(target, referencedBackupIds, progressCallback) // apply backup policy per app. 
keep latest backup only for existing apps let appBackupsToRemove = []; for (const appId of Object.keys(appBackupsById)) { - const appRetention = Object.assign({ keepLatest: allAppIds.includes(appId) }, target.retention); + const appRetention = Object.assign({ keepLatest: allAppIds.includes(appId) }, site.retention); debug(`cleanupAppBackups: applying retention for appId ${appId} retention: ${JSON.stringify(appRetention)}`); applyBackupRetention(appBackupsById[appId], appRetention, referencedBackupIds); appBackupsToRemove = appBackupsToRemove.concat(appBackupsById[appId].filter(b => !b.keepReason)); @@ -140,7 +140,7 @@ async function cleanupAppBackups(target, referencedBackupIds, progressCallback) for (const appBackup of appBackupsToRemove) { await progressCallback({ message: `Removing app backup (${appBackup.identifier}): ${appBackup.id}`}); removedAppBackupPaths.push(appBackup.remotePath); - await removeBackup(target, appBackup, progressCallback); // never errors + await removeBackup(site, appBackup, progressCallback); // never errors } debug('cleanupAppBackups: done'); @@ -148,8 +148,8 @@ async function cleanupAppBackups(target, referencedBackupIds, progressCallback) return removedAppBackupPaths; } -async function cleanupMailBackups(target, referencedBackupIds, progressCallback) { - assert.strictEqual(typeof target, 'object'); +async function cleanupMailBackups(site, referencedBackupIds, progressCallback) { + assert.strictEqual(typeof site, 'object'); assert(Array.isArray(referencedBackupIds)); assert.strictEqual(typeof progressCallback, 'function'); @@ -157,13 +157,13 @@ async function cleanupMailBackups(target, referencedBackupIds, progressCallback) const mailBackups = await backups.getByTypePaged(backups.BACKUP_TYPE_MAIL, 1, 100000); - applyBackupRetention(mailBackups, Object.assign({ keepLatest: true }, target.retention), referencedBackupIds); + applyBackupRetention(mailBackups, Object.assign({ keepLatest: true }, site.retention), referencedBackupIds); for (const mailBackup of mailBackups) { if (mailBackup.keepReason) continue; await progressCallback({ message: `Removing mail backup ${mailBackup.remotePath}`}); removedMailBackupPaths.push(mailBackup.remotePath); - await removeBackup(target, mailBackup, progressCallback); // never errors + await removeBackup(site, mailBackup, progressCallback); // never errors } debug('cleanupMailBackups: done'); @@ -171,18 +171,18 @@ async function cleanupMailBackups(target, referencedBackupIds, progressCallback) return removedMailBackupPaths; } -async function cleanupBoxBackups(target, progressCallback) { +async function cleanupBoxBackups(site, progressCallback) { assert.strictEqual(typeof progressCallback, 'function'); let referencedBackupIds = []; const removedBoxBackupPaths = []; - // We need to fetch all box backups to be able to compile a list of all referenced app backupTargets. + // We need to fetch all box backups to be able to compile a list of all referenced app backupSites. // Otherwise if we miss some app backups, they will get purged! 
// 100000 here should be seen as infinity const boxBackups = await backups.getByTypePaged(backups.BACKUP_TYPE_BOX, 1, 100000); - applyBackupRetention(boxBackups, Object.assign({ keepLatest: true }, target.retention), [] /* references */); + applyBackupRetention(boxBackups, Object.assign({ keepLatest: true }, site.retention), [] /* references */); for (const boxBackup of boxBackups) { if (boxBackup.keepReason) { @@ -193,7 +193,7 @@ async function cleanupBoxBackups(target, progressCallback) { await progressCallback({ message: `Removing box backup ${boxBackup.remotePath}`}); removedBoxBackupPaths.push(boxBackup.remotePath); - await removeBackup(target, boxBackup, progressCallback); + await removeBackup(site, boxBackup, progressCallback); } debug('cleanupBoxBackups: done'); @@ -202,8 +202,8 @@ async function cleanupBoxBackups(target, progressCallback) { } // cleans up the database by checking if backup exists in the remote. this can happen if user had set some bucket policy -async function cleanupMissingBackups(backupTarget, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function cleanupMissingBackups(backupSite, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof progressCallback, 'function'); const perPage = 1000; @@ -218,9 +218,9 @@ async function cleanupMissingBackups(backupTarget, progressCallback) { for (const backup of result) { if (backup.state !== backups.BACKUP_STATE_NORMAL) continue; // note: errored and incomplete backups are cleaned up by the backup retention logic - const ext = backupFormats.api(backupTarget.format).getFileExtension(!!backupTarget.encyption); + const ext = backupFormats.api(backupSite.format).getFileExtension(!!backupSite.encyption); const remotePath = backup.remotePath + (ext ? 
'' : '/'); // add trailing slash to indicate directory - const [existsError, exists] = await safe(backupTargets.storageApi(backupTarget).exists(backupTarget.config, remotePath)); + const [existsError, exists] = await safe(backupSites.storageApi(backupSite).exists(backupSite.config, remotePath)); if (existsError || exists) continue; await progressCallback({ message: `Removing missing backup ${backup.remotePath}`}); @@ -240,10 +240,10 @@ async function cleanupMissingBackups(backupTarget, progressCallback) { } // removes the snapshots of apps that have been uninstalled -async function removeOldAppSnapshots(backupTarget) { - assert.strictEqual(typeof backupTarget, 'object'); +async function removeOldAppSnapshots(backupSite) { + assert.strictEqual(typeof backupSite, 'object'); - const snapshotInfo = await backupTargets.getSnapshotInfo(backupTarget); + const snapshotInfo = await backupSites.getSnapshotInfo(backupSite); const progressCallback = (progress) => { debug(`removeOldAppSnapshots: ${progress.message}`); }; @@ -253,57 +253,57 @@ async function removeOldAppSnapshots(backupTarget) { const app = await apps.get(appId); if (app !== null) continue; // app is still installed - const ext = backupFormats.api(backupTarget.format).getFileExtension(!!backupTarget.encyption); + const ext = backupFormats.api(backupSite.format).getFileExtension(!!backupSite.encyption); const remotePath = `snapshot/app_${appId}${ext}`; if (ext) { - await safe(backupTargets.storageApi(backupTarget).remove(backupTarget.config, remotePath), { debug }); + await safe(backupSites.storageApi(backupSite).remove(backupSite.config, remotePath), { debug }); } else { - await safe(backupTargets.storageApi(backupTarget).removeDir(backupTarget.config, remotePath, progressCallback), { debug }); + await safe(backupSites.storageApi(backupSite).removeDir(backupSite.config, remotePath, progressCallback), { debug }); } - await backupTargets.setSnapshotInfo(backupTarget, appId, null /* info */); + await backupSites.setSnapshotInfo(backupSite, appId, null /* info */); debug(`removeOldAppSnapshots: removed snapshot of app ${appId}`); } debug('removeOldAppSnapshots: done'); } -async function run(targetId, progressCallback) { - assert.strictEqual(typeof targetId, 'string'); +async function run(siteId, progressCallback) { + assert.strictEqual(typeof siteId, 'string'); assert.strictEqual(typeof progressCallback, 'function'); - const backupTarget = await backupTargets.get(targetId); - if (!backupTarget) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Target not found'); + const backupSite = await backupSites.get(siteId); + if (!backupSite) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Target not found'); - debug(`run: retention is ${JSON.stringify(backupTarget.retention)}`); + debug(`run: retention is ${JSON.stringify(backupSite.retention)}`); - const status = await backupTargets.ensureMounted(backupTarget); + const status = await backupSites.ensureMounted(backupSite); debug(`run: mount point status is ${JSON.stringify(status)}`); if (status.state !== 'active') throw new BoxError(BoxError.MOUNT_ERROR, `Backup endpoint is not mounted: ${status.message}`); - if (backupTarget.retention.keepWithinSecs < 0) { + if (backupSite.retention.keepWithinSecs < 0) { debug('run: keeping all backups'); return {}; } await progressCallback({ percent: 10, message: 'Cleaning box backups' }); - const { removedBoxBackupPaths, referencedBackupIds } = await cleanupBoxBackups(backupTarget, progressCallback); // references is app or mail backup ids + const { 
removedBoxBackupPaths, referencedBackupIds } = await cleanupBoxBackups(backupSite, progressCallback); // references is app or mail backup ids await progressCallback({ percent: 20, message: 'Cleaning mail backups' }); - const removedMailBackupPaths = await cleanupMailBackups(backupTarget, referencedBackupIds, progressCallback); + const removedMailBackupPaths = await cleanupMailBackups(backupSite, referencedBackupIds, progressCallback); await progressCallback({ percent: 40, message: 'Cleaning app backups' }); const archivedBackupIds = await archives.listBackupIds(); - const removedAppBackupPaths = await cleanupAppBackups(backupTarget, referencedBackupIds.concat(archivedBackupIds), progressCallback); + const removedAppBackupPaths = await cleanupAppBackups(backupSite, referencedBackupIds.concat(archivedBackupIds), progressCallback); await progressCallback({ percent: 70, message: 'Checking storage backend and removing stale entries in database' }); - const missingBackupPaths = await cleanupMissingBackups(backupTarget, progressCallback); + const missingBackupPaths = await cleanupMissingBackups(backupSite, progressCallback); await progressCallback({ percent: 80, message: 'Removing snapshots of uninstalled apps' }); - await removeOldAppSnapshots(backupTarget); + await removeOldAppSnapshots(backupSite); await progressCallback({ percent: 80, message: 'Cleaning storage artifacts' }); - await backupTargets.storageApi(backupTarget).cleanup(backupTarget.config, progressCallback); + await backupSites.storageApi(backupSite).cleanup(backupSite.config, progressCallback); return { removedBoxBackupPaths, removedMailBackupPaths, removedAppBackupPaths, missingBackupPaths }; } diff --git a/src/backupformat/rsync.js b/src/backupformat/rsync.js index d54bfc9f7..f1a6438ed 100644 --- a/src/backupformat/rsync.js +++ b/src/backupformat/rsync.js @@ -13,7 +13,7 @@ exports = module.exports = { const assert = require('node:assert'), async = require('async'), - backupTargets = require('../backuptargets.js'), + backupSites = require('../backupsites.js'), BoxError = require('../boxerror.js'), DataLayout = require('../datalayout.js'), { DecryptStream } = require('../hush.js'), @@ -75,15 +75,15 @@ async function addFile(sourceFile, encryption, uploader, progressCallback) { }; } -async function sync(backupTarget, remotePath, dataLayout, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function sync(backupSite, remotePath, dataLayout, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout'); assert.strictEqual(typeof progressCallback, 'function'); // the number here has to take into account the s3.upload partSize (which is 10MB). So 20=200MB - const concurrency = backupTarget.limits?.syncConcurrency || (backupTarget.provider === 's3' ? 20 : 10); - const cacheFile = path.join(paths.BACKUP_INFO_DIR, backupTarget.id, `${dataLayout.getBasename()}.sync.cache`); + const concurrency = backupSite.limits?.syncConcurrency || (backupSite.provider === 's3' ? 
20 : 10); + const cacheFile = path.join(paths.BACKUP_INFO_DIR, backupSite.id, `${dataLayout.getBasename()}.sync.cache`); const { delQueue, addQueue, integrityMap } = await syncer.sync(dataLayout, cacheFile); debug(`sync: processing ${delQueue.length} deletes and ${addQueue.length} additions`); const aggregatedStats = { @@ -96,33 +96,33 @@ async function sync(backupTarget, remotePath, dataLayout, progressCallback) { async function processSyncerChange(change) { debug('sync: processing task: %j', change); // the empty task.path is special to signify the directory - const destPath = change.path && backupTarget.encryption?.encryptedFilenames ? hush.encryptFilePath(change.path, backupTarget.encryption) : change.path; + const destPath = change.path && backupSite.encryption?.encryptedFilenames ? hush.encryptFilePath(change.path, backupSite.encryption) : change.path; const fullPath = path.join(remotePath, destPath); if (change.operation === 'removedir') { debug(`Removing directory ${fullPath}`); - await backupTargets.storageApi(backupTarget).removeDir(backupTarget.config, fullPath, progressCallback); + await backupSites.storageApi(backupSite).removeDir(backupSite.config, fullPath, progressCallback); } else if (change.operation === 'remove') { debug(`Removing ${fullPath}`); - await backupTargets.storageApi(backupTarget).remove(backupTarget.config, fullPath); + await backupSites.storageApi(backupSite).remove(backupSite.config, fullPath); } else if (change.operation === 'add') { await promiseRetry({ times: 5, interval: 20000, debug }, async (retryCount) => { progressCallback({ message: `Adding ${change.path}` + (retryCount > 1 ? ` (Try ${retryCount})` : '') }); debug(`Adding ${change.path} position ${change.position} try ${retryCount}`); - const uploader = await backupTargets.storageApi(backupTarget).upload(backupTarget.config, fullPath); - const { integrity } = await addFile(dataLayout.toLocalPath('./' + change.path), backupTarget.encryption, uploader, progressCallback); + const uploader = await backupSites.storageApi(backupSite).upload(backupSite.config, fullPath); + const { integrity } = await addFile(dataLayout.toLocalPath('./' + change.path), backupSite.encryption, uploader, progressCallback); integrityMap.set(destPath, integrity); aggregatedStats.size += integrity.size; }); } } - const [delError] = await safe(async.eachLimit(delQueue, concurrency, async (change) => await processSyncerChange(change, backupTarget, remotePath, dataLayout, progressCallback))); + const [delError] = await safe(async.eachLimit(delQueue, concurrency, async (change) => await processSyncerChange(change, backupSite, remotePath, dataLayout, progressCallback))); debug('sync: done processing deletes. error: %o', delError); if (delError) throw delError; - const [addError] = await safe(async.eachLimit(addQueue, concurrency, async (change) => await processSyncerChange(change, backupTarget, remotePath, dataLayout, progressCallback))); + const [addError] = await safe(async.eachLimit(addQueue, concurrency, async (change) => await processSyncerChange(change, backupSite, remotePath, dataLayout, progressCallback))); debug('sync: done processing adds. 
error: %o', addError); if (addError) throw addError; @@ -193,29 +193,29 @@ } for (const symlink of (metadata.symlinks || [])) { if (!symlink.target) continue; // the path may not exist if we had a directory full of symlinks const [mkdirError] = await safe(fs.promises.mkdir(path.dirname(dataLayout.toLocalPath(symlink.path)), { recursive: true })); if (mkdirError) throw new BoxError(BoxError.FS_ERROR, `unable to symlink (mkdir): ${mkdirError.message}`); const [symlinkError] = await safe(fs.promises.symlink(symlink.target, dataLayout.toLocalPath(symlink.path), 'file')); if (symlinkError) throw new BoxError(BoxError.FS_ERROR, `unable to symlink: ${symlinkError.message}`); } } -async function downloadDir(backupTarget, remotePath, dataLayout, progressCallback) { -    assert.strictEqual(typeof backupTarget, 'object'); +async function downloadDir(backupSite, remotePath, dataLayout, progressCallback) { +    assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout'); assert.strictEqual(typeof progressCallback, 'function'); -    const encryptedFilenames = backupTarget.encryption?.encryptedFilenames || false; +    const encryptedFilenames = backupSite.encryption?.encryptedFilenames || false; -    debug(`downloadDir: ${remotePath} to ${dataLayout.toString()}. encryption filenames: ${encryptedFilenames}. encrypted files: ${!!backupTarget.encryption}`); +    debug(`downloadDir: ${remotePath} to ${dataLayout.toString()}. encryption filenames: ${encryptedFilenames}. 
encrypted files: ${!!backupSite.encryption}`); async function downloadFile(entry) { let relativePath = path.relative(remotePath, entry.path); if (encryptedFilenames) { - const { error, result } = hush.decryptFilePath(relativePath, backupTarget.encryption); + const { error, result } = hush.decryptFilePath(relativePath, backupSite.encryption); if (error) throw new BoxError(BoxError.CRYPTO_ERROR, 'Unable to decrypt file'); relativePath = result; } @@ -227,7 +227,7 @@ async function downloadDir(backupTarget, remotePath, dataLayout, progressCallbac await promiseRetry({ times: 3, interval: 20000 }, async function () { progressCallback({ message: `Downloading ${entry.path} to ${destFilePath}` }); - const [downloadError, sourceStream] = await safe(backupTargets.storageApi(backupTarget).download(backupTarget.config, entry.path)); + const [downloadError, sourceStream] = await safe(backupSites.storageApi(backupSite).download(backupSite.config, entry.path)); if (downloadError) { progressCallback({ message: `Download ${entry.path} to ${destFilePath} errored: ${downloadError.message}` }); throw downloadError; @@ -244,8 +244,8 @@ async function downloadDir(backupTarget, remotePath, dataLayout, progressCallbac const streams = [ sourceStream, ps ]; - if (backupTarget.encryption) { - const decryptStream = new DecryptStream(backupTarget.encryption); + if (backupSite.encryption) { + const decryptStream = new DecryptStream(backupSite.encryption); streams.push(decryptStream); } @@ -261,45 +261,45 @@ async function downloadDir(backupTarget, remotePath, dataLayout, progressCallbac } // https://www.digitalocean.com/community/questions/rate-limiting-on-spaces?answer=40441 - const concurrency = backupTarget.limits?.downloadConcurrency || (backupTarget.provider === 's3' ? 30 : 10); + const concurrency = backupSite.limits?.downloadConcurrency || (backupSite.provider === 's3' ? 30 : 10); let marker = null; while (true) { - const batch = await backupTargets.storageApi(backupTarget).listDir(backupTarget.config, remotePath, marker === null ? 1 : 1000, marker); // try with one file first. if that works out, we continue faster + const batch = await backupSites.storageApi(backupSite).listDir(backupSite.config, remotePath, marker === null ? 1 : 1000, marker); // try with one file first. 
if that works out, we continue faster await async.eachLimit(batch.entries, concurrency, downloadFile); if (!batch.marker) break; marker = batch.marker; } } -async function download(backupTarget, remotePath, dataLayout, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function download(backupSite, remotePath, dataLayout, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout'); assert.strictEqual(typeof progressCallback, 'function'); debug(`download: Downloading ${remotePath} to ${dataLayout.toString()}`); - await downloadDir(backupTarget, remotePath, dataLayout, progressCallback); + await downloadDir(backupSite, remotePath, dataLayout, progressCallback); await restoreFsMetadata(dataLayout, `${dataLayout.localRoot()}/fsmetadata.json`); } -async function upload(backupTarget, remotePath, dataLayout, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function upload(backupSite, remotePath, dataLayout, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert.strictEqual(typeof dataLayout, 'object'); assert.strictEqual(typeof progressCallback, 'function'); await saveFsMetadata(dataLayout, `${dataLayout.localRoot()}/fsmetadata.json`); - return await sync(backupTarget, remotePath, dataLayout, progressCallback); // { stats, integrityMap } + return await sync(backupSite, remotePath, dataLayout, progressCallback); // { stats, integrityMap } } -async function copy(backupTarget, fromPath, toPath, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function copy(backupSite, fromPath, toPath, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof fromPath, 'string'); assert.strictEqual(typeof toPath, 'string'); assert.strictEqual(typeof progressCallback, 'function'); - await backupTargets.storageApi(backupTarget).copyDir(backupTarget.config, fromPath, toPath, progressCallback); + await backupSites.storageApi(backupSite).copyDir(backupSite.config, fromPath, toPath, progressCallback); } function getFileExtension(encryption) { @@ -308,27 +308,27 @@ function getFileExtension(encryption) { return ''; // this also signals to backupcleanear that we are dealing with directories } -async function verify(backupTarget, remotePath, integrityMap, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function verify(backupSite, remotePath, integrityMap, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert(util.types.isMap(integrityMap), 'integrityMap should be a Map'); assert.strictEqual(typeof progressCallback, 'function'); debug(`verify: Verifying ${remotePath}`); - const encryptedFilenames = backupTarget.encryption?.encryptedFilenames || false; + const encryptedFilenames = backupSite.encryption?.encryptedFilenames || false; let fileCount = 0; async function validateFile(entry) { let relativePath = path.relative(remotePath, entry.path); if (encryptedFilenames) { - const { error, result } = hush.decryptFilePath(relativePath, backupTarget.encryption); + const { error, result } = hush.decryptFilePath(relativePath, backupSite.encryption); if (error) throw new BoxError(BoxError.CRYPTO_ERROR, 'Unable to decrypt file'); relativePath = result; } ++fileCount; - const sourceStream = await 
backupTargets.storageApi(backupTarget).download(backupTarget.config, entry.path); +        const sourceStream = await backupSites.storageApi(backupSite).download(backupSite.config, entry.path); const ps = new ProgressStream({ interval: 10000 }); // display a progress every 10 seconds ps.on('progress', function (progress) { @@ -339,8 +339,8 @@ async function verify(backupTarget, remotePath, integrityMap, progressCallback) const streams = [ sourceStream, ps ]; -    if (backupTarget.encryption) { -        const decryptStream = new DecryptStream(backupTarget.encryption); +    if (backupSite.encryption) { +        const decryptStream = new DecryptStream(backupSite.encryption); streams.push(decryptStream); } @@ -357,10 +357,10 @@ async function verify(backupTarget, remotePath, integrityMap, progressCallback) debug(integrityMap.entries()); // https://www.digitalocean.com/community/questions/rate-limiting-on-spaces?answer=40441 -    const concurrency = backupTarget.limits?.downloadConcurrency || (backupTarget.provider === 's3' ? 30 : 10); +    const concurrency = backupSite.limits?.downloadConcurrency || (backupSite.provider === 's3' ? 30 : 10); let marker = null; while (true) { -        const batch = await backupTargets.storageApi(backupTarget).listDir(backupTarget.config, remotePath, marker === null ? 1 : 1000, marker); // try with one file first. if that works out, we continue faster +        const batch = await backupSites.storageApi(backupSite).listDir(backupSite.config, remotePath, marker === null ? 1 : 1000, marker); // try with one file first. if that works out, we continue faster await async.eachLimit(batch.entries, concurrency, validateFile); if (!batch.marker) break; marker = batch.marker; diff --git a/src/backupformat/tgz.js b/src/backupformat/tgz.js index c9dd7fee9..252ca881c 100644 --- a/src/backupformat/tgz.js +++ b/src/backupformat/tgz.js @@ -1,7 +1,7 @@ 'use strict'; const assert = require('node:assert'), -    backupTargets = require('../backuptargets.js'), +    backupSites = require('../backupsites.js'), BoxError = require('../boxerror.js'), DataLayout = require('../datalayout.js'), debug = require('debug')('box:backupformat/tgz'), @@ -235,8 +235,8 @@ async function tarExtract(inStream, dataLayout, encryption, progressCallback) { debug(`tarExtract: pipeline finished: ${JSON.stringify(ps.stats())}`); } -async function download(backupTarget, remotePath, dataLayout, progressCallback) { -    assert.strictEqual(typeof backupTarget, 'object'); +async function download(backupSite, remotePath, dataLayout, progressCallback) { +    assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert(dataLayout instanceof 
DataLayout, 'dataLayout must be a DataLayout'); assert.strictEqual(typeof progressCallback, 'function'); @@ -246,13 +246,13 @@ async function download(backupTarget, remotePath, dataLayout, progressCallback) await promiseRetry({ times: 5, interval: 20000, debug }, async () => { progressCallback({ message: `Downloading backup ${remotePath}` }); - const sourceStream = await backupTargets.storageApi(backupTarget).download(backupTarget.config, remotePath); - await tarExtract(sourceStream, dataLayout, backupTarget.encryption, progressCallback); + const sourceStream = await backupSites.storageApi(backupSite).download(backupSite.config, remotePath); + await tarExtract(sourceStream, dataLayout, backupSite.encryption, progressCallback); }); } -async function upload(backupTarget, remotePath, dataLayout, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function upload(backupSite, remotePath, dataLayout, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert.strictEqual(typeof dataLayout, 'object'); assert.strictEqual(typeof progressCallback, 'function'); @@ -262,8 +262,8 @@ async function upload(backupTarget, remotePath, dataLayout, progressCallback) { return await promiseRetry({ times: 5, interval: 20000, debug }, async () => { progressCallback({ message: `Uploading backup ${remotePath}` }); - const uploader = await backupTargets.storageApi(backupTarget).upload(backupTarget.config, remotePath); - const { stats, integrity } = await tarPack(dataLayout, backupTarget.encryption, uploader, progressCallback); + const uploader = await backupSites.storageApi(backupSite).upload(backupSite.config, remotePath); + const { stats, integrity } = await tarPack(dataLayout, backupSite.encryption, uploader, progressCallback); // use '.' 
instead of remote path since the backup can be moved to another path const integrityMap = new Map([ ['.', integrity] ]); @@ -271,24 +271,24 @@ async function upload(backupTarget, remotePath, dataLayout, progressCallback) { }); } -async function copy(backupTarget, fromPath, toPath, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function copy(backupSite, fromPath, toPath, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof fromPath, 'string'); assert.strictEqual(typeof toPath, 'string'); assert.strictEqual(typeof progressCallback, 'function'); - await backupTargets.storageApi(backupTarget).copy(backupTarget.config, fromPath, toPath, progressCallback); + await backupSites.storageApi(backupSite).copy(backupSite.config, fromPath, toPath, progressCallback); } -async function verify(backupTarget, remotePath, integrityMap, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function verify(backupSite, remotePath, integrityMap, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert(util.types.isMap(integrityMap), 'integrityMap should be a Map'); assert.strictEqual(typeof progressCallback, 'function'); debug(`verify: Verifying ${remotePath}`); - const inStream = await backupTargets.storageApi(backupTarget).download(backupTarget.config, remotePath); + const inStream = await backupSites.storageApi(backupSite).download(backupSite.config, remotePath); let fileCount = 0; @@ -316,8 +316,8 @@ async function verify(backupTarget, remotePath, integrityMap, progressCallback) progressCallback({ message: `Downloading ${transferred}M@${speed}MBps` }); }); - if (backupTarget.encryption) { - const decrypt = new DecryptStream(backupTarget.encryption); + if (backupSite.encryption) { + const decrypt = new DecryptStream(backupSite.encryption); const [error] = await safe(stream.pipeline(inStream, ps, hash, decrypt, gunzip, extract)); if (error) throw new BoxError(BoxError.EXTERNAL_ERROR, `tarExtract pipeline error: ${error.message}`); } else { diff --git a/src/backupintegrity.js b/src/backupintegrity.js index a922c8a50..4fc892ced 100644 --- a/src/backupintegrity.js +++ b/src/backupintegrity.js @@ -6,7 +6,7 @@ exports = module.exports = { const backups = require('./backups.js'), backupFormats = require('./backupformats.js'), - backupTargets = require('./backuptargets.js'), + backupSites = require('./backupsites.js'), BoxError = require('./boxerror'), consumers = require('node:stream/consumers'), crypto = require('node:crypto'); @@ -15,19 +15,19 @@ async function check(backupId, progressCallback) { const backup = await backups.get(backupId); if (!backup) throw new BoxError(BoxError.BAD_FIELD, 'Backup not found'); - const backupTarget = await backupTargets.get(backup.targetId); - if (!backupTarget) throw new BoxError(BoxError.BAD_FIELD, 'Backup target not found'); + const backupSite = await backupSites.get(backup.siteId); + if (!backupSite) throw new BoxError(BoxError.BAD_FIELD, 'Backup site not found'); - const stream = await backupTargets.storageApi(backupTarget).download(backupTarget.config, `${backup.remotePath}.backupinfo`); + const stream = await backupSites.storageApi(backupSite).download(backupSite.config, `${backup.remotePath}.backupinfo`); const buffer = await consumers.buffer(stream); - const validSignature = crypto.verify(null /* algo */, buffer, backupTarget.integrityKeyPair.publicKey, Buffer.from(backup.integrity.signature, 'utf8')); 
+ const validSignature = crypto.verify(null /* algo */, buffer, backupSite.integrityKeyPair.publicKey, Buffer.from(backup.integrity.signature, 'utf8')); progressCallback({ message: `Signature valid? ${validSignature}`}); const backupInfo = JSON.parse(buffer.toString('utf8')); const integrityMap = new Map(Object.entries(backupInfo)); - const verifyResult = await backupFormats.api(backupTarget.format).verify(backupTarget, backup.remotePath, integrityMap, progressCallback); + const verifyResult = await backupFormats.api(backupSite.format).verify(backupSite, backup.remotePath, integrityMap, progressCallback); progressCallback({ message: 'Verification done' }); return { diff --git a/src/backups.js b/src/backups.js index e498debd2..901206ba1 100644 --- a/src/backups.js +++ b/src/backups.js @@ -35,7 +35,7 @@ const assert = require('node:assert'), safe = require('safetydance'), tasks = require('./tasks.js'); -const BACKUPS_FIELDS = [ 'id', 'remotePath', 'label', 'identifier', 'creationTime', 'packageVersion', 'type', 'integrityJson', 'statsJson', 'dependsOnJson', 'state', 'manifestJson', 'preserveSecs', 'encryptionVersion', 'appConfigJson', 'targetId' ].join(','); +const BACKUPS_FIELDS = [ 'id', 'remotePath', 'label', 'identifier', 'creationTime', 'packageVersion', 'type', 'integrityJson', 'statsJson', 'dependsOnJson', 'state', 'manifestJson', 'preserveSecs', 'encryptionVersion', 'appConfigJson', 'siteId' ].join(','); function postProcess(result) { assert.strictEqual(typeof result, 'object'); @@ -74,7 +74,7 @@ async function add(data) { assert.strictEqual(typeof data.manifest, 'object'); assert.strictEqual(typeof data.preserveSecs, 'number'); assert.strictEqual(typeof data.appConfig, 'object'); - assert.strictEqual(typeof data.targetId, 'string'); + assert.strictEqual(typeof data.siteId, 'string'); const creationTime = data.creationTime || new Date(); // allow tests to set the time const manifestJson = JSON.stringify(data.manifest); @@ -84,8 +84,8 @@ async function add(data) { const statsJson = data.stats ? JSON.stringify(data.stats) : null; const integrityJson = data.integrity ? 
JSON.stringify(data.integrity) : null; - const [error] = await safe(database.query('INSERT INTO backups (id, remotePath, identifier, encryptionVersion, packageVersion, type, creationTime, state, dependsOnJson, manifestJson, preserveSecs, appConfigJson, targetId, statsJson, integrityJson) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', - [ id, data.remotePath, data.identifier, data.encryptionVersion, data.packageVersion, data.type, creationTime, data.state, JSON.stringify(data.dependsOn), manifestJson, data.preserveSecs, appConfigJson, data.targetId, statsJson, integrityJson ])); + const [error] = await safe(database.query('INSERT INTO backups (id, remotePath, identifier, encryptionVersion, packageVersion, type, creationTime, state, dependsOnJson, manifestJson, preserveSecs, appConfigJson, siteId, statsJson, integrityJson) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', + [ id, data.remotePath, data.identifier, data.encryptionVersion, data.packageVersion, data.type, creationTime, data.state, JSON.stringify(data.dependsOn), manifestJson, data.preserveSecs, appConfigJson, data.siteId, statsJson, integrityJson ])); if (error && error.code === 'ER_DUP_ENTRY') throw new BoxError(BoxError.ALREADY_EXISTS, 'Backup already exists'); if (error) throw error; @@ -106,11 +106,11 @@ async function getByIdentifierAndStatePaged(identifier, state, page, perPage) { return results; } -async function getLatestInTargetByIdentifier(identifier, targetId) { +async function getLatestInTargetByIdentifier(identifier, siteId) { assert.strictEqual(typeof identifier, 'string'); - assert.strictEqual(typeof targetId, 'string'); + assert.strictEqual(typeof siteId, 'string'); - const results = await database.query(`SELECT ${BACKUPS_FIELDS} FROM backups WHERE identifier = ? AND state = ? AND targetId = ? LIMIT 1`, [ identifier, exports.BACKUP_STATE_NORMAL, targetId ]); + const results = await database.query(`SELECT ${BACKUPS_FIELDS} FROM backups WHERE identifier = ? AND state = ? AND siteId = ? LIMIT 1`, [ identifier, exports.BACKUP_STATE_NORMAL, siteId ]); if (!results.length) return null; return postProcess(results[0]); } diff --git a/src/backuptargets.js b/src/backupsites.js similarity index 75% rename from src/backuptargets.js rename to src/backupsites.js index ee3499467..55f946c70 100644 --- a/src/backuptargets.js +++ b/src/backupsites.js @@ -64,10 +64,10 @@ const assert = require('node:assert'), // encryption: 'encryptionPassword' and 'encryptedFilenames' is converted into an 'encryption' object using hush.js. Password is lost forever after conversion. 
const BACKUP_TARGET_FIELDS = [ 'id', 'name', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'main', 'creationTime', 'ts', 'integrityKeyPairJson' ].join(','); -function storageApi(backupTarget) { - assert.strictEqual(typeof backupTarget, 'object'); +function storageApi(backupSite) { + assert.strictEqual(typeof backupSite, 'object'); - switch (backupTarget.provider) { + switch (backupSite.provider) { case 'nfs': return require('./storage/filesystem.js'); case 'cifs': return require('./storage/filesystem.js'); case 'sshfs': return require('./storage/filesystem.js'); @@ -94,7 +94,7 @@ function storageApi(backupTarget) { case 'contabo-objectstorage': return require('./storage/s3.js'); case 'hetzner-objectstorage': return require('./storage/s3.js'); case 'noop': return require('./storage/noop.js'); - default: throw new BoxError(BoxError.BAD_FIELD, `Unknown provider: ${backupTarget.provider}`); + default: throw new BoxError(BoxError.BAD_FIELD, `Unknown provider: ${backupSite.provider}`); } } @@ -122,18 +122,18 @@ function postProcess(result) { return result; } -function removePrivateFields(target) { - assert.strictEqual(typeof target, 'object'); +function removePrivateFields(site) { + assert.strictEqual(typeof site, 'object'); - target.encrypted = target.encryption !== null; - target.encryptedFilenames = target.encryption?.encryptedFilenames || false; - target.encryptionPasswordHint = target.encryption?.encryptionPasswordHint || null; - delete target.encryption; + site.encrypted = site.encryption !== null; + site.encryptedFilenames = site.encryption?.encryptedFilenames || false; + site.encryptionPasswordHint = site.encryption?.encryptionPasswordHint || null; + delete site.encryption; - delete target.integrityKeyPair.privateKey; + delete site.integrityKeyPair.privateKey; - target.config = storageApi(target).removePrivateFields(target.config); - return target; + site.config = storageApi(site).removePrivateFields(site.config); + return site; } function validateName(name) { @@ -178,7 +178,7 @@ async function list(page, perPage) { assert(typeof page === 'number' && page > 0); assert(typeof perPage === 'number' && perPage > 0); - const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupTargets ORDER BY name DESC LIMIT ?,?`, [ (page-1)*perPage, perPage ]); + const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupSites ORDER BY name DESC LIMIT ?,?`, [ (page-1)*perPage, perPage ]); results.forEach(function (result) { postProcess(result); }); @@ -186,19 +186,19 @@ async function list(page, perPage) { } async function get(id) { - const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupTargets WHERE id=?`, [ id ]); + const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupSites WHERE id=?`, [ id ]); if (results.length === 0) return null; return postProcess(results[0]); } async function getPrimary() { - const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupTargets WHERE main=?`, [ true ]); + const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupSites WHERE main=?`, [ true ]); if (results.length === 0) return null; return postProcess(results[0]); } -async function update(target, data) { - assert.strictEqual(typeof target, 'object'); +async function update(site, data) { + assert.strictEqual(typeof site, 'object'); assert(data && typeof data === 'object'); const args = []; @@ -212,66 +212,66 @@ async function 
update(target, data) { args.push(JSON.stringify(data[k])); } } - args.push(target.id); + args.push(site.id); - const [updateError, result] = await safe(database.query('UPDATE backupTargets SET ' + fields.join(', ') + ' WHERE id = ?', args)); + const [updateError, result] = await safe(database.query('UPDATE backupSites SET ' + fields.join(', ') + ' WHERE id = ?', args)); if (updateError) throw updateError; if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found'); } -async function setSchedule(backupTarget, schedule, auditSource) { - assert.strictEqual(typeof backupTarget, 'object'); +async function setSchedule(backupSite, schedule, auditSource) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof schedule, 'string'); assert.strictEqual(typeof auditSource, 'object'); const error = await validateSchedule(schedule); if (error) throw error; - await update(backupTarget, { schedule }); - await cron.handleBackupScheduleChanged(Object.assign({}, backupTarget, { schedule })); - await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupTarget, schedule }); + await update(backupSite, { schedule }); + await cron.handleBackupScheduleChanged(Object.assign({}, backupSite, { schedule })); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, schedule }); } -async function setLimits(backupTarget, limits, auditSource) { - assert.strictEqual(typeof backupTarget, 'object'); +async function setLimits(backupSite, limits, auditSource) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof limits, 'object'); assert.strictEqual(typeof auditSource, 'object'); - await update(backupTarget, { limits }); - await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupTarget, limits }); + await update(backupSite, { limits }); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, limits }); } -async function setRetention(backupTarget, retention, auditSource) { - assert.strictEqual(typeof backupTarget, 'object'); +async function setRetention(backupSite, retention, auditSource) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof retention, 'object'); assert.strictEqual(typeof auditSource, 'object'); const error = await validateRetention(retention); if (error) throw error; - await update(backupTarget, { retention }); - await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupTarget, retention }); + await update(backupSite, { retention }); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, retention }); } -async function setPrimary(backupTarget, auditSource) { - assert.strictEqual(typeof backupTarget, 'object'); +async function setPrimary(backupSite, auditSource) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof auditSource, 'object'); const queries = [ - { query: 'SELECT 1 FROM backupTargets WHERE id=? FOR UPDATE', args: [ backupTarget.id ] }, // ensure this exists! - { query: 'UPDATE backupTargets SET main=?', args: [ false ] }, - { query: 'UPDATE backupTargets SET main=? WHERE id=?', args: [ true, backupTarget.id ] } + { query: 'SELECT 1 FROM backupSites WHERE id=? FOR UPDATE', args: [ backupSite.id ] }, // ensure this exists! + { query: 'UPDATE backupSites SET main=?', args: [ false ] }, + { query: 'UPDATE backupSites SET main=? 
WHERE id=?', args: [ true, backupSite.id ] } ]; const [error, result] = await safe(database.transaction(queries)); if (error) throw error; if (result[2].affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found'); - await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupTarget, primary: true }); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, primary: true }); } -async function setEncryption(backupTarget, data, auditSource) { - assert.strictEqual(typeof backupTarget, 'object'); +async function setEncryption(backupSite, data, auditSource) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof data, 'object'); assert.strictEqual(typeof auditSource, 'object'); @@ -285,65 +285,65 @@ async function setEncryption(backupTarget, data, auditSource) { } const queries = [ - { query: 'DELETE FROM backups WHERE targetId=?', args: [ backupTarget.id ] }, - { query: 'UPDATE backupTargets SET encryptionJson=? WHERE id=?', args: [ encryption ? JSON.stringify(encryption) : null, backupTarget.id ] }, + { query: 'DELETE FROM backups WHERE siteId=?', args: [ backupSite.id ] }, + { query: 'UPDATE backupSites SET encryptionJson=? WHERE id=?', args: [ encryption ? JSON.stringify(encryption) : null, backupSite.id ] }, ]; const [error, result] = await safe(database.transaction(queries)); if (error) throw error; if (result[1].affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found'); - await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupTarget, encryption: !!encryption }); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, encryption: !!encryption }); } -async function setName(backupTarget, name, auditSource) { - assert.strictEqual(typeof backupTarget, 'object'); +async function setName(backupSite, name, auditSource) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof name, 'string'); assert.strictEqual(typeof auditSource, 'object'); const nameError = validateName(name); if (nameError) throw nameError; - await update(backupTarget, { name }); - await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupTarget, name }); + await update(backupSite, { name }); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, name }); } -async function del(backupTarget, auditSource) { - assert.strictEqual(typeof backupTarget, 'object'); +async function del(backupSite, auditSource) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof auditSource, 'object'); - await safe(storageApi(backupTarget).teardown(backupTarget.config), { debug }); // ignore error + await safe(storageApi(backupSite).teardown(backupSite.config), { debug }); // ignore error - if (backupTarget.primary) throw new BoxError(BoxError.CONFLICT, 'Cannot delete the primary backup target'); + if (backupSite.primary) throw new BoxError(BoxError.CONFLICT, 'Cannot delete the primary backup site'); const queries = [ - { query: 'DELETE FROM archives WHERE backupId IN (SELECT id FROM backups WHERE targetId=?)', args: [ backupTarget.id ] }, - { query: 'DELETE FROM backups WHERE targetId=?', args: [ backupTarget.id ] }, - { query: 'DELETE FROM backupTargets WHERE id=? 
AND main=?', args: [ backupTarget.id, false ] }, // cannot delete primary + { query: 'DELETE FROM archives WHERE backupId IN (SELECT id FROM backups WHERE siteId=?)', args: [ backupSite.id ] }, + { query: 'DELETE FROM backups WHERE siteId=?', args: [ backupSite.id ] }, + { query: 'DELETE FROM backupSites WHERE id=? AND main=?', args: [ backupSite.id, false ] }, // cannot delete primary ]; const [error, result] = await safe(database.transaction(queries)); if (error && error.code === 'ER_NO_REFERENCED_ROW_2') throw new BoxError(BoxError.NOT_FOUND, error); if (error) throw error; if (result[2].affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Target not found'); - await eventlog.add(eventlog.ACTION_BACKUP_TARGET_REMOVE, auditSource, { backupTarget: backupTarget }); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_REMOVE, auditSource, { backupSite: backupSite }); - backupTarget.schedule = constants.CRON_PATTERN_NEVER; - await cron.handleBackupScheduleChanged(backupTarget); + backupSite.schedule = constants.CRON_PATTERN_NEVER; + await cron.handleBackupScheduleChanged(backupSite); - const infoDir = path.join(paths.BACKUP_INFO_DIR, backupTarget.id); + const infoDir = path.join(paths.BACKUP_INFO_DIR, backupSite.id); safe.fs.rmSync(infoDir, { recursive: true }); } -async function startBackupTask(target, auditSource) { - assert.strictEqual(typeof target, 'object'); +async function startBackupTask(site, auditSource) { + assert.strictEqual(typeof site, 'object'); - const [error] = await safe(locks.acquire(`${locks.TYPE_FULL_BACKUP_TASK_PREFIX}${target.id}`)); + const [error] = await safe(locks.acquire(`${locks.TYPE_FULL_BACKUP_TASK_PREFIX}${site.id}`)); if (error) throw new BoxError(BoxError.BAD_STATE, `Another backup task is in progress: ${error.message}`); - const memoryLimit = target.limits?.memoryLimit ? Math.max(target.limits.memoryLimit/1024/1024, 1024) : 1024; + const memoryLimit = site.limits?.memoryLimit ? Math.max(site.limits.memoryLimit/1024/1024, 1024) : 1024; - const taskId = await tasks.add(`${tasks.TASK_FULL_BACKUP_PREFIX}${target.id}`, [ target.id, { /* options */ } ]); + const taskId = await tasks.add(`${tasks.TASK_FULL_BACKUP_PREFIX}${site.id}`, [ site.id, { /* options */ } ]); await eventlog.add(eventlog.ACTION_BACKUP_START, auditSource, { taskId }); @@ -358,17 +358,17 @@ async function startBackupTask(target, auditSource) { await safe(eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, errorMessage: error.message, timedOut })); }) .finally(async () => { - await locks.release(`${locks.TYPE_FULL_BACKUP_TASK_PREFIX}${target.id}`); + await locks.release(`${locks.TYPE_FULL_BACKUP_TASK_PREFIX}${site.id}`); await locks.releaseByTaskId(taskId); }); return taskId; } -async function removeCacheFiles(backupTarget) { - assert.strictEqual(typeof backupTarget, 'object'); +async function removeCacheFiles(backupSite) { + assert.strictEqual(typeof backupSite, 'object'); - const infoDir = path.join(paths.BACKUP_INFO_DIR, backupTarget.id); + const infoDir = path.join(paths.BACKUP_INFO_DIR, backupSite.id); const files = safe.fs.readdirSync(infoDir); if (!files) throw new BoxError(BoxError.FS_ERROR, `Unable to access ${infoDir}: ${safe.error.message}`); for (const f of files) { @@ -378,22 +378,22 @@ async function removeCacheFiles(backupTarget) { } // keeps track of contents of the snapshot directory. 
this provides a way to clean up backups of uninstalled apps -async function getSnapshotInfo(backupTarget) { - assert.strictEqual(typeof backupTarget, 'object'); +async function getSnapshotInfo(backupSite) { + assert.strictEqual(typeof backupSite, 'object'); - const snapshotFilePath = path.join(paths.BACKUP_INFO_DIR, backupTarget.id, constants.SNAPSHOT_INFO_FILENAME); + const snapshotFilePath = path.join(paths.BACKUP_INFO_DIR, backupSite.id, constants.SNAPSHOT_INFO_FILENAME); const contents = safe.fs.readFileSync(snapshotFilePath, 'utf8'); const info = safe.JSON.parse(contents); return info || {}; } // keeps track of contents of the snapshot directory. this provides a way to clean up backups of uninstalled apps -async function setSnapshotInfo(backupTarget, id, info) { - assert.strictEqual(typeof backupTarget, 'object'); +async function setSnapshotInfo(backupSite, id, info) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof id, 'string'); // 'box', 'mail' or appId assert.strictEqual(typeof info, 'object'); - const infoDir = path.join(paths.BACKUP_INFO_DIR, backupTarget.id); + const infoDir = path.join(paths.BACKUP_INFO_DIR, backupSite.id); const snapshotFilePath = path.join(infoDir, constants.SNAPSHOT_INFO_FILENAME); const contents = safe.fs.readFileSync(snapshotFilePath, 'utf8'); const data = safe.JSON.parse(contents) || {}; @@ -408,11 +408,11 @@ async function setSnapshotInfo(backupTarget, id, info) { } } -async function startCleanupTask(backupTarget, auditSource) { - assert.strictEqual(typeof backupTarget, 'object'); +async function startCleanupTask(backupSite, auditSource) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof auditSource, 'object'); - const taskId = await tasks.add(`${tasks.TASK_CLEAN_BACKUPS_PREFIX}${backupTarget.id}`, [ backupTarget.id ]); + const taskId = await tasks.add(`${tasks.TASK_CLEAN_BACKUPS_PREFIX}${backupSite.id}`, [ backupSite.id ]); // background tasks.startTask(taskId, {}) @@ -426,53 +426,53 @@ async function startCleanupTask(backupTarget, auditSource) { return taskId; } -async function remount(target) { - assert.strictEqual(typeof target, 'object'); +async function remount(site) { + assert.strictEqual(typeof site, 'object'); - await storageApi(target).setup(target.config); + await storageApi(site).setup(site.config); } -async function getStatus(target) { - assert.strictEqual(typeof target, 'object'); +async function getStatus(site) { + assert.strictEqual(typeof site, 'object'); - return await storageApi(target).getStatus(target.config); + return await storageApi(site).getStatus(site.config); } -async function ensureMounted(target) { - assert.strictEqual(typeof target, 'object'); +async function ensureMounted(site) { + assert.strictEqual(typeof site, 'object'); - const status = await getStatus(target); + const status = await getStatus(site); if (status.state === 'active') return status; await remount(site); - return await getStatus(target); + return await getStatus(site); } -async function setConfig(backupTarget, newConfig, auditSource) { - assert.strictEqual(typeof backupTarget, 'object'); +async function setConfig(backupSite, newConfig, auditSource) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof newConfig, 'object'); assert.strictEqual(typeof auditSource, 'object'); if (constants.DEMO) throw new BoxError(BoxError.BAD_STATE, 'Not allowed in demo mode'); - const oldConfig = backupTarget.config; + const oldConfig = backupSite.config; - 
storageApi(backupTarget).injectPrivateFields(newConfig, oldConfig); + storageApi(backupSite).injectPrivateFields(newConfig, oldConfig); debug('setConfig: validating new storage configuration'); - const sanitizedConfig = await storageApi(backupTarget).verifyConfig({ id: backupTarget.id, provider: backupTarget.provider, config: newConfig }); + const sanitizedConfig = await storageApi(backupSite).verifyConfig({ id: backupSite.id, provider: backupSite.provider, config: newConfig }); debug('setConfig: clearing backup cache'); // FIXME: this cleans up the cache files in case the bucket or the prefix changes and the destination already has something there // however, this will also resync if just the credentials change - await removeCacheFiles(backupTarget); + await removeCacheFiles(backupSite); - await update(backupTarget, { config: sanitizedConfig }); + await update(backupSite, { config: sanitizedConfig }); debug('setConfig: setting up new storage configuration'); - await storageApi(backupTarget).setup(sanitizedConfig); + await storageApi(backupSite).setup(sanitizedConfig); - await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupTarget, newConfig }); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupSite, newConfig }); } async function add(data, auditSource) { @@ -513,7 +513,7 @@ async function add(data, auditSource) { debug('add: validating new storage configuration'); const sanitizedConfig = await storageApi({ provider }).verifyConfig({id, provider, config }); - await database.query('INSERT INTO backupTargets (id, name, provider, configJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', + await database.query('INSERT INTO backupSites (id, name, provider, configJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [ id, name, provider, JSON.stringify(sanitizedConfig), JSON.stringify(limits), JSON.stringify(integrityKeyPair), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, false ]); debug('add: setting up new storage configuration'); @@ -527,8 +527,8 @@ async function add(data, auditSource) { async function addDefault(auditSource) { assert.strictEqual(typeof auditSource, 'object'); - debug('addDefault: adding default backup target'); - const defaultBackupTarget = { + debug('addDefault: adding default backup site'); + const defaultBackupSite = { name: 'Default', provider: 'filesystem', config: { backupDir: paths.DEFAULT_BACKUP_DIR }, @@ -536,11 +536,11 @@ async function addDefault(auditSource) { schedule: '00 00 23 * * *', format: 'tgz' }; - defaultBackupTarget.id = await add(defaultBackupTarget, auditSource); - await setPrimary(defaultBackupTarget, auditSource); + defaultBackupSite.id = await add(defaultBackupSite, auditSource); + await setPrimary(defaultBackupSite, auditSource); } -// creates a backup target object that is not in the database +// creates a backup site object that is not in the database async function createPseudo(data) { assert.strictEqual(typeof data, 'object'); diff --git a/src/backuptask.js b/src/backuptask.js index d9c88c32b..273550130 100644 --- a/src/backuptask.js +++ b/src/backuptask.js @@ -18,7 +18,7 @@ const apps = require('./apps.js'), assert = require('node:assert'), backupFormats = require('./backupformats.js'), backups = require('./backups.js'), - backupTargets = require('./backuptargets.js'), + backupSites = 
require('./backupsites.js'), BoxError = require('./boxerror.js'), constants = require('./constants.js'), crypto = require('node:crypto'), @@ -37,25 +37,25 @@ const apps = require('./apps.js'), const BACKUP_UPLOAD_CMD = path.join(__dirname, 'scripts/backupupload.js'); -function addFileExtension(backupTarget, remotePath) { - assert.strictEqual(typeof backupTarget, 'object'); +function addFileExtension(backupSite, remotePath) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); - const ext = backupFormats.api(backupTarget.format).getFileExtension(!!backupTarget.encyption); + const ext = backupFormats.api(backupSite.format).getFileExtension(!!backupSite.encryption); return remotePath + ext; } -async function checkPreconditions(backupTarget, dataLayout) { - assert.strictEqual(typeof backupTarget, 'object'); +async function checkPreconditions(backupSite, dataLayout) { + assert.strictEqual(typeof backupSite, 'object'); assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout'); // check mount status before uploading - const status = await backupTargets.ensureMounted(backupTarget); + const status = await backupSites.ensureMounted(backupSite); debug(`checkPreconditions: mount point status is ${JSON.stringify(status)}`); if (status.state !== 'active') throw new BoxError(BoxError.MOUNT_ERROR, `Backup endpoint is not active: ${status.message}`); // check available size. this requires root for df to work - const available = await backupTargets.storageApi(backupTarget).getAvailableSize(backupTarget.config); + const available = await backupSites.storageApi(backupSite).getAvailableSize(backupSite.config); let used = 0; for (const localPath of dataLayout.localPaths()) { debug(`checkPreconditions: getting disk usage of ${localPath}`); @@ -72,53 +72,53 @@ async function checkPreconditions(backupTarget, dataLayout) { if (available <= needed) throw new BoxError(BoxError.FS_ERROR, `Not enough disk space for backup.
Needed: ${df.prettyBytes(needed)} Available: ${df.prettyBytes(available)}`); } -async function uploadBackupInfo(backupTarget, remotePath, integrityMap) { +async function uploadBackupInfo(backupSite, remotePath, integrityMap) { const sortedIntegrityMap = [...integrityMap.entries()].sort(([a], [b]) => a < b ? -1 : 1); // for readability, order the entries const integrityDataJsonString = JSON.stringify(Object.fromEntries(sortedIntegrityMap), null, 2); const integrityDataStream = Readable.from(integrityDataJsonString); - const integrityUploader = await backupTargets.storageApi(backupTarget).upload(backupTarget.config, `${remotePath}.backupinfo`); + const integrityUploader = await backupSites.storageApi(backupSite).upload(backupSite.config, `${remotePath}.backupinfo`); await stream.pipeline(integrityDataStream, integrityUploader.stream); await integrityUploader.finish(); - return await crypto.sign(null /* algorithm */, integrityDataJsonString, backupTarget.integrityKeyPair.privateKey); + return await crypto.sign(null /* algorithm */, integrityDataJsonString, backupSite.integrityKeyPair.privateKey); } // this function is called via backupupload (since it needs root to traverse app's directory) -async function upload(remotePath, targetId, dataLayoutString, progressCallback) { +async function upload(remotePath, siteId, dataLayoutString, progressCallback) { assert.strictEqual(typeof remotePath, 'string'); - assert.strictEqual(typeof targetId, 'string'); + assert.strictEqual(typeof siteId, 'string'); assert.strictEqual(typeof dataLayoutString, 'string'); assert.strictEqual(typeof progressCallback, 'function'); - debug(`upload: path ${remotePath} target ${targetId} dataLayout ${dataLayoutString}`); + debug(`upload: path ${remotePath} site ${siteId} dataLayout ${dataLayoutString}`); - const backupTarget = await backupTargets.get(targetId); - if (!backupTarget) throw new BoxError(BoxError.NOT_FOUND, 'Backup target not found'); + const backupSite = await backupSites.get(siteId); + if (!backupSite) throw new BoxError(BoxError.NOT_FOUND, 'Backup site not found'); const dataLayout = DataLayout.fromString(dataLayoutString); - await checkPreconditions(backupTarget, dataLayout); + await checkPreconditions(backupSite, dataLayout); - const { stats, integrityMap } = await backupFormats.api(backupTarget.format).upload(backupTarget, remotePath, dataLayout, progressCallback); + const { stats, integrityMap } = await backupFormats.api(backupSite.format).upload(backupSite, remotePath, dataLayout, progressCallback); progressCallback({ message: `Uploading integrity information to ${remotePath}.backupinfo` }); - const signature = await uploadBackupInfo(backupTarget, remotePath, integrityMap); + const signature = await uploadBackupInfo(backupSite, remotePath, integrityMap); return { stats, integrity: { signature } }; } -async function download(backupTarget, remotePath, dataLayout, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function download(backupSite, remotePath, dataLayout, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout'); assert.strictEqual(typeof progressCallback, 'function'); - debug(`download: Downloading ${remotePath} of format ${backupTarget.format} (encrypted: ${!!backupTarget.encryption}) to ${dataLayout.toString()}`); + debug(`download: Downloading ${remotePath} of format ${backupSite.format} (encrypted: ${!!backupSite.encryption}) to
${dataLayout.toString()}`); - await backupFormats.api(backupTarget.format).download(backupTarget, remotePath, dataLayout, progressCallback); + await backupFormats.api(backupSite.format).download(backupSite, remotePath, dataLayout, progressCallback); } -async function restore(backupTarget, remotePath, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function restore(backupSite, remotePath, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert.strictEqual(typeof progressCallback, 'function'); @@ -126,7 +126,7 @@ async function restore(backupTarget, remotePath, progressCallback) { if (!boxDataDir) throw new BoxError(BoxError.FS_ERROR, `Error resolving boxdata: ${safe.error.message}`); const dataLayout = new DataLayout(boxDataDir, []); - await download(backupTarget, remotePath, dataLayout, progressCallback); + await download(backupSite, remotePath, dataLayout, progressCallback); debug('restore: download completed, importing database'); @@ -146,15 +146,15 @@ async function downloadApp(app, restoreConfig, progressCallback) { const dataLayout = new DataLayout(appDataDir, app.storageVolumeId ? [{ localDir: await apps.getStorageDir(app), remoteDir: 'data' }] : []); const startTime = new Date(); - let { backupTarget, remotePath } = restoreConfig; // set when importing + let { backupSite, remotePath } = restoreConfig; // set when importing if (!remotePath) { const backup = await backups.get(restoreConfig.backupId); if (!backup) throw new BoxError(BoxError.BAD_FIELD, 'No such backup'); remotePath = backup.remotePath; - backupTarget = await backupTargets.get(backup.targetId); + backupSite = await backupSites.get(backup.siteId); } - await download(backupTarget, remotePath, dataLayout, progressCallback); + await download(backupSite, remotePath, dataLayout, progressCallback); debug('downloadApp: time: %s', (new Date() - startTime)/1000); } @@ -162,16 +162,16 @@ async function runBackupUpload(uploadConfig, progressCallback) { assert.strictEqual(typeof uploadConfig, 'object'); assert.strictEqual(typeof progressCallback, 'function'); - const { remotePath, backupTarget, dataLayout, progressTag } = uploadConfig; + const { remotePath, backupSite, dataLayout, progressTag } = uploadConfig; assert.strictEqual(typeof remotePath, 'string'); - assert.strictEqual(typeof backupTarget, 'object'); + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof progressTag, 'string'); assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout'); // https://stackoverflow.com/questions/48387040/node-js-recommended-max-old-space-size const envCopy = Object.assign({}, process.env); - if (backupTarget.limits?.memoryLimit >= 2*1024*1024*1024) { - const heapSize = Math.min((backupTarget.limits.memoryLimit/1024/1024) - 256, 8192); + if (backupSite.limits?.memoryLimit >= 2*1024*1024*1024) { + const heapSize = Math.min((backupSite.limits.memoryLimit/1024/1024) - 256, 8192); debug(`runBackupUpload: adjusting heap size to ${heapSize}M`); envCopy.NODE_OPTIONS = `--max-old-space-size=${heapSize}`; } @@ -184,7 +184,7 @@ async function runBackupUpload(uploadConfig, progressCallback) { } // do not use debug for logging child output because it already has timestamps via its own debug - const [error] = await
safe(shell.sudo([ BACKUP_UPLOAD_CMD, remotePath, backupSite.id, dataLayout.toString() ], { env: envCopy, preserveEnv: true, onMessage, logger: process.stdout.write })); if (error && (error.code === null /* signal */ || (error.code !== 0 && error.code !== 50))) { // backuptask crashed debug(`runBackupUpload: backuptask crashed`, error); throw new BoxError(BoxError.INTERNAL_ERROR, 'Backuptask crashed'); @@ -206,20 +206,20 @@ async function snapshotBox(progressCallback) { debug(`snapshotBox: took ${(new Date() - startTime)/1000} seconds`); } -async function uploadBoxSnapshot(backupTarget, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function uploadBoxSnapshot(backupSite, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof progressCallback, 'function'); await snapshotBox(progressCallback); - const remotePath = addFileExtension(backupTarget, `snapshot/box`); + const remotePath = addFileExtension(backupSite, `snapshot/box`); const boxDataDir = safe.fs.realpathSync(paths.BOX_DATA_DIR); if (!boxDataDir) throw new BoxError(BoxError.FS_ERROR, `Error resolving boxdata: ${safe.error.message}`); const uploadConfig = { remotePath, - backupTarget, + backupSite, dataLayout: new DataLayout(boxDataDir, []), progressTag: 'box' }; @@ -232,26 +232,26 @@ async function uploadBoxSnapshot(backupTarget, progressCallback) { debug(`uploadBoxSnapshot: took ${(new Date() - startTime)/1000} seconds`); - await backupTargets.setSnapshotInfo(backupTarget, 'box', { timestamp: new Date().toISOString() }); + await backupSites.setSnapshotInfo(backupSite, 'box', { timestamp: new Date().toISOString() }); return { stats, integrity }; } -async function copy(backupTarget, srcRemotePath, destRemotePath, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function copy(backupSite, srcRemotePath, destRemotePath, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof srcRemotePath, 'string'); assert.strictEqual(typeof destRemotePath, 'string'); assert.strictEqual(typeof progressCallback, 'function'); const startTime = new Date(); - const [copyError] = await safe(backupFormats.api(backupTarget.format).copy(backupTarget, srcRemotePath, destRemotePath, progressCallback)); + const [copyError] = await safe(backupFormats.api(backupSite.format).copy(backupSite, srcRemotePath, destRemotePath, progressCallback)); if (copyError) { debug(`copy: copy to ${destRemotePath} errored. error: ${copyError.message}`); throw copyError; } debug(`copy: copied successfully to ${destRemotePath}. Took ${(new Date() - startTime)/1000} seconds`); - const [copyChecksumError] = await safe(backupTargets.storageApi(backupTarget).copy(backupTarget.config, `${srcRemotePath}.backupinfo`, `${destRemotePath}.backupinfo`, progressCallback)); + const [copyChecksumError] = await safe(backupSites.storageApi(backupSite).copy(backupSite.config, `${srcRemotePath}.backupinfo`, `${destRemotePath}.backupinfo`, progressCallback)); if (copyChecksumError) { debug(`copy: copy to ${destRemotePath} errored. 
error: ${copyChecksumError.message}`); throw copyChecksumError; @@ -259,22 +259,22 @@ async function copy(backupTarget, srcRemotePath, destRemotePath, progressCallbac debug(`copy: copied backupinfo successfully to ${destRemotePath}.backupinfo`); } -async function backupBox(backupTarget, dependsOn, tag, options, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function backupBox(backupSite, dependsOn, tag, options, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert(Array.isArray(dependsOn)); assert.strictEqual(typeof tag, 'string'); assert.strictEqual(typeof options, 'object'); assert.strictEqual(typeof progressCallback, 'function'); - const { stats, integrity } = await uploadBoxSnapshot(backupTarget, progressCallback); + const { stats, integrity } = await uploadBoxSnapshot(backupSite, progressCallback); - const remotePath = addFileExtension(backupTarget, `${tag}/box_v${constants.VERSION}`); + const remotePath = addFileExtension(backupSite, `${tag}/box_v${constants.VERSION}`); - debug(`backupBox: rotating box snapshot of ${backupTarget.id} to id ${remotePath}`); + debug(`backupBox: rotating box snapshot of ${backupSite.id} to id ${remotePath}`); const data = { remotePath, - encryptionVersion: backupTarget.encryption ? 2 : null, + encryptionVersion: backupSite.encryption ? 2 : null, packageVersion: constants.VERSION, type: backups.BACKUP_TYPE_BOX, state: backups.BACKUP_STATE_CREATING, @@ -283,14 +283,14 @@ async function backupBox(backupTarget, dependsOn, tag, options, progressCallback manifest: null, preserveSecs: options.preserveSecs || 0, appConfig: null, - targetId: backupTarget.id, + siteId: backupSite.id, stats, integrity }; const id = await backups.add(data); - const snapshotPath = addFileExtension(backupTarget, 'snapshot/box'); - const [error] = await safe(copy(backupTarget, snapshotPath, remotePath, progressCallback)); + const snapshotPath = addFileExtension(backupSite, 'snapshot/box'); + const [error] = await safe(copy(backupSite, snapshotPath, remotePath, progressCallback)); const state = error ? backups.BACKUP_STATE_ERROR : backups.BACKUP_STATE_NORMAL; await backups.setState(id, state); if (error) throw error; @@ -311,14 +311,14 @@ async function snapshotApp(app, progressCallback) { debug(`snapshotApp: ${app.fqdn} took ${(new Date() - startTime)/1000} seconds`); } -async function uploadAppSnapshot(backupTarget, app, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function uploadAppSnapshot(backupSite, app, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof app, 'object'); assert.strictEqual(typeof progressCallback, 'function'); await snapshotApp(app, progressCallback); - const remotePath = addFileExtension(backupTarget, `snapshot/app_${app.id}`); + const remotePath = addFileExtension(backupSite, `snapshot/app_${app.id}`); const appDataDir = safe.fs.realpathSync(path.join(paths.APPS_DATA_DIR, app.id)); if (!appDataDir) throw new BoxError(BoxError.FS_ERROR, `Error resolving appsdata: ${safe.error.message}`); @@ -328,7 +328,7 @@ async function uploadAppSnapshot(backupTarget, app, progressCallback) { const uploadConfig = { remotePath, - backupTarget, + backupSite, dataLayout, progressTag: app.fqdn }; @@ -339,34 +339,34 @@ async function uploadAppSnapshot(backupTarget, app, progressCallback) { debug(`uploadAppSnapshot: ${app.fqdn} uploaded to ${remotePath}. 
${(new Date() - startTime)/1000} seconds`); - await backupTargets.setSnapshotInfo(backupTarget, app.id, { timestamp: new Date().toISOString(), manifest: app.manifest }); + await backupSites.setSnapshotInfo(backupSite, app.id, { timestamp: new Date().toISOString(), manifest: app.manifest }); return { stats, integrity }; } -async function backupAppWithTag(app, backupTarget, tag, options, progressCallback) { +async function backupAppWithTag(app, backupSite, tag, options, progressCallback) { assert.strictEqual(typeof app, 'object'); - assert.strictEqual(typeof backupTarget, 'object'); + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof tag, 'string'); assert.strictEqual(typeof options, 'object'); assert.strictEqual(typeof progressCallback, 'function'); if (!apps.canBackupApp(app)) { // if we cannot backup, reuse its most recent backup - const lastKnownGoodAppBackup = await backups.getLatestInTargetByIdentifier(app.id, backupTarget.id); + const lastKnownGoodAppBackup = await backups.getLatestInTargetByIdentifier(app.id, backupSite.id); if (lastKnownGoodAppBackup === null) return null; // no backup to re-use return lastKnownGoodAppBackup.id; } - const { stats, integrity } = await uploadAppSnapshot(backupTarget, app, progressCallback); + const { stats, integrity } = await uploadAppSnapshot(backupSite, app, progressCallback); const manifest = app.manifest; - const remotePath = addFileExtension(backupTarget, `${tag}/app_${app.fqdn}_v${manifest.version}`); + const remotePath = addFileExtension(backupSite, `${tag}/app_${app.fqdn}_v${manifest.version}`); - debug(`backupAppWithTag: rotating ${app.fqdn} snapshot of ${backupTarget.id} to path ${remotePath}`); + debug(`backupAppWithTag: rotating ${app.fqdn} snapshot of ${backupSite.id} to path ${remotePath}`); const data = { remotePath, - encryptionVersion: backupTarget.encryption ? 2 : null, + encryptionVersion: backupSite.encryption ? 2 : null, packageVersion: manifest.version, type: backups.BACKUP_TYPE_APP, state: backups.BACKUP_STATE_CREATING, @@ -375,14 +375,14 @@ async function backupAppWithTag(app, backupTarget, tag, options, progressCallbac manifest, preserveSecs: options.preserveSecs || 0, appConfig: app, - targetId: backupTarget.id, + siteId: backupSite.id, stats, integrity }; const id = await backups.add(data); - const snapshotPath = addFileExtension(backupTarget, `snapshot/app_${app.id}`); - const [error] = await safe(copy(backupTarget, snapshotPath, remotePath, progressCallback)); + const snapshotPath = addFileExtension(backupSite, `snapshot/app_${app.id}`); + const [error] = await safe(copy(backupSite, snapshotPath, remotePath, progressCallback)); const state = error ?
backups.BACKUP_STATE_ERROR : backups.BACKUP_STATE_NORMAL; await backups.setState(id, state); if (error) throw error; @@ -390,9 +390,9 @@ async function backupAppWithTag(app, backupTarget, tag, options, progressCallbac return id; } -async function backupApp(app, backupTarget, options, progressCallback) { +async function backupApp(app, backupSite, options, progressCallback) { assert.strictEqual(typeof app, 'object'); - assert.strictEqual(typeof backupTarget, 'object'); + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof options, 'object'); assert.strictEqual(typeof progressCallback, 'function'); @@ -402,25 +402,25 @@ async function backupApp(app, backupTarget, options, progressCallback) { await snapshotApp(app, progressCallback); } else { const tag = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,''); - backupId = await backupAppWithTag(app, backupTarget, tag, options, progressCallback); + backupId = await backupAppWithTag(app, backupSite, tag, options, progressCallback); } await locks.release(`${locks.TYPE_APP_BACKUP_PREFIX}${app.id}`); return backupId; } -async function uploadMailSnapshot(backupTarget, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function uploadMailSnapshot(backupSite, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof progressCallback, 'function'); - const remotePath = addFileExtension(backupTarget, 'snapshot/mail'); + const remotePath = addFileExtension(backupSite, 'snapshot/mail'); const mailDataDir = safe.fs.realpathSync(paths.MAIL_DATA_DIR); if (!mailDataDir) throw new BoxError(BoxError.FS_ERROR, `Error resolving maildata: ${safe.error.message}`); const uploadConfig = { remotePath, - backupTarget, + backupSite, dataLayout: new DataLayout(mailDataDir, []), progressTag: 'mail' }; @@ -433,28 +433,28 @@ async function uploadMailSnapshot(backupTarget, progressCallback) { debug(`uploadMailSnapshot: took ${(new Date() - startTime)/1000} seconds`); - await backupTargets.setSnapshotInfo(backupTarget, 'mail', { timestamp: new Date().toISOString() }); + await backupSites.setSnapshotInfo(backupSite, 'mail', { timestamp: new Date().toISOString() }); return { stats, integrity }; } -async function backupMailWithTag(backupTarget, tag, options, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function backupMailWithTag(backupSite, tag, options, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof tag, 'string'); assert.strictEqual(typeof options, 'object'); assert.strictEqual(typeof progressCallback, 'function'); debug(`backupMailWithTag: backing up mail with tag ${tag}`); - const { stats, integrity } = await uploadMailSnapshot(backupTarget, progressCallback); + const { stats, integrity } = await uploadMailSnapshot(backupSite, progressCallback); - const remotePath = addFileExtension(backupTarget, `${tag}/mail_v${constants.VERSION}`); + const remotePath = addFileExtension(backupSite, `${tag}/mail_v${constants.VERSION}`); - debug(`backupMailWithTag: rotating mail snapshot of ${backupTarget.id} to ${remotePath}`); + debug(`backupMailWithTag: rotating mail snapshot of ${backupSite.id} to ${remotePath}`); const data = { remotePath, - encryptionVersion: backupTarget.encryption ? 2 : null, + encryptionVersion: backupSite.encryption ? 
2 : null, packageVersion: constants.VERSION, type: backups.BACKUP_TYPE_MAIL, state: backups.BACKUP_STATE_CREATING, @@ -463,14 +463,14 @@ async function backupMailWithTag(backupTarget, tag, options, progressCallback) { manifest: null, preserveSecs: options.preserveSecs || 0, appConfig: null, - targetId: backupTarget.id, + siteId: backupSite.id, stats, integrity }; const id = await backups.add(data); - const snapshotPath = addFileExtension(backupTarget, 'snapshot/mail'); - const [error] = await safe(copy(backupTarget, snapshotPath, remotePath, progressCallback)); + const snapshotPath = addFileExtension(backupSite, 'snapshot/mail'); + const [error] = await safe(copy(backupSite, snapshotPath, remotePath, progressCallback)); const state = error ? backups.BACKUP_STATE_ERROR : backups.BACKUP_STATE_NORMAL; await backups.setState(id, state); if (error) throw error; @@ -478,8 +478,8 @@ async function backupMailWithTag(backupTarget, tag, options, progressCallback) { return id; } -async function downloadMail(backupTarget, remotePath, progressCallback) { - assert.strictEqual(typeof backupTarget, 'object'); +async function downloadMail(backupSite, remotePath, progressCallback) { + assert.strictEqual(typeof backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert.strictEqual(typeof progressCallback, 'function'); @@ -489,18 +489,18 @@ async function downloadMail(backupTarget, remotePath, progressCallback) { const startTime = new Date(); - await download(backupTarget, remotePath, dataLayout, progressCallback); + await download(backupSite, remotePath, dataLayout, progressCallback); debug('downloadMail: time: %s', (new Date() - startTime)/1000); } // this function is called from external process. calling process is expected to have a lock -async function fullBackup(backupTargetId, options, progressCallback) { - assert.strictEqual(typeof backupTargetId, 'string'); +async function fullBackup(backupSiteId, options, progressCallback) { + assert.strictEqual(typeof backupSiteId, 'string'); assert.strictEqual(typeof options, 'object'); assert.strictEqual(typeof progressCallback, 'function'); - const backupTarget = await backupTargets.get(backupTargetId); - if (!backupTarget) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Backup target not found'); + const backupSite = await backupSites.get(backupSiteId); + if (!backupSite) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Backup site not found'); const tag = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,''); // unique tag under which all apps/mail/box backs up @@ -522,7 +522,7 @@ async function fullBackup(backupTargetId, options, progressCallback) { progressCallback({ percent, message: `Backing up ${app.fqdn} (${i+1}/${allApps.length}). Waiting for lock` }); await locks.wait(`${locks.TYPE_APP_BACKUP_PREFIX}${app.id}`); const startTime = new Date(); - const [appBackupError, appBackupId] = await safe(backupAppWithTag(app, backupTarget, tag, options, (progress) => progressCallback({ percent, message: progress.message }))); + const [appBackupError, appBackupId] = await safe(backupAppWithTag(app, backupSite, tag, options, (progress) => progressCallback({ percent, message: progress.message }))); debug(`fullBackup: app ${app.fqdn} backup finished. 
Took ${(new Date() - startTime)/1000} seconds`); await locks.release(`${locks.TYPE_APP_BACKUP_PREFIX}${app.id}`); if (appBackupError) throw appBackupError; @@ -531,32 +531,32 @@ async function fullBackup(backupTargetId, options, progressCallback) { progressCallback({ percent, message: 'Backing up mail' }); percent += step; - const mailBackupId = await backupMailWithTag(backupTarget, tag, options, (progress) => progressCallback({ percent, message: progress.message })); + const mailBackupId = await backupMailWithTag(backupSite, tag, options, (progress) => progressCallback({ percent, message: progress.message })); progressCallback({ percent, message: 'Backing up system data' }); percent += step; const dependsOn = appBackupIds.concat(mailBackupId); - const backupId = await backupBox(backupTarget, dependsOn, tag, options, (progress) => progressCallback({ percent, message: progress.message })); + const backupId = await backupBox(backupSite, dependsOn, tag, options, (progress) => progressCallback({ percent, message: progress.message })); return backupId; } // this function is called from external process -async function appBackup(appId, backupTargetId, options, progressCallback) { +async function appBackup(appId, backupSiteId, options, progressCallback) { assert.strictEqual(typeof appId, 'string'); - assert.strictEqual(typeof backupTargetId, 'string'); + assert.strictEqual(typeof backupSiteId, 'string'); assert.strictEqual(typeof options, 'object'); assert.strictEqual(typeof progressCallback, 'function'); const app = await apps.get(appId); if (!app) throw new BoxError(BoxError.BAD_FIELD, 'App not found'); - const backupTarget = await backupTargets.get(backupTargetId); - if (!backupTarget) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Backup target not found'); + const backupSite = await backupSites.get(backupSiteId); + if (!backupSite) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Backup site not found'); await progressCallback({ percent: 1, message: `Backing up ${app.fqdn}. Waiting for lock` }); const startTime = new Date(); - const backupId = await backupApp(app, backupTarget, options, progressCallback); + const backupId = await backupApp(app, backupSite, options, progressCallback); await progressCallback({ percent: 100, message: `app ${app.fqdn} backup finished. 
Took ${(new Date() - startTime)/1000} seconds` }); return backupId; } diff --git a/src/cron.js b/src/cron.js index 67bd9760c..c35042722 100644 --- a/src/cron.js +++ b/src/cron.js @@ -24,7 +24,7 @@ const appHealthMonitor = require('./apphealthmonitor.js'), assert = require('node:assert'), appstore = require('./appstore.js'), AuditSource = require('./auditsource.js'), - backupTargets = require('./backuptargets.js'), + backupSites = require('./backupsites.js'), cloudron = require('./cloudron.js'), constants = require('./constants.js'), { CronJob } = require('cron'), @@ -144,8 +144,8 @@ async function startJobs() { gJobs.cleanupBackups = CronJob.from({ cronTime: DEFAULT_CLEANUP_BACKUPS_PATTERN, onTick: async () => { - for (const backupTarget of await backupTargets.list(1, 100)) { - await safe(backupTargets.startCleanupTask(backupTarget, AuditSource.CRON), { debug }); + for (const backupSite of await backupSites.list(1, 100)) { + await safe(backupSites.startCleanupTask(backupSite, AuditSource.CRON), { debug }); } }, start: true @@ -200,37 +200,37 @@ async function startJobs() { start: true }); - for (const backupTarget of await backupTargets.list(1, 100)) { - await handleBackupScheduleChanged(backupTarget); + for (const backupSite of await backupSites.list(1, 100)) { + await handleBackupScheduleChanged(backupSite); } await handleAutoupdatePatternChanged(await updater.getAutoupdatePattern()); await handleDynamicDnsChanged(await network.getDynamicDns()); await handleExternalLdapChanged(await externalLdap.getConfig()); } -async function handleBackupScheduleChanged(target) { - assert.strictEqual(typeof target, 'object'); +async function handleBackupScheduleChanged(site) { + assert.strictEqual(typeof site, 'object'); const tz = await cloudron.getTimeZone(); - debug(`handleBackupScheduleChanged: schedule ${target.schedule} (${tz})`); + debug(`handleBackupScheduleChanged: schedule ${site.schedule} (${tz})`); - if (gJobs.backups.has(target.id)) gJobs.backups.get(target.id).stop(); - gJobs.backups.delete(target.id); + if (gJobs.backups.has(site.id)) gJobs.backups.get(site.id).stop(); + gJobs.backups.delete(site.id); - if (target.schedule === constants.CRON_PATTERN_NEVER) return; + if (site.schedule === constants.CRON_PATTERN_NEVER) return; const job = CronJob.from({ - cronTime: target.schedule, + cronTime: site.schedule, onTick: async () => { - const t = await backupTargets.get(target.id); + const t = await backupSites.get(site.id); if (!t) return; - await safe(backupTargets.startBackupTask(t, AuditSource.CRON), { debug }); + await safe(backupSites.startBackupTask(t, AuditSource.CRON), { debug }); }, start: true, timeZone: tz }); - gJobs.backups.set(target.id, job); + gJobs.backups.set(site.id, job); } async function handleTimeZoneChanged(tz) { diff --git a/src/metrics.js b/src/metrics.js index bb8fdf1bd..f690b3c1e 100644 --- a/src/metrics.js +++ b/src/metrics.js @@ -252,7 +252,7 @@ async function getContainer(name, options) { for (const target of targets) { const query = { - target: target, + target, format: 'json', from: `-${fromSecs}s`, until: 'now+20s', // until is exclusive. 
'now' is otherwise not included @@ -315,7 +315,7 @@ async function readSystemFromGraphite(options) { for (const target of targets) { const query = { - target: target, + target, format: 'json', from: `-${fromSecs}s`, until: 'now', diff --git a/src/paths.js b/src/paths.js index 3597ff5f3..07cdd5bc1 100644 --- a/src/paths.js +++ b/src/paths.js @@ -41,7 +41,7 @@ exports = module.exports = { NGINX_CONFIG_DIR: path.join(baseDir(), 'platformdata/nginx'), NGINX_APPCONFIG_DIR: path.join(baseDir(), 'platformdata/nginx/applications'), NGINX_CERT_DIR: path.join(baseDir(), 'platformdata/nginx/cert'), - BACKUP_INFO_DIR: path.join(baseDir(), 'platformdata/backup'), // contains /{backupTarget.id}/ + BACKUP_INFO_DIR: path.join(baseDir(), 'platformdata/backup'), // contains /{backupSite.id}/ UPDATE_DIR: path.join(baseDir(), 'platformdata/update'), BOX_UPDATE_FILE: path.join(baseDir(), 'platformdata/update/boxupdate.json'), DISK_USAGE_EXCLUDE_FILE: path.join(baseDir(), 'platformdata/diskusage/exclude'), diff --git a/src/platform.js b/src/platform.js index fa6223a15..be878dea9 100644 --- a/src/platform.js +++ b/src/platform.js @@ -77,7 +77,7 @@ async function removeAllContainers() { async function markApps(existingInfra, restoreOptions) { assert.strictEqual(typeof existingInfra, 'object'); - assert.strictEqual(typeof restoreOptions, 'object'); // { backupTarget, skipDnsSetup } + assert.strictEqual(typeof restoreOptions, 'object'); // { backupSite, skipDnsSetup } if (existingInfra.version === 'none') { // cloudron is being restored from backup debug('markApps: restoring apps'); @@ -113,7 +113,7 @@ async function onInfraReady(infraChanged) { } async function startInfra(restoreOptions) { - assert.strictEqual(typeof restoreOptions, 'object'); // { backupTarget, skipDnsSetup } + assert.strictEqual(typeof restoreOptions, 'object'); // { backupSite, skipDnsSetup } if (constants.TEST && !process.env.TEST_CREATE_INFRA) return; @@ -196,7 +196,7 @@ async function uninitialize() { } async function onActivated(restoreOptions) { - assert.strictEqual(typeof restoreOptions, 'object'); // { backupTarget, skipDnsSetup } + assert.strictEqual(typeof restoreOptions, 'object'); // { backupSite, skipDnsSetup } debug('onActivated: starting post activation services'); diff --git a/src/provision.js b/src/provision.js index bdf1a3068..2559ab1a8 100644 --- a/src/provision.js +++ b/src/provision.js @@ -9,7 +9,7 @@ exports = module.exports = { const appstore = require('./appstore.js'), assert = require('node:assert'), - backupTargets = require('./backuptargets.js'), + backupSites = require('./backupsites.js'), backups = require('./backups.js'), backuptask = require('./backuptask.js'), BoxError = require('./boxerror.js'), @@ -86,7 +86,7 @@ async function setupTask(domain, auditSource) { await reverseProxy.ensureCertificate(location, {}, auditSource); await ensureDhparams(); await dashboard.setupLocation(constants.DASHBOARD_SUBDOMAIN, domain, auditSource); - await backupTargets.addDefault(auditSource); + await backupSites.addDefault(auditSource); setProgress('setup', 'Done'), await eventlog.add(eventlog.ACTION_PROVISION, auditSource, {}); } catch (error) { @@ -172,8 +172,8 @@ async function activate(username, password, email, displayName, ip, auditSource) }; } -async function restoreTask(backupTarget, remotePath, ipv4Config, ipv6Config, options, auditSource) { - assert.strictEqual(typeof backupTarget, 'object'); +async function restoreTask(backupSite, remotePath, ipv4Config, ipv6Config, options, auditSource) { + assert.strictEqual(typeof 
backupSite, 'object'); assert.strictEqual(typeof remotePath, 'string'); assert.strictEqual(typeof ipv4Config, 'object'); assert.strictEqual(typeof ipv6Config, 'object'); @@ -181,17 +181,17 @@ async function restoreTask(backupTarget, remotePath, ipv4Config, ipv6Config, opt assert.strictEqual(typeof auditSource, 'object'); try { - setProgress('restore', 'Preparing backup target'); - await backupTargets.storageApi(backupTarget).setup(backupTarget.config); + setProgress('restore', 'Preparing backup site'); + await backupSites.storageApi(backupSite).setup(backupSite.config); setProgress('restore', 'Downloading box backup'); - await backuptask.restore(backupTarget, remotePath, (progress) => setProgress('restore', progress.message)); + await backuptask.restore(backupSite, remotePath, (progress) => setProgress('restore', progress.message)); setProgress('restore', 'Downloading mail backup'); const mailBackups = await backups.getByIdentifierAndStatePaged(backups.BACKUP_IDENTIFIER_MAIL, backups.BACKUP_STATE_NORMAL, 1, 1); if (mailBackups.length === 0) throw new BoxError(BoxError.NOT_FOUND, 'mail backup not found'); const mailRemotePath = mailBackups[0].remotePath; - await backuptask.downloadMail(backupTarget, mailRemotePath, (progress) => setProgress('restore', progress.message)); + await backuptask.downloadMail(backupSite, mailRemotePath, (progress) => setProgress('restore', progress.message)); await ensureDhparams(); await network.setIPv4Config(ipv4Config); @@ -208,7 +208,7 @@ async function restoreTask(backupTarget, remotePath, ipv4Config, ipv6Config, opt await eventlog.add(eventlog.ACTION_RESTORE, auditSource, { remotePath }); - setImmediate(() => safe(platform.onActivated({ backupTarget, skipDnsSetup: options.skipDnsSetup }), { debug })); + setImmediate(() => safe(platform.onActivated({ backupSite, skipDnsSetup: options.skipDnsSetup }), { debug })); } catch (error) { debug('restoreTask: error. %o', error); gStatus.restore.errorMessage = error ? error.message : ''; @@ -236,7 +236,7 @@ async function restore(backupConfig, remotePath, version, ipv4Config, ipv6Config const activated = await users.isActivated(); if (activated) throw new BoxError(BoxError.CONFLICT, 'Already activated. Restore with a fresh Cloudron installation.'); - const backupTarget = await backupTargets.createPseudo({ + const backupSite = await backupSites.createPseudo({ id: `cloudron-restore`, provider: backupConfig.provider, config: backupConfig.config, @@ -248,7 +248,7 @@ async function restore(backupConfig, remotePath, version, ipv4Config, ipv6Config const error = await network.testIPv4Config(ipv4Config); if (error) throw error; - safe(restoreTask(backupTarget, remotePath, ipv4Config, ipv6Config, options, auditSource), { debug }); // now that args are validated run the task in the background + safe(restoreTask(backupSite, remotePath, ipv4Config, ipv6Config, options, auditSource), { debug }); // now that args are validated run the task in the background } catch (error) { debug('restore: error. 
%o', error); gStatus.restore.active = false; diff --git a/src/routes/backuptargets.js b/src/routes/backupsites.js similarity index 77% rename from src/routes/backuptargets.js rename to src/routes/backupsites.js index d1054ef1e..edc46dfe6 100644 --- a/src/routes/backuptargets.js +++ b/src/routes/backupsites.js @@ -25,7 +25,7 @@ exports = module.exports = { const assert = require('node:assert'), AuditSource = require('../auditsource.js'), - backupTargets = require('../backuptargets.js'), + backupSites = require('../backupsites.js'), BoxError = require('../boxerror.js'), HttpError = require('@cloudron/connect-lastmile').HttpError, HttpSuccess = require('@cloudron/connect-lastmile').HttpSuccess, @@ -34,11 +34,11 @@ const assert = require('node:assert'), async function load(req, res, next) { assert.strictEqual(typeof req.params.id, 'string'); - const [error, result] = await safe(backupTargets.get(req.params.id)); + const [error, result] = await safe(backupSites.get(req.params.id)); if (error) return next(BoxError.toHttpError(error)); - if (!result) return next(new HttpError(404, 'Backup target not found')); + if (!result) return next(new HttpError(404, 'Backup site not found')); - req.resources.backupTarget = result; + req.resources.backupSite = result; next(); } @@ -46,7 +46,7 @@ async function load(req, res, next) { async function get(req, res, next) { assert.strictEqual(typeof req.params.id, 'string'); - next(new HttpSuccess(200, backupTargets.removePrivateFields(req.resources.backupTarget))); + next(new HttpSuccess(200, backupSites.removePrivateFields(req.resources.backupSite))); } async function list(req, res, next) { @@ -56,10 +56,10 @@ async function list(req, res, next) { const perPage = typeof req.query.per_page === 'string' ? parseInt(req.query.per_page) : 25; if (!perPage || perPage < 0) return next(new HttpError(400, 'per_page query param has to be a positive number')); - const [error, result] = await safe(backupTargets.list(page, perPage)); + const [error, result] = await safe(backupSites.list(page, perPage)); if (error) return next(BoxError.toHttpError(error)); - next(new HttpSuccess(200, { backupTargets: result.map(backupTargets.removePrivateFields) })); + next(new HttpSuccess(200, { backupSites: result.map(backupSites.removePrivateFields) })); } async function add(req, res, next) { @@ -86,7 +86,7 @@ async function add(req, res, next) { // testing the backup using put/del takes a bit of time at times req.clearTimeout(); - const [error, id] = await safe(backupTargets.add(req.body, AuditSource.fromRequest(req))); + const [error, id] = await safe(backupSites.add(req.body, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, { id })); @@ -94,9 +94,9 @@ async function add(req, res, next) { async function del(req, res, next) { assert.strictEqual(typeof req.params.id, 'string'); - assert.strictEqual(typeof req.resources.backupTarget, 'object'); + assert.strictEqual(typeof req.resources.backupSite, 'object'); - const [error] = await safe(backupTargets.del(req.resources.backupTarget, AuditSource.fromRequest(req))); + const [error] = await safe(backupSites.del(req.resources.backupSite, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(204)); @@ -132,7 +132,7 @@ async function setLimits(req, res, next) { if ('memoryLimit' in limits && typeof limits.memoryLimit !== 'number') return next(new HttpError(400, 'memoryLimit must be a positive integer')); - const [error] = await
safe(backupTargets.setLimits(req.resources.backupTarget, limits, AuditSource.fromRequest(req))); + const [error] = await safe(backupSites.setLimits(req.resources.backupSite, limits, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, {})); @@ -146,7 +146,7 @@ async function setConfig(req, res, next) { // testing the backup using put/del takes a bit of time at times req.clearTimeout(); - const [error] = await safe(backupTargets.setConfig(req.resources.backupTarget, req.body.config, AuditSource.fromRequest(req))); + const [error] = await safe(backupSites.setConfig(req.resources.backupSite, req.body.config, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, {})); @@ -157,7 +157,7 @@ async function setSchedule(req, res, next) { if (typeof req.body.schedule !== 'string') return next(new HttpError(400, 'schedule is required')); - const [error] = await safe(backupTargets.setSchedule(req.resources.backupTarget, req.body.schedule, AuditSource.fromRequest(req))); + const [error] = await safe(backupSites.setSchedule(req.resources.backupSite, req.body.schedule, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, {})); @@ -168,7 +168,7 @@ async function setRetention(req, res, next) { if (!req.body.retention || typeof req.body.retention !== 'object') return next(new HttpError(400, 'retention is required')); - const [error] = await safe(backupTargets.setRetention(req.resources.backupTarget, req.body.retention, AuditSource.fromRequest(req))); + const [error] = await safe(backupSites.setRetention(req.resources.backupSite, req.body.retention, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, {})); @@ -177,7 +177,7 @@ async function setRetention(req, res, next) { async function setPrimary(req, res, next) { assert.strictEqual(typeof req.body, 'object'); - const [error] = await safe(backupTargets.setPrimary(req.resources.backupTarget, AuditSource.fromRequest(req))); + const [error] = await safe(backupSites.setPrimary(req.resources.backupSite, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, {})); @@ -192,7 +192,7 @@ async function setEncryption(req, res, next) { if ('encryptionPasswordHint' in req.body && typeof req.body.encryptionPasswordHint !== 'string') return next(new HttpError(400, 'encryptionPasswordHint must be a string')); if ('encryptedFilenames' in req.body && typeof req.body.encryptedFilenames !== 'boolean') return next(new HttpError(400, 'encryptedFilenames must be a boolean')); - const [error] = await safe(backupTargets.setEncryption(req.resources.backupTarget, req.body, AuditSource.fromRequest(req))); + const [error] = await safe(backupSites.setEncryption(req.resources.backupSite, req.body, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, {})); @@ -203,43 +203,43 @@ async function setName(req, res, next) { if (typeof req.body.name !== 'string') return next(new HttpError(400, 'name is required')); - const [error] = await safe(backupTargets.setName(req.resources.backupTarget, req.body.name, AuditSource.fromRequest(req))); + const [error] = await safe(backupSites.setName(req.resources.backupSite, req.body.name, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, {})); } async function 
createBackup(req, res, next) { - assert.strictEqual(typeof req.resources.backupTarget, 'object'); + assert.strictEqual(typeof req.resources.backupSite, 'object'); - const [error, taskId] = await safe(backupTargets.startBackupTask(req.resources.backupTarget, AuditSource.fromRequest(req))); + const [error, taskId] = await safe(backupSites.startBackupTask(req.resources.backupSite, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(202, { taskId })); } async function cleanup(req, res, next) { - assert.strictEqual(typeof req.resources.backupTarget, 'object'); + assert.strictEqual(typeof req.resources.backupSite, 'object'); - const [error, taskId] = await safe(backupTargets.startCleanupTask(req.resources.backupTarget, AuditSource.fromRequest(req))); + const [error, taskId] = await safe(backupSites.startCleanupTask(req.resources.backupSite, AuditSource.fromRequest(req))); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(202, { taskId })); } async function remount(req, res, next) { - assert.strictEqual(typeof req.resources.backupTarget, 'object'); + assert.strictEqual(typeof req.resources.backupSite, 'object'); - const [error] = await safe(backupTargets.remount(req.resources.backupTarget)); + const [error] = await safe(backupSites.remount(req.resources.backupSite)); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(202, {})); } async function getStatus(req, res, next) { - assert.strictEqual(typeof req.resources.backupTarget, 'object'); + assert.strictEqual(typeof req.resources.backupSite, 'object'); - const [error, mountStatus] = await safe(backupTargets.getStatus(req.resources.backupTarget)); + const [error, mountStatus] = await safe(backupSites.getStatus(req.resources.backupSite)); if (error) return next(BoxError.toHttpError(error)); next(new HttpSuccess(200, mountStatus)); } diff --git a/src/routes/index.js b/src/routes/index.js index fa7051bab..701b9150d 100644 --- a/src/routes/index.js +++ b/src/routes/index.js @@ -9,7 +9,7 @@ exports = module.exports = { archives: require('./archives.js'), auth: require('./auth.js'), backups: require('./backups.js'), - backupTargets: require('./backuptargets.js'), + backupSites: require('./backupsites.js'), branding: require('./branding.js'), cloudron: require('./cloudron.js'), dashboard: require('./dashboard.js'), diff --git a/src/routes/test/appstore-test.js b/src/routes/test/appstore-test.js index 463443ed6..4a432f23b 100644 --- a/src/routes/test/appstore-test.js +++ b/src/routes/test/appstore-test.js @@ -35,7 +35,7 @@ describe('Appstore Apps API', function () { .query({ access_token: owner.token }) .ok(() => true); - expect(response.status).to.be(412); + expect(response.status).to.be(402); expect(scope1.isDone()).to.be.ok(); }); diff --git a/src/routes/test/archives-test.js b/src/routes/test/archives-test.js index 722e07940..26fb1b928 100644 --- a/src/routes/test/archives-test.js +++ b/src/routes/test/archives-test.js @@ -12,7 +12,7 @@ const archives = require('../../archives.js'), superagent = require('@cloudron/superagent'); describe('Archives API', function () { - const { setup, cleanup, serverUrl, owner, auditSource, getDefaultBackupTarget } = common; + const { setup, cleanup, serverUrl, owner, auditSource, getDefaultBackupSite } = common; const appBackup = { id: null, @@ -27,14 +27,14 @@ describe('Archives API', function () { preserveSecs: 0, label: '', appConfig: { loc: 'loc1' }, - targetId: null + siteId: null }; let archiveId; before(async 
function () { await setup(); - appBackup.targetId = (await getDefaultBackupTarget()).id; + appBackup.siteId = (await getDefaultBackupSite()).id; appBackup.id = await backups.add(appBackup); archiveId = await archives.add(appBackup.id, {}, auditSource); }); diff --git a/src/routes/test/backups-test.js b/src/routes/test/backups-test.js index 824a46acf..afc5f88ec 100644 --- a/src/routes/test/backups-test.js +++ b/src/routes/test/backups-test.js @@ -7,7 +7,7 @@ const common = require('./common.js'), superagent = require('@cloudron/superagent'); describe('Backups API', function () { - const { setup, cleanup, waitForTask, serverUrl, owner, admin, getDefaultBackupTarget } = common; + const { setup, cleanup, waitForTask, serverUrl, owner, admin, getDefaultBackupSite } = common; before(setup); after(cleanup); @@ -16,8 +16,8 @@ describe('Backups API', function () { // create some backup first before(async function () { - const target = await getDefaultBackupTarget(); - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${target.id}/create_backup`) + const site = await getDefaultBackupSite(); + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${site.id}/create_backup`) .query({ access_token: admin.token }); expect(response.status).to.equal(202); expect(response.body.taskId).to.be.a('string'); diff --git a/src/routes/test/backuptargets-test.js b/src/routes/test/backupsites-test.js similarity index 74% rename from src/routes/test/backuptargets-test.js rename to src/routes/test/backupsites-test.js index 2acb1e2e0..b3e0478e9 100644 --- a/src/routes/test/backuptargets-test.js +++ b/src/routes/test/backupsites-test.js @@ -2,30 +2,30 @@ 'use strict'; -const backupTargets = require('../../backuptargets.js'), +const backupSites = require('../../backupsites.js'), common = require('./common.js'), expect = require('expect.js'), superagent = require('@cloudron/superagent'); describe('Backups API', function () { - const { setup, cleanup, waitForTask, serverUrl, owner, admin, getDefaultBackupTarget } = common; + const { setup, cleanup, waitForTask, serverUrl, owner, admin, getDefaultBackupSite } = common; before(setup); after(cleanup); - const newTarget = { + const newSite = { provider: 'filesystem', - name: 'NewTarget', - config: { backupDir: '/tmp/boxtest-newtarget' }, + name: 'NewSite', + config: { backupDir: '/tmp/boxtest-newsite' }, format: 'tgz', retention: { keepWithinSecs: 60 * 60 }, schedule: '00 01 * * * *' }; - const encryptedTarget = { + const encryptedSite = { provider: 'filesystem', - name: 'EncryptedTarget', - config: { backupDir: '/tmp/boxtest-enctarget' }, + name: 'EncryptedSite', + config: { backupDir: '/tmp/boxtest-encsite' }, format: 'rsync', retention: { keepMonthly: 60 }, schedule: '* 1 * * * *', @@ -33,72 +33,72 @@ describe('Backups API', function () { describe('add', function () { it('fails as admin', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites`) .query({ access_token: admin.token }) - .send(newTarget) + .send(newSite) .ok(() => true); expect(response.status).to.equal(403); }); it('succeeds as owner', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites`) .query({ access_token: owner.token }) - .send(newTarget) + .send(newSite) .ok(() => true); expect(response.status).to.equal(200); - newTarget.id 
= response.body.id; + newSite.id = response.body.id; }); it('succeeds with password', async function () { - const tmp = Object.assign({}, encryptedTarget, { encryptionPassword: 'deutsch-a1', encryptedFilenames: true }); + const tmp = Object.assign({}, encryptedSite, { encryptionPassword: 'deutsch-a1', encryptedFilenames: true }); - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites`) .query({ access_token: owner.token }) .send(tmp) .ok(() => true); expect(response.status).to.equal(200); - encryptedTarget.id = response.body.id; + encryptedSite.id = response.body.id; }); }); describe('list', function () { it('succeeds as admin', async function () { - const response = await superagent.get(`${serverUrl}/api/v1/backup_targets`) + const response = await superagent.get(`${serverUrl}/api/v1/backup_sites`) .query({ access_token: admin.token }); expect(response.status).to.equal(200); - expect(response.body.backupTargets.length).to.be(3); - const tmp = response.body.backupTargets.find(t => t.id === newTarget.id); - expect(tmp.provider).to.be(newTarget.provider); + expect(response.body.backupSites.length).to.be(3); + const tmp = response.body.backupSites.find(t => t.id === newSite.id); + expect(tmp.provider).to.be(newSite.provider); expect(tmp.config).to.be.ok(); - expect(tmp.format).to.be(newTarget.format); - expect(tmp.name).to.be(newTarget.name); + expect(tmp.format).to.be(newSite.format); + expect(tmp.name).to.be(newSite.name); expect(tmp.primary).to.be(false); }); }); describe('get', function () { it('succeeds as admin', async function () { - const response = await superagent.get(`${serverUrl}/api/v1/backup_targets/${newTarget.id}`) + const response = await superagent.get(`${serverUrl}/api/v1/backup_sites/${newSite.id}`) .query({ access_token: admin.token }); expect(response.status).to.equal(200); - expect(response.body.provider).to.be(newTarget.provider); + expect(response.body.provider).to.be(newSite.provider); expect(response.body.config).to.be.ok(); - expect(response.body.format).to.be(newTarget.format); - expect(response.body.name).to.be(newTarget.name); + expect(response.body.format).to.be(newSite.format); + expect(response.body.name).to.be(newSite.name); expect(response.body.primary).to.be(false); }); it('succeeds as admin (encrypted)', async function () { - const response = await superagent.get(`${serverUrl}/api/v1/backup_targets/${encryptedTarget.id}`) + const response = await superagent.get(`${serverUrl}/api/v1/backup_sites/${encryptedSite.id}`) .query({ access_token: admin.token }); expect(response.status).to.equal(200); - expect(response.body.provider).to.be(encryptedTarget.provider); + expect(response.body.provider).to.be(encryptedSite.provider); expect(response.body.config).to.be.ok(); - expect(response.body.format).to.be(encryptedTarget.format); - expect(response.body.name).to.be(encryptedTarget.name); + expect(response.body.format).to.be(encryptedSite.format); + expect(response.body.name).to.be(encryptedSite.name); expect(response.body.primary).to.be(false); expect(response.body.encrypted).to.be(true); expect(response.body.encryptedFilenames).to.be(true); @@ -107,7 +107,7 @@ describe('Backups API', function () { describe('schedule', function () { it('cannot set without schedule', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/schedule`) + const response = await 
superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/schedule`) .query({ access_token: owner.token }) .send({}) .ok(() => true); @@ -116,7 +116,7 @@ describe('Backups API', function () { }); it('cannot set invalid schedule', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/schedule`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/schedule`) .query({ access_token: owner.token }) .send({ schedule: 'whatever' }) .ok(() => true); @@ -125,32 +125,32 @@ describe('Backups API', function () { }); it('can set "never" schedule', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/schedule`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/schedule`) .query({ access_token: owner.token }) .send({ schedule: 'never' }) .ok(() => true); expect(response.status).to.equal(200); - const result = await backupTargets.get(newTarget.id); + const result = await backupSites.get(newSite.id); expect(result.schedule).to.be('never'); }); it('can set valid schedule', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/schedule`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/schedule`) .query({ access_token: owner.token }) .send({ schedule: '00 00 3 * * *' }) .ok(() => true); expect(response.status).to.equal(200); - const result = await backupTargets.get(newTarget.id); + const result = await backupSites.get(newSite.id); expect(result.schedule).to.be('00 00 3 * * *'); }); }); describe('retention', function () { it('cannot set without retention', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/retention`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/retention`) .query({ access_token: owner.token }) .send({}) .ok(() => true); @@ -159,7 +159,7 @@ describe('Backups API', function () { }); it('cannot set invalid retention', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/retention`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/retention`) .query({ access_token: owner.token }) .send({ retention: 'whatever' }) .ok(() => true); @@ -168,7 +168,7 @@ describe('Backups API', function () { }); it('cannot set backup_policy with retention with invalid keepWithinSecs', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/retention`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/retention`) .query({ access_token: owner.token }) .send({ retention: { keepWithinSecs: 'not a number' } }) .ok(() => true); @@ -177,20 +177,20 @@ describe('Backups API', function () { }); it('can set valid retention', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/retention`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/retention`) .query({ access_token: owner.token }) .send({ retention: { keepWithinSecs: 2 * 24 * 60 * 60 } }) .ok(() => true); 
expect(response.status).to.equal(200); - const result = await backupTargets.get(newTarget.id); + const result = await backupSites.get(newSite.id); expect(result.retention).to.eql({ keepWithinSecs: 2 * 24 * 60 * 60 }); }); }); describe('limits', function () { it('cannot set invalid limits', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/limits`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/limits`) .query({ access_token: owner.token }) .send({ limits: 2 }) .ok(() => true); @@ -199,20 +199,20 @@ }); it('can set valid limits', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/limits`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/limits`) .query({ access_token: owner.token }) .send({ limits: { syncConcurrency: 34 } }) .ok(() => true); expect(response.status).to.equal(200); - const result = await backupTargets.get(newTarget.id); + const result = await backupSites.get(newSite.id); expect(result.limits).to.eql({ syncConcurrency: 34 }); }); }); describe('primary', function () { it('cannot set invalid id', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}xx/configure/primary`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}xx/configure/primary`) .query({ access_token: owner.token }) .send({}) .ok(() => true); @@ -221,26 +221,26 @@ }); it('can set valid primary', async function () { - const oldDefault = await getDefaultBackupTarget(); + const oldDefault = await getDefaultBackupSite(); - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/primary`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/primary`) .query({ access_token: owner.token }) .send({}) .ok(() => true); expect(response.status).to.equal(200); - const result = await backupTargets.get(newTarget.id); + const result = await backupSites.get(newSite.id); expect(result.primary).to.be(true); - const result2 = await backupTargets.get(oldDefault.id); + const result2 = await backupSites.get(oldDefault.id); expect(result2.primary).to.be(false); }); }); - // at this point , newTarget is the primary and the default one is not + // at this point, newSite is the primary and the default one is not describe('del', function () { it('cannot delete invalid id', async function () { - const response = await superagent.del(`${serverUrl}/api/v1/backup_targets/${newTarget.id}xx`) + const response = await superagent.del(`${serverUrl}/api/v1/backup_sites/${newSite.id}xx`) .query({ access_token: owner.token }) .ok(() => true); @@ -248,7 +248,7 @@ }); it('cannot delete primary', async function () { - const response = await superagent.del(`${serverUrl}/api/v1/backup_targets/${newTarget.id}`) + const response = await superagent.del(`${serverUrl}/api/v1/backup_sites/${newSite.id}`) .query({ access_token: owner.token }) .ok(() => true); @@ -256,8 +256,8 @@ }); it('can delete non-primary', async function () { - const result2 = (await backupTargets.list(1, 10)).pop(); - const response = await superagent.del(`${serverUrl}/api/v1/backup_targets/${result2.id}`) + const
result2 = (await backupSites.list(1, 10)).pop(); + const response = await superagent.del(`${serverUrl}/api/v1/backup_sites/${result2.id}`) .query({ access_token: owner.token }) .ok(() => true); @@ -271,7 +271,7 @@ describe('Backups API', function () { }; it('cannot set invalid config', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/config`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/config`) .query({ access_token: owner.token }) .send({ config: 32 }) .ok(() => true); @@ -280,20 +280,20 @@ describe('Backups API', function () { }); it('can set valid config', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/configure/config`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/configure/config`) .query({ access_token: owner.token }) .send({ config: someConfig }) .ok(() => true); expect(response.status).to.equal(200); - const result = await backupTargets.get(newTarget.id); + const result = await backupSites.get(newSite.id); expect(result.config.backupDir).to.be(someConfig.backupDir); }); }); describe('mounting', function () { it('mount status', async function () { - const response = await superagent.get(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/status`) + const response = await superagent.get(`${serverUrl}/api/v1/backup_sites/${newSite.id}/status`) .query({ access_token: owner.token }); expect(response.status).to.equal(200); expect(response.body.state).to.be('active'); @@ -301,7 +301,7 @@ describe('Backups API', function () { }); it('remount', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/remount`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/remount`) .query({ access_token: owner.token }) .send({}); expect(response.status).to.equal(202); @@ -310,7 +310,7 @@ describe('Backups API', function () { describe('create', function () { it('succeeds', async function () { - const response = await superagent.post(`${serverUrl}/api/v1/backup_targets/${newTarget.id}/create_backup`) + const response = await superagent.post(`${serverUrl}/api/v1/backup_sites/${newSite.id}/create_backup`) .query({ access_token: admin.token }); expect(response.status).to.equal(202); expect(response.body.taskId).to.be.a('string'); diff --git a/src/routes/test/common.js b/src/routes/test/common.js index c710a532b..fbcbce399 100644 --- a/src/routes/test/common.js +++ b/src/routes/test/common.js @@ -2,7 +2,7 @@ const apps = require('../../apps.js'), appstore = require('../../appstore.js'), - backupTargets = require('../../backuptargets.js'), + backupSites = require('../../backupsites.js'), debug = require('debug')('box:test/common'), constants = require('../../constants.js'), database = require('../../database.js'), @@ -99,7 +99,7 @@ exports = module.exports = { aliasDomains: [] }, - getDefaultBackupTarget, + getDefaultBackupSite, mockApiServerOrigin: 'http://localhost:6060', dashboardDomain: 'test.example.com', @@ -233,7 +233,7 @@ async function waitForAsyncTask(es) { }); } -async function getDefaultBackupTarget() { - const result = await backupTargets.list(1, 10); +async function getDefaultBackupSite() { + const result = await backupSites.list(1, 10); return result.find(r => r.name === 'Default'); } diff --git a/src/server.js b/src/server.js index d65e458c2..aecaecfde 100644 --- 
a/src/server.js +++ b/src/server.js @@ -157,22 +157,22 @@ async function initializeExpressSync() { router.post('/api/v1/backups/:id', json, token, authorizeAdmin, routes.backups.load, routes.backups.update); router.post('/api/v1/backups/:id/check_integrity', json, token, authorizeAdmin, routes.backups.load, routes.backups.checkIntegrity); - // backup target (destination) routes - router.get ('/api/v1/backup_targets/', token, authorizeAdmin, routes.backupTargets.list); - router.get ('/api/v1/backup_targets/:id', token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.get); - router.post('/api/v1/backup_targets', json, token, authorizeOwner, routes.backupTargets.add); - router.del ('/api/v1/backup_targets/:id', token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.del); - router.get ('/api/v1/backup_targets/:id/status', token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.getStatus); - router.post('/api/v1/backup_targets/:id/create_backup', token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.createBackup); - router.post('/api/v1/backup_targets/:id/cleanup', json, token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.cleanup); - router.post('/api/v1/backup_targets/:id/remount', json, token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.remount); - router.post('/api/v1/backup_targets/:id/configure/name', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setName); - router.post('/api/v1/backup_targets/:id/configure/config', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setConfig); - router.post('/api/v1/backup_targets/:id/configure/limits', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setLimits); - router.post('/api/v1/backup_targets/:id/configure/schedule', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setSchedule); - router.post('/api/v1/backup_targets/:id/configure/retention', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setRetention); - router.post('/api/v1/backup_targets/:id/configure/primary', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setPrimary); - router.post('/api/v1/backup_targets/:id/configure/encryption', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setEncryption); + // backup site (destination) routes + router.get ('/api/v1/backup_sites/', token, authorizeAdmin, routes.backupSites.list); + router.get ('/api/v1/backup_sites/:id', token, authorizeAdmin, routes.backupSites.load, routes.backupSites.get); + router.post('/api/v1/backup_sites', json, token, authorizeOwner, routes.backupSites.add); + router.del ('/api/v1/backup_sites/:id', token, authorizeOwner, routes.backupSites.load, routes.backupSites.del); + router.get ('/api/v1/backup_sites/:id/status', token, authorizeAdmin, routes.backupSites.load, routes.backupSites.getStatus); + router.post('/api/v1/backup_sites/:id/create_backup', token, authorizeAdmin, routes.backupSites.load, routes.backupSites.createBackup); + router.post('/api/v1/backup_sites/:id/cleanup', json, token, authorizeAdmin, routes.backupSites.load, routes.backupSites.cleanup); + router.post('/api/v1/backup_sites/:id/remount', json, token, authorizeAdmin, routes.backupSites.load, routes.backupSites.remount); + router.post('/api/v1/backup_sites/:id/configure/name', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setName); + 
router.post('/api/v1/backup_sites/:id/configure/config', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setConfig); + router.post('/api/v1/backup_sites/:id/configure/limits', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setLimits); + router.post('/api/v1/backup_sites/:id/configure/schedule', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setSchedule); + router.post('/api/v1/backup_sites/:id/configure/retention', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setRetention); + router.post('/api/v1/backup_sites/:id/configure/primary', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setPrimary); + router.post('/api/v1/backup_sites/:id/configure/encryption', json, token, authorizeOwner, routes.backupSites.load, routes.backupSites.setEncryption); // app archive routes router.get ('/api/v1/archives', token, authorizeAdmin, routes.archives.list); diff --git a/src/system.js b/src/system.js index e62ec17b1..cf1ec2a64 100644 --- a/src/system.js +++ b/src/system.js @@ -20,7 +20,7 @@ exports = module.exports = { const apps = require('./apps.js'), assert = require('node:assert'), { AsyncTask } = require('./asynctask.js'), - backupTargets = require('./backuptargets.js'), + backupSites = require('./backupsites.js'), BoxError = require('./boxerror.js'), debug = require('debug')('box:system'), df = require('./df.js'), @@ -125,15 +125,15 @@ async function getFilesystems() { filesystems[diskInfo.filesystem].contents.push(stdPath); } - for (const backupTarget of await backupTargets.list(1, 100)) { - if (backupTarget.provider === 'filesystem') { - const [, dfResult] = await safe(df.file(backupTarget.config.backupDir)); + for (const backupSite of await backupSites.list(1, 100)) { + if (backupSite.provider === 'filesystem') { + const [, dfResult] = await safe(df.file(backupSite.config.backupDir)); const filesystem = dfResult?.filesystem || rootDisk.filesystem; - if (filesystems[filesystem]) filesystems[filesystem].contents.push({ type: 'cloudron-backup', id: backupTarget.id, path: backupTarget.config.backupDir }); + if (filesystems[filesystem]) filesystems[filesystem].contents.push({ type: 'cloudron-backup', id: backupSite.id, path: backupSite.config.backupDir }); } // often the default backup dir is not cleaned up - if (backupTarget.provider !== 'filesystem' || backupTarget.config.backupDir !== paths.DEFAULT_BACKUP_DIR) { + if (backupSite.provider !== 'filesystem' || backupSite.config.backupDir !== paths.DEFAULT_BACKUP_DIR) { const [, dfResult] = await safe(df.file(paths.DEFAULT_BACKUP_DIR)); const filesystem = dfResult?.filesystem || rootDisk.filesystem; if (filesystems[filesystem]) filesystems[filesystem].contents.push({ type: 'cloudron-backup-default', id: 'cloudron-backup-default', path: paths.DEFAULT_BACKUP_DIR }); diff --git a/src/tasks.js b/src/tasks.js index 16cc8d89d..7ce07a37c 100644 --- a/src/tasks.js +++ b/src/tasks.js @@ -22,7 +22,7 @@ exports = module.exports = { // '_' prefix is removed for lookup TASK_APP: 'app', - // "prefix" allows us to locate the tasks of a specific app or backup target + // "prefix" allows us to locate the tasks of a specific app or backup site TASK_APP_BACKUP_PREFIX: 'appBackup_', TASK_FULL_BACKUP_PREFIX: 'backup_', // full backup TASK_CLEAN_BACKUPS_PREFIX: 'cleanBackups_', diff --git a/src/test/archives-test.js b/src/test/archives-test.js index 142276dc0..4a6f42d06 100644 --- a/src/test/archives-test.js +++ b/src/test/archives-test.js @@ -13,7 
+13,7 @@ const archives = require('../archives.js'), safe = require('safetydance'); describe('Archives', function () { - const { setup, cleanup, auditSource, getDefaultBackupTarget } = common; + const { setup, cleanup, auditSource, getDefaultBackupSite } = common; const appBackup = { id: null, @@ -28,12 +28,12 @@ describe('Archives', function () { preserveSecs: 0, label: '', appConfig: { loc: 'loc1' }, - targetId: null + siteId: null }; before(async function () { await setup(); - appBackup.targetId = (await getDefaultBackupTarget()).id; + appBackup.siteId = (await getDefaultBackupSite()).id; appBackup.id = await backups.add(appBackup); }); after(cleanup); diff --git a/src/test/backupcleaner-test.js b/src/test/backupcleaner-test.js index 4e9a2403a..020b459a0 100644 --- a/src/test/backupcleaner-test.js +++ b/src/test/backupcleaner-test.js @@ -9,7 +9,7 @@ const archives = require('../archives.js'), backupCleaner = require('../backupcleaner.js'), backups = require('../backups.js'), - backupTargets = require('../backuptargets.js'), + backupSites = require('../backupsites.js'), common = require('./common.js'), expect = require('expect.js'), moment = require('moment'), @@ -17,7 +17,7 @@ const archives = require('../archives.js'), timers = require('timers/promises'); describe('backup cleaner', function () { - const { setup, cleanup, app, getDefaultBackupTarget, auditSource } = common; + const { setup, cleanup, app, getDefaultBackupSite, auditSource } = common; before(setup); after(cleanup); @@ -126,7 +126,7 @@ describe('backup cleaner', function () { }); describe('task', function () { - let target; + let site; const BACKUP_0_BOX = { id: null, @@ -140,7 +140,7 @@ describe('backup cleaner', function () { manifest: null, preserveSecs: 0, appConfig: null, - targetId: null + siteId: null }; const BACKUP_0_APP_0 = { // backup of installed app @@ -155,7 +155,7 @@ describe('backup cleaner', function () { manifest: null, preserveSecs: 0, appConfig: null, - targetId: null + siteId: null }; const BACKUP_0_APP_1 = { // this app is uninstalled @@ -170,7 +170,7 @@ describe('backup cleaner', function () { manifest: null, preserveSecs: 0, appConfig: null, - targetId: null + siteId: null }; const BACKUP_1_BOX = { @@ -185,7 +185,7 @@ describe('backup cleaner', function () { manifest: null, preserveSecs: 0, appConfig: null, - targetId: null + siteId: null }; const BACKUP_1_APP_0 = { @@ -200,7 +200,7 @@ describe('backup cleaner', function () { manifest: null, preserveSecs: 0, appConfig: null, - targetId: null + siteId: null }; const BACKUP_1_APP_1 = { @@ -215,7 +215,7 @@ describe('backup cleaner', function () { manifest: null, preserveSecs: 0, appConfig: null, - targetId: null + siteId: null }; const BACKUP_2_APP_2 = { // this is archived and left alone @@ -230,21 +230,21 @@ describe('backup cleaner', function () { manifest: null, preserveSecs: 0, appConfig: null, - targetId: null + siteId: null }; before(async function () { - target = await getDefaultBackupTarget(); - await backupTargets.setConfig(target, { + site = await getDefaultBackupSite(); + await backupSites.setConfig(site, { provider: 'filesystem', backupDir: '/tmp/someplace', }, auditSource); - await backupTargets.setRetention(target, { keepWithinSecs: 1 }, auditSource); - await backupTargets.setSchedule(target, '00 00 23 * * *', auditSource); + await backupSites.setRetention(site, { keepWithinSecs: 1 }, auditSource); + await backupSites.setSchedule(site, '00 00 23 * * *', auditSource); }); - async function cleanupBackups(target) { - const taskId = await 
backupTargets.startCleanupTask(target, auditSource); + async function cleanupBackups(site) { + const taskId = await backupSites.startCleanupTask(site, auditSource); console.log('started task', taskId); @@ -261,12 +261,12 @@ describe('backup cleaner', function () { } it('succeeds without backups', async function () { - await cleanupBackups(target); + await cleanupBackups(site); }); it('add the backups', async function () { for (const b of [BACKUP_0_APP_0, BACKUP_0_APP_1, BACKUP_0_BOX, BACKUP_1_APP_0, BACKUP_1_APP_1, BACKUP_1_BOX, BACKUP_2_APP_2]) { - b.targetId = target.id; + b.siteId = site.id; } BACKUP_0_APP_0.id = await backups.add(BACKUP_0_APP_0); @@ -286,7 +286,7 @@ describe('backup cleaner', function () { }); it('succeeds with box backups, keeps latest', async function () { - await cleanupBackups(target); + await cleanupBackups(site); const results = await backups.getByTypePaged(backups.BACKUP_TYPE_BOX, 1, 1000); expect(results.length).to.equal(1); @@ -298,7 +298,7 @@ describe('backup cleaner', function () { }); it('does not remove expired backups if only one left', async function () { - await cleanupBackups(target); + await cleanupBackups(site); const results = await backups.getByTypePaged(backups.BACKUP_TYPE_BOX, 1, 1000); expect(results[0].id).to.equal(BACKUP_1_BOX.id); @@ -316,7 +316,7 @@ describe('backup cleaner', function () { await timers.setTimeout(2000); // wait for expiration - await cleanupBackups(target); + await cleanupBackups(site); let result = await backups.getByTypePaged(backups.BACKUP_TYPE_APP, 1, 1000); expect(result.length).to.equal(4); diff --git a/src/test/backups-test.js b/src/test/backups-test.js index 5f23be806..06583ce22 100644 --- a/src/test/backups-test.js +++ b/src/test/backups-test.js @@ -13,7 +13,7 @@ const backups = require('../backups.js'), safe = require('safetydance'); describe('backups', function () { - const { setup, cleanup, getDefaultBackupTarget } = common; + const { setup, cleanup, getDefaultBackupSite } = common; const boxBackup = { id: null, @@ -28,7 +28,7 @@ describe('backups', function () { preserveSecs: 0, label: '', appConfig: null, - targetId: null, + siteId: null, integrity: null, stats: null }; @@ -46,18 +46,18 @@ describe('backups', function () { preserveSecs: 0, label: '', appConfig: null, - targetId: null, + siteId: null, integrity: null, stats: null }; - let defaultBackupTarget; + let defaultBackupSite; before(async function () { await setup(); - defaultBackupTarget = await getDefaultBackupTarget(); - boxBackup.targetId = defaultBackupTarget.id; - appBackup.targetId = defaultBackupTarget.id; + defaultBackupSite = await getDefaultBackupSite(); + boxBackup.siteId = defaultBackupSite.id; + appBackup.siteId = defaultBackupSite.id; }); after(cleanup); diff --git a/src/test/backupsites-test.js b/src/test/backupsites-test.js new file mode 100644 index 000000000..291dcc27b --- /dev/null +++ b/src/test/backupsites-test.js @@ -0,0 +1,137 @@ +/* jslint node:true */ +/* global it:false */ +/* global describe:false */ +/* global before:false */ +/* global after:false */ + +'use strict'; + +const archives = require('../archives.js'), + backups = require('../backups.js'), + backupSites = require('../backupsites.js'), + BoxError = require('../boxerror.js'), + common = require('./common.js'), + constants = require('../constants.js'), + expect = require('expect.js'), + safe = require('safetydance'); + +describe('backups', function () { + const { setup, cleanup, auditSource, getDefaultBackupSite } = common; + + before(async function () { + await 
setup(); + }); + after(cleanup); + + const backupSite = { + provider: 'filesystem', + name: 'Another', + config: { backupDir: '/tmp/boxtest2' }, + format: 'rsync', + retention: { keepWithinSecs: 2 * 24 * 60 * 60 }, + schedule: '00 00 23 * * *' + }; + + const appBackup = { + id: null, + remotePath: 'backup-box', + encryptionVersion: 2, + packageVersion: '1.0.0', + type: backups.BACKUP_TYPE_APP, + state: backups.BACKUP_STATE_NORMAL, + identifier: 'box', + dependsOn: [ 'dep1' ], + manifest: null, + preserveSecs: 0, + label: '', + appConfig: { loc: 'loc1' }, + siteId: null + }; + + let defaultBackupSite = null; + + before(async function () { + appBackup.siteId = (await getDefaultBackupSite()).id; + appBackup.id = await backups.add(appBackup); + await archives.add(appBackup.id, {}, auditSource); + }); + + it('can add another site', async function () { + const id = await backupSites.add(backupSite, auditSource); + expect(id).to.be.ok(); + backupSite.id = id; + }); + + it('can list backup sites', async function () { + const result = await backupSites.list(1, 5); + expect(result.length).to.be(2); + defaultBackupSite = result[0]; // first is always primary + }); + + it('can get backup site', async function () { + const backupSite = await backupSites.get(defaultBackupSite.id); + expect(backupSite.provider).to.be('filesystem'); + expect(backupSite.config.backupDir).to.be.ok(); // the test sets this to some tmp location + expect(backupSite.format).to.be('tgz'); + expect(backupSite.encryption).to.be(null); + expect(backupSite.primary).to.be(true); + }); + + it('cannot get random backup site', async function () { + const backupSite = await backupSites.get('random'); + expect(backupSite).to.be(null); + }); + + it('can set backup config', async function () { + const newConfig = Object.assign({}, defaultBackupSite.config, { backupDir: '/tmp/backups' }); + await backupSites.setConfig(defaultBackupSite, newConfig, auditSource); + + const result = await backupSites.get(defaultBackupSite.id); + expect(result.config.backupDir).to.be('/tmp/backups'); + }); + + it('cannot set invalid schedule', async function () { + const [error] = await safe(backupSites.setSchedule(defaultBackupSite, '', auditSource)); + expect(error.reason).to.be(BoxError.BAD_FIELD); + }); + + it('can set valid schedule', async function () { + for (const pattern of [ '00 * * * * *', constants.CRON_PATTERN_NEVER ]) { + await backupSites.setSchedule(defaultBackupSite, pattern, auditSource); + const backupSite = await backupSites.get(defaultBackupSite.id); + expect(backupSite.schedule).to.be(pattern); + } + }); + + it('cannot set invalid retention', async function () { + const [error] = await safe(backupSites.setRetention(defaultBackupSite, { keepWhenever: 4 }, auditSource)); + expect(error.reason).to.be(BoxError.BAD_FIELD); + }); + + it('can set valid retention', async function () { + for (const retention of [ { keepWithinSecs: 1 }, { keepYearly: 3 }, { keepMonthly: 14 } ]) { + await backupSites.setRetention(defaultBackupSite, retention, auditSource); + const backupSite = await backupSites.get(defaultBackupSite.id); + expect(backupSite.retention).to.eql(retention); + } + }); + + it('cannot delete the primary backup site', async function () { + const [error] = await safe(backupSites.del(defaultBackupSite, auditSource)); + expect(error.reason).to.be(BoxError.CONFLICT); + }); + + it('can set another as primary', async function () { + await backupSites.setPrimary(backupSite, auditSource); + + const noMoreDefaultSite = await 
backupSites.get(defaultBackupSite.id); + expect(noMoreDefaultSite.primary).to.be(false); + defaultBackupSite.primary = false; + }); + + it('can delete non-primary backup site', async function () { + await backupSites.del(defaultBackupSite, auditSource); + expect(await backups.list(1, 10)).to.eql([]); + expect(await archives.list(1, 10)).to.eql([]); + }); +}); diff --git a/src/test/backuptargets-test.js b/src/test/backuptargets-test.js deleted file mode 100644 index ac90e2ded..000000000 --- a/src/test/backuptargets-test.js +++ /dev/null @@ -1,137 +0,0 @@ -/* jslint node:true */ -/* global it:false */ -/* global describe:false */ -/* global before:false */ -/* global after:false */ - -'use strict'; - -const archives = require('../archives.js'), - backups = require('../backups.js'), - backupTargets = require('../backuptargets.js'), - BoxError = require('../boxerror.js'), - common = require('./common.js'), - constants = require('../constants.js'), - expect = require('expect.js'), - safe = require('safetydance'); - -describe('backups', function () { - const { setup, cleanup, auditSource, getDefaultBackupTarget } = common; - - before(async function () { - await setup(); - }); - after(cleanup); - - const backupTarget = { - provider: 'filesystem', - name: 'Another', - config: { backupDir: '/tmp/boxtest2' }, - format: 'rsync', - retention: { keepWithinSecs: 2 * 24 * 60 * 60 }, - schedule: '00 00 23 * * *' - }; - - const appBackup = { - id: null, - remotePath: 'backup-box', - encryptionVersion: 2, - packageVersion: '1.0.0', - type: backups.BACKUP_TYPE_APP, - state: backups.BACKUP_STATE_NORMAL, - identifier: 'box', - dependsOn: [ 'dep1' ], - manifest: null, - preserveSecs: 0, - label: '', - appConfig: { loc: 'loc1' }, - targetId: null - }; - - let defaultBackupTarget = null; - - before(async function () { - appBackup.targetId = (await getDefaultBackupTarget()).id; - appBackup.id = await backups.add(appBackup); - await archives.add(appBackup.id, {}, auditSource); - }); - - it('can add another target', async function () { - const id = await backupTargets.add(backupTarget, auditSource); - expect(id).to.be.ok(); - backupTarget.id = id; - }); - - it('can list backup targets', async function () { - const result = await backupTargets.list(1, 5); - expect(result.length).to.be(2); - defaultBackupTarget = result[0]; // first is always primary - }); - - it('can get backup target', async function () { - const backupTarget = await backupTargets.get(defaultBackupTarget.id); - expect(backupTarget.provider).to.be('filesystem'); - expect(backupTarget.config.backupDir).to.be.ok(); // the test sets this to some tmp location - expect(backupTarget.format).to.be('tgz'); - expect(backupTarget.encryption).to.be(null); - expect(backupTarget.primary).to.be(true); - }); - - it('cannot get random backup target', async function () { - const backupTarget = await backupTargets.get('random'); - expect(backupTarget).to.be(null); - }); - - it('can set backup config', async function () { - const newConfig = Object.assign({}, defaultBackupTarget.config, { backupDir: '/tmp/backups' }); - await backupTargets.setConfig(defaultBackupTarget, newConfig, auditSource); - - const result = await backupTargets.get(defaultBackupTarget.id); - expect(result.config.backupDir).to.be('/tmp/backups'); - }); - - it('cannot set invalid schedule', async function () { - const [error] = await safe(backupTargets.setSchedule(defaultBackupTarget, '', auditSource)); - expect(error.reason).to.be(BoxError.BAD_FIELD); - }); - - it('can set valid schedule', async 
function () { - for (const pattern of [ '00 * * * * *', constants.CRON_PATTERN_NEVER ]) { - await backupTargets.setSchedule(defaultBackupTarget, pattern, auditSource); - const backupTarget = await backupTargets.get(defaultBackupTarget.id); - expect(backupTarget.schedule).to.be(pattern); - } - }); - - it('cannot set invalid retention', async function () { - const [error] = await safe(backupTargets.setRetention(defaultBackupTarget, { keepWhenever: 4 }, auditSource)); - expect(error.reason).to.be(BoxError.BAD_FIELD); - }); - - it('can set valid retention', async function () { - for (const retention of [ { keepWithinSecs: 1 }, { keepYearly: 3 }, { keepMonthly: 14 } ]) { - await backupTargets.setRetention(defaultBackupTarget, retention, auditSource); - const backupTarget = await backupTargets.get(defaultBackupTarget.id); - expect(backupTarget.retention).to.eql(retention); - } - }); - - it('cannot delete the primary backup target', async function () { - const [error] = await safe(backupTargets.del(defaultBackupTarget, auditSource)); - expect(error.reason).to.be(BoxError.CONFLICT); - }); - - it('can set another as primary', async function () { - await backupTargets.setPrimary(backupTarget, auditSource); - - const noMoreDefaultTarget = await backupTargets.get(defaultBackupTarget.id); - expect(noMoreDefaultTarget.primary).to.be(false); - defaultBackupTarget.primary = false; - }); - - it('can delete non-primary backup target', async function () { - await backupTargets.del(defaultBackupTarget, auditSource); - expect(await backups.list(1, 10)).to.eql([]); - expect(await archives.list(1, 10)).to.eql([]); - }); -}); diff --git a/src/test/backuptask-test.js b/src/test/backuptask-test.js index c3cb4b3ab..93c9952c1 100644 --- a/src/test/backuptask-test.js +++ b/src/test/backuptask-test.js @@ -7,7 +7,7 @@ 'use strict'; const backups = require('../backups.js'), - backupTargets = require('../backuptargets.js'), + backupSites = require('../backupsites.js'), common = require('./common.js'), expect = require('expect.js'), fs = require('node:fs'), @@ -17,7 +17,7 @@ const backups = require('../backups.js'), timers = require('timers/promises'); describe('backuptask', function () { - const { setup, cleanup, getDefaultBackupTarget, auditSource } = common; + const { setup, cleanup, getDefaultBackupSite, auditSource } = common; before(setup); after(cleanup); @@ -30,16 +30,16 @@ describe('backuptask', function () { backupDir: path.join(os.tmpdir(), 'backupstask-test-filesystem'), }; - let defaultBackupTarget; + let defaultBackupSite; before(async function () { fs.rmSync(backupConfig.backupDir, { recursive: true, force: true }); - defaultBackupTarget = await getDefaultBackupTarget(); - await backupTargets.setConfig(defaultBackupTarget, backupConfig, auditSource); + defaultBackupSite = await getDefaultBackupSite(); + await backupSites.setConfig(defaultBackupSite, backupConfig, auditSource); }); - async function createBackup(target) { - const taskId = await backupTargets.startBackupTask(target, auditSource); + async function createBackup(site) { + const taskId = await backupSites.startBackupTask(site, auditSource); while (true) { await timers.setTimeout(1000); @@ -68,7 +68,7 @@ describe('backuptask', function () { return; } - const result = await createBackup(defaultBackupTarget); + const result = await createBackup(defaultBackupSite); expect(fs.statSync(path.join(backupConfig.backupDir, 'snapshot/box.tar.gz')).nlink).to.be(2); // hard linked to a rotated backup expect(fs.statSync(path.join(backupConfig.backupDir, 
result.remotePath)).nlink).to.be(2); @@ -82,7 +82,7 @@ describe('backuptask', function () { return; } - const result = await createBackup(defaultBackupTarget); + const result = await createBackup(defaultBackupSite); expect(fs.statSync(path.join(backupConfig.backupDir, 'snapshot/box.tar.gz')).nlink).to.be(2); // hard linked to a rotated backup expect(fs.statSync(path.join(backupConfig.backupDir, result.remotePath)).nlink).to.be(2); // hard linked to new backup expect(fs.statSync(path.join(backupConfig.backupDir, backupInfo1.remotePath)).nlink).to.be(1); // not hard linked anymore diff --git a/src/test/common.js b/src/test/common.js index 54c3c2f1a..5ba2b1bc0 100644 --- a/src/test/common.js +++ b/src/test/common.js @@ -2,7 +2,7 @@ const apps = require('../apps.js'), appstore = require('../appstore.js'), - backupTargets = require('../backuptargets.js'), + backupSites = require('../backupsites.js'), constants = require('../constants.js'), cron = require('../cron.js'), dashboard = require('../dashboard.js'), @@ -173,7 +173,7 @@ exports = module.exports = { checkMails, clearMailQueue, - getDefaultBackupTarget, + getDefaultBackupSite, mockApiServerOrigin: 'http://localhost:6060', dashboardDomain: domain.domain, @@ -222,7 +222,7 @@ async function databaseSetup() { await dashboard._setLocation(constants.DASHBOARD_SUBDOMAIN, exports.dashboardDomain); // duplicated here since we clear the database - const id = await backupTargets.add({ + const id = await backupSites.add({ provider: 'filesystem', name: 'Default', config: { backupDir: '/tmp/boxtest' }, @@ -230,7 +230,7 @@ async function databaseSetup() { retention: { keepWithinSecs: 2 * 24 * 60 * 60 }, schedule: '00 00 23 * * *' }, auditSource); - await backupTargets.setPrimary({ id }, auditSource); + await backupSites.setPrimary({ id }, auditSource); } async function domainSetup() { @@ -276,7 +276,7 @@ async function checkMails(number) { return emails; } -async function getDefaultBackupTarget() { - const result = await backupTargets.list(1, 1); +async function getDefaultBackupSite() { + const result = await backupSites.list(1, 1); return result[0]; } diff --git a/src/test/storage-provider-test.js b/src/test/storage-provider-test.js index 24c182acd..e4301605c 100644 --- a/src/test/storage-provider-test.js +++ b/src/test/storage-provider-test.js @@ -6,7 +6,7 @@ 'use strict'; -const backupTargets = require('../backuptargets.js'), +const backupSites = require('../backupsites.js'), BoxError = require('../boxerror.js'), common = require('./common.js'), consumers = require('node:stream/consumers'), @@ -23,7 +23,7 @@ const backupTargets = require('../backuptargets.js'), stream = require('stream/promises'); describe('Storage', function () { - const { setup, cleanup, getDefaultBackupTarget, auditSource } = common; + const { setup, cleanup, getDefaultBackupSite, auditSource } = common; before(setup); after(cleanup); @@ -37,11 +37,11 @@ describe('Storage', function () { prefix: 'someprefix' }; - let defaultBackupTarget; + let defaultBackupSite; before(async function () { gTmpFolder = fs.mkdtempSync(path.join(os.tmpdir(), 'filesystem-storage-test_')); - defaultBackupTarget = await getDefaultBackupTarget(); + defaultBackupSite = await getDefaultBackupSite(); gBackupConfig.backupDir = path.join(gTmpFolder, 'backups/'); }); @@ -52,12 +52,12 @@ describe('Storage', function () { it('fails to set backup storage for bad folder', async function () { const tmp = Object.assign({}, gBackupConfig, { backupDir: '/root/oof' }); - const [error] = await 
safe(backupTargets.setConfig(defaultBackupTarget, tmp, auditSource)); + const [error] = await safe(backupSites.setConfig(defaultBackupSite, tmp, auditSource)); expect(error.reason).to.equal(BoxError.BAD_FIELD); }); it('succeeds to set backup storage', async function () { - await backupTargets.setConfig(defaultBackupTarget, gBackupConfig, auditSource); + await backupSites.setConfig(defaultBackupSite, gBackupConfig, auditSource); expect(fs.existsSync(path.join(gBackupConfig.backupDir, 'someprefix/snapshot'))).to.be(true); // auto-created }); diff --git a/src/updater.js b/src/updater.js index 381566ca3..119d09620 100644 --- a/src/updater.js +++ b/src/updater.js @@ -23,7 +23,7 @@ const apps = require('./apps.js'), assert = require('node:assert'), AuditSource = require('./auditsource.js'), BoxError = require('./boxerror.js'), - backupTargets = require('./backuptargets.js'), + backupSites = require('./backupsites.js'), backuptask = require('./backuptask.js'), constants = require('./constants.js'), cron = require('./cron.js'), @@ -176,10 +176,10 @@ async function updateBox(boxUpdateInfo, options, progressCallback) { if (!options.skipBackup) { progressCallback({ percent: 10, message: 'Backing up' }); - const target = await backupTargets.getPrimary(); - if (!target) throw new BoxError(BoxError.BAD_STATE, 'no default backup target'); + const site = await backupSites.getPrimary(); + if (!site) throw new BoxError(BoxError.BAD_STATE, 'no default backup site'); - await backuptask.fullBackup(target.id, { preserveSecs: 3*7*24*60*60 }, (progress) => progressCallback({ percent: 10+progress.percent*70/100, message: progress.message })); + await backuptask.fullBackup(site.id, { preserveSecs: 3*7*24*60*60 }, (progress) => progressCallback({ percent: 10+progress.percent*70/100, message: progress.message })); await checkFreeDiskSpace(2*1024*1024*1024); // check again in case backup is in same disk } @@ -226,8 +226,8 @@ async function startBoxUpdateTask(options, auditSource) { const [error] = await safe(locks.acquire(locks.TYPE_BOX_UPDATE_TASK)); if (error) throw new BoxError(BoxError.BAD_STATE, `Another update task is in progress: ${error.message}`); - const backupTarget = await backupTargets.getPrimary(); - const memoryLimit = backupTarget.limits?.memoryLimit ? Math.max(backupTarget.limits.memoryLimit/1024/1024, 400) : 400; + const backupSite = await backupSites.getPrimary(); + const memoryLimit = backupSite.limits?.memoryLimit ? Math.max(backupSite.limits.memoryLimit/1024/1024, 400) : 400; const taskId = await tasks.add(tasks.TASK_BOX_UPDATE, [ boxUpdateInfo, options ]); await eventlog.add(eventlog.ACTION_UPDATE, auditSource, { taskId, boxUpdateInfo });
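
For reference, the renamed backup-site API registered in src/server.js above can be exercised end-to-end much the way the test suite does. What follows is a minimal sketch, not part of the patch: serverUrl and accessToken are hypothetical stand-ins for a running Cloudron and an admin token, and the client is the same @cloudron/superagent module the tests import. It lists the configured backup sites, picks the primary one, and starts a backup on it; per the routes above, create_backup answers 202 with a taskId.

    'use strict';

    const superagent = require('@cloudron/superagent');

    async function triggerPrimarySiteBackup(serverUrl, accessToken) {
        // GET /api/v1/backup_sites responds with { backupSites: [...] }, each entry carrying a primary flag
        const listResponse = await superagent.get(`${serverUrl}/api/v1/backup_sites`)
            .query({ access_token: accessToken });
        const primarySite = listResponse.body.backupSites.find(site => site.primary);
        if (!primarySite) throw new Error('no primary backup site configured');

        // POST /api/v1/backup_sites/:id/create_backup starts the backup task and answers 202 with a taskId
        const createResponse = await superagent.post(`${serverUrl}/api/v1/backup_sites/${primarySite.id}/create_backup`)
            .query({ access_token: accessToken });
        return createResponse.body.taskId; // callers poll this task for completion, as the tests' waitForTask helper does
    }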