diff --git a/migrations/20250724102340-backupTargets-create-table.js b/migrations/20250724102340-backupTargets-create-table.js index 764238135..eb5a484a9 100644 --- a/migrations/20250724102340-backupTargets-create-table.js +++ b/migrations/20250724102340-backupTargets-create-table.js @@ -9,7 +9,7 @@ const child_process = require('child_process'), exports.up = async function (db) { const cmd = 'CREATE TABLE IF NOT EXISTS backupTargets(' + 'id VARCHAR(128) NOT NULL UNIQUE,' + - 'label VARCHAR(128),' + + 'name VARCHAR(128) NOT NULL UNIQUE,' + 'provider VARCHAR(32) NOT NULL,' + 'configJson TEXT,' + 'limitsJson TEXT,' + @@ -26,7 +26,7 @@ exports.up = async function (db) { const results = await db.runSql('SELECT name, value FROM settings WHERE name=? OR name=? OR name=?', [ 'backup_storage', 'backup_limits', 'backup_policy' ]); - const label = 'Default', main = true; + const name = 'Default', main = true; let config = null, limits = null, encryption = null, format = null, provider = null; let retention = { keepWithinSecs: 2 * 24 * 60 * 60 }; let schedule = '00 00 23 * * *'; @@ -83,8 +83,8 @@ exports.up = async function (db) { fs.mkdirSync(targetInfoDir, { recursive: true }); child_process.execSync(`find ${paths.BACKUP_INFO_DIR}/ -maxdepth 1 -type f -exec mv -t ${targetInfoDir}/ {} +`); - await db.runSql('INSERT INTO backupTargets (id, label, provider, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', - [ id, label, provider, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, main ]); + await db.runSql('INSERT INTO backupTargets (id, name, provider, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', + [ id, name, provider, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, main ]); await db.runSql('DELETE 
FROM settings WHERE name=? OR name=? OR name=?', [ 'backup_storage', 'backup_limits', 'backup_policy' ]); await db.runSql('COMMIT'); diff --git a/migrations/20250724141339-backups-add-targetId.js b/migrations/20250724141339-backups-add-targetId.js index ad74f6b50..b10534f4c 100644 --- a/migrations/20250724141339-backups-add-targetId.js +++ b/migrations/20250724141339-backups-add-targetId.js @@ -25,12 +25,13 @@ exports.up = async function(db) { cloneBackupTarget = Object.assign({}, results[0], { id: cloneId }); cloneBackupTarget.format = currentBackupTarget.format === 'rsync' ? 'tgz' : 'rsync'; cloneBackupTarget.priority = false; + cloneBackupTarget.name = 'Copy of Default'; cloneBackupTarget.schedule = 'never'; cloneBackupTarget._managedMountPath = path.join(paths.MANAGED_BACKUP_MOUNT_DIR, cloneId); // this won't work until the user remounts console.log(`Existing format is ${currentBackupTarget.format} . Adding clone backup target for ${cloneBackupTarget.format}`); - await db.runSql('INSERT INTO backupTargets (id, label, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, priority) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)', - [ cloneBackupTarget.id, cloneBackupTarget.label, cloneBackupTarget.configJson, cloneBackupTarget.limitsJson, cloneBackupTarget.retentionJson, cloneBackupTarget.schedule, + await db.runSql('INSERT INTO backupTargets (id, name, provider, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, priority) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', + [ cloneBackupTarget.id, cloneBackupTarget.name, cloneBackupTarget.provider, cloneBackupTarget.configJson, cloneBackupTarget.limitsJson, cloneBackupTarget.retentionJson, cloneBackupTarget.schedule, cloneBackupTarget.encryptionJson, cloneBackupTarget.format, cloneBackupTarget.priority ]); } diff --git a/migrations/schema.sql b/migrations/schema.sql index e0c103a14..0a51ef887 100644 --- a/migrations/schema.sql +++ b/migrations/schema.sql @@ -304,7 +304,7 @@ CREATE TABLE IF NOT EXISTS dockerRegistries( CREATE TABLE IF 
NOT EXISTS backupTargets( id VARCHAR(128) NOT NULL UNIQUE, - label VARCHAR(128), + name VARCHAR(128) NOT NULL UNIQUE, provider VARCHAR(32) NOT NULL, configJson TEXT, limitsJson TEXT, diff --git a/src/backuptargets.js b/src/backuptargets.js index 1977f4dd3..0a3f0e26a 100644 --- a/src/backuptargets.js +++ b/src/backuptargets.js @@ -12,6 +12,7 @@ exports = module.exports = { setSchedule, setRetention, setPrimary, + setName, removePrivateFields, @@ -59,7 +60,7 @@ const assert = require('assert'), // filesystem - backupDir, noHardlinks // mountpoint - mountPoint, prefix, noHardlinks // encryption: 'encryptionPassword' and 'encryptedFilenames' is converted into an 'encryption' object using hush.js. Password is lost forever after conversion. -const BACKUP_TARGET_FIELDS = [ 'id', 'label', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'main', 'creationTime', 'ts' ].join(','); +const BACKUP_TARGET_FIELDS = [ 'id', 'name', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'main', 'creationTime', 'ts' ].join(','); function storageApi(backupTarget) { assert.strictEqual(typeof backupTarget, 'object'); @@ -128,10 +129,10 @@ function removePrivateFields(target) { return target; } -function validateLabel(label) { - assert.strictEqual(typeof label, 'string'); +function validateName(name) { + assert.strictEqual(typeof name, 'string'); - if (label.length > 48) return new BoxError(BoxError.BAD_FIELD, 'Label too long'); + if (name.length > 48) return new BoxError(BoxError.BAD_FIELD, 'name too long'); } function validateSchedule(schedule) { @@ -169,7 +170,7 @@ async function list(page, perPage) { assert(typeof page === 'number' && page > 0); assert(typeof perPage === 'number' && perPage > 0); - const results = await database.query(`SELECT ${BACKUP_TARGET_FIELDS} FROM backupTargets ORDER BY main DESC LIMIT ?,?`, [ (page-1)*perPage, perPage ]); + const results = await database.query(`SELECT 
${BACKUP_TARGET_FIELDS} FROM backupTargets ORDER BY main DESC, name ASC LIMIT ?,?`, [ (page-1)*perPage, perPage ]); results.forEach(function (result) { postProcess(result); }); @@ -195,7 +196,7 @@ async function update(target, data) { const args = []; const fields = []; for (const k in data) { - if (k === 'label' || k === 'schedule' || k === 'main') { // format, provider cannot be updated + if (k === 'name' || k === 'schedule' || k === 'main') { // format, provider cannot be updated fields.push(k + ' = ?'); args.push(data[k]); } else if (k === 'config' || k === 'limits' || k === 'retention') { // encryption cannot be updated @@ -264,6 +265,18 @@ async function setPrimary(backupTarget, auditSource) { await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupTarget, primary: true }); } +async function setName(backupTarget, name, auditSource) { + assert.strictEqual(typeof backupTarget, 'object'); + assert.strictEqual(typeof name, 'string'); + assert.strictEqual(typeof auditSource, 'object'); + + const nameError = validateName(name); + if (nameError) throw nameError; + + await update(backupTarget, { name }); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_UPDATE, auditSource, { backupTarget, name }); +} + async function del(backupTarget, auditSource) { assert.strictEqual(typeof backupTarget, 'object'); assert.strictEqual(typeof auditSource, 'object'); @@ -437,7 +449,7 @@ async function add(data, auditSource) { if (constants.DEMO) throw new BoxError(BoxError.BAD_STATE, 'Not allowed in demo mode'); - const { provider, label, config, format, retention, schedule } = data; // required + const { provider, name, config, format, retention, schedule } = data; // required const limits = data.limits || null, encryptionPassword = data.encryptionPassword || null, encryptedFilenames = data.encryptedFilenames || false; @@ -445,8 +457,8 @@ async function add(data, auditSource) { const formatError = backupFormat.validateFormat(format); if (formatError) throw formatError; - const labelError = validateLabel(label); - if (labelError) throw labelError; + const nameError = 
validateName(name); + if (nameError) throw nameError; let encryption = null; if (encryptionPassword) { @@ -462,13 +474,13 @@ async function add(data, auditSource) { debug('add: validating new storage configuration'); const sanitizedConfig = await storageApi({ provider }).verifyConfig({id, provider, config }); - await database.query('INSERT INTO backupTargets (id, label, provider, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', - [ id, label, provider, JSON.stringify(sanitizedConfig), JSON.stringify(limits), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, false ]); + await database.query('INSERT INTO backupTargets (id, name, provider, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', + [ id, name, provider, JSON.stringify(sanitizedConfig), JSON.stringify(limits), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, false ]); debug('add: setting up new storage configuration'); await storageApi({ provider }).setup(sanitizedConfig); - await eventlog.add(eventlog.ACTION_BACKUP_TARGET_ADD, auditSource, { id, label, provider, config, schedule, format }); + await eventlog.add(eventlog.ACTION_BACKUP_TARGET_ADD, auditSource, { id, name, provider, config, schedule, format }); return id; } diff --git a/src/routes/backuptargets.js b/src/routes/backuptargets.js index 677e23527..e68cdeec8 100644 --- a/src/routes/backuptargets.js +++ b/src/routes/backuptargets.js @@ -14,6 +14,7 @@ exports = module.exports = { setSchedule, setRetention, setPrimary, + setName, createBackup, cleanup, @@ -63,10 +64,10 @@ async function list(req, res, next) { async function add(req, res, next) { assert.strictEqual(typeof req.body, 'object'); - const { label, format, provider, config } = req.body; + const { name, format, provider, config } = req.body; if (typeof format !== 'string') return next(new HttpError(400, 'format 
must be a string')); - if (typeof label !== 'string') return next(new HttpError(400, 'label must be a string')); + if (typeof name !== 'string') return next(new HttpError(400, 'name must be a string')); if (typeof provider !== 'string') return next(new HttpError(400, 'provider is required')); // provider specific options are validated by provider backends @@ -180,6 +181,17 @@ async function setPrimary(req, res, next) { next(new HttpSuccess(200, {})); } +async function setName(req, res, next) { + assert.strictEqual(typeof req.body, 'object'); + + if (typeof req.body.name !== 'string') return next(new HttpError(400, 'name is required')); + + const [error] = await safe(backupTargets.setName(req.resources.backupTarget, req.body.name, AuditSource.fromRequest(req))); + if (error) return next(BoxError.toHttpError(error)); + + next(new HttpSuccess(200, {})); +} + async function createBackup(req, res, next) { assert.strictEqual(typeof req.resources.backupTarget, 'object'); diff --git a/src/server.js b/src/server.js index 71cead55e..aead9079e 100644 --- a/src/server.js +++ b/src/server.js @@ -165,6 +165,7 @@ async function initializeExpressSync() { router.post('/api/v1/backup_targets/:id/create_backup', token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.createBackup); router.post('/api/v1/backup_targets/:id/cleanup', json, token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.cleanup); router.post('/api/v1/backup_targets/:id/remount', json, token, authorizeAdmin, routes.backupTargets.load, routes.backupTargets.remount); + router.post('/api/v1/backup_targets/:id/configure/name', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setName); router.post('/api/v1/backup_targets/:id/configure/config', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setConfig); router.post('/api/v1/backup_targets/:id/configure/limits', json, token, authorizeOwner, routes.backupTargets.load, 
routes.backupTargets.setLimits); router.post('/api/v1/backup_targets/:id/configure/schedule', json, token, authorizeOwner, routes.backupTargets.load, routes.backupTargets.setSchedule);