Files
cloudron-box/migrations/20250724102340-backupTargets-create-table.js
T
Girish Ramakrishnan 53e9925880 backups: make id, provider a backend specific setting
The backend can stash whatever values it wants in the config.
Just like the DNS backends, we make verifyConfig return a sanitized config.
An added benefit is that extra user fields (via API) are also not dumped into the db.
2025-08-01 18:55:04 +02:00

81 lines
3.1 KiB
JavaScript

'use strict';
const child_process = require('child_process'),
crypto = require('crypto'),
fs = require('fs'),
path = require('path'),
paths = require('../src/paths.js');
exports.up = async function (db) {
const cmd = 'CREATE TABLE IF NOT EXISTS backupTargets(' +
'id VARCHAR(128) NOT NULL UNIQUE,' +
'label VARCHAR(128),' +
'provider VARCHAR(32) NOT NULL,' +
'configJson TEXT,' +
'limitsJson TEXT,' +
'retentionJson TEXT,' +
'encryptionJson TEXT,' +
'format VARCHAR(16) NOT NULL,' +
'schedule VARCHAR(128),' +
'main BOOLEAN DEFAULT false,' +
'creationTime TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,' +
'ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,' +
'PRIMARY KEY (id)) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin';
await db.runSql(cmd);
const results = await db.runSql('SELECT name, value FROM settings WHERE name=? OR name=? OR name=?', [ 'backup_storage', 'backup_limits', 'backup_policy' ]);
const label = 'Default', main = true;
let config = null, limits = null, encryption = null, format = null, provider = null;
let retention = { keepWithinSecs: 2 * 24 * 60 * 60 };
let schedule = '00 00 23 * * *';
const id = `bc-${crypto.randomUUID()}`;
if (results.length === 0) {
provider = 'filesystem';
config = { id, provider, backupFolder: paths.DEFAULT_BACKUP_DIR };
format = 'tgz';
} else {
for (const r of results) {
if (r.name === 'backup_storage') {
const tmp = JSON.parse(r.value);
provider = tmp.provider;
encryption = tmp.encryption || null;
delete tmp.encryption;
format = tmp.format;
delete tmp.format;
tmp.id = id;
config = tmp;
} else if (r.name === 'backup_limits') {
limits = JSON.parse(r.value);
} else if (r.name === 'backup_policy') {
const tmp = JSON.parse(r.value);
retention = tmp.retention;
schedule = tmp.schedule;
}
}
}
await db.runSql('START TRANSACTION');
const targetInfoDir = path.join(paths.BACKUP_INFO_DIR, id);
console.log(`Moving existing cache and snapshot file into ${targetInfoDir}`);
fs.mkdirSync(targetInfoDir, { recursive: true });
child_process.execSync(`find ${paths.BACKUP_INFO_DIR}/ -maxdepth 1 -type f -exec mv -t ${targetInfoDir}/ {} +`);
await db.runSql('INSERT INTO backupTargets (id, label, provider, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
[ id, label, provider, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, main ]);
await db.runSql('DELETE FROM settings WHERE name=? OR name=? OR name=?', [ 'backup_storage', 'backup_limits', 'backup_policy' ]);
await db.runSql('COMMIT');
};
exports.down = async function (db) {
await db.runSql('DROP TABLE backupTargets');
};