diff --git a/migrations/20250724102340-backupTargets-create-table.js b/migrations/20250724102340-backupTargets-create-table.js index 63de5daac..b0433088e 100644 --- a/migrations/20250724102340-backupTargets-create-table.js +++ b/migrations/20250724102340-backupTargets-create-table.js @@ -15,6 +15,7 @@ exports.up = async function (db) { 'limitsJson TEXT,' + 'retentionJson TEXT,' + 'encryptionJson TEXT,' + + 'integrityKeyPairJson TEXT,' + 'format VARCHAR(16) NOT NULL,' + 'schedule VARCHAR(128),' + 'main BOOLEAN DEFAULT false,' + @@ -37,6 +38,11 @@ exports.up = async function (db) { let config = null, limits = null, encryption = null, format = null, provider = null; let retention = { keepWithinSecs: 2 * 24 * 60 * 60 }; let schedule = '00 00 23 * * *'; + const integrityKeyPair = crypto.generateKeyPairSync('ed25519', { + publicKeyEncoding: { type: 'spki', format: 'pem' }, + privateKeyEncoding: { type: 'pkcs8', format: 'pem' } + }); + const id = `bc-${crypto.randomUUID()}`; // convert existing configuration into a backup target @@ -84,8 +90,8 @@ exports.up = async function (db) { child_process.execSync(`find ${paths.BACKUP_INFO_DIR}/ -maxdepth 1 -type f -exec mv -t ${targetInfoDir}/ {} +`); await db.runSql('START TRANSACTION'); - await db.runSql('INSERT INTO backupTargets (id, name, provider, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', - [ id, name, provider, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, main ]); + await db.runSql('INSERT INTO backupTargets (id, name, provider, configJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', + [ id, name, provider, JSON.stringify(config), JSON.stringify(limits), JSON.stringify(integrityKeyPair), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, main ]); await 
db.runSql('DELETE FROM settings WHERE name=? OR name=? OR name=?', [ 'backup_storage', 'backup_limits', 'backup_policy' ]); await db.runSql('COMMIT'); }; diff --git a/migrations/20250812102445-backups-add-integrityJson.js b/migrations/20250812102445-backups-add-integrityJson.js new file mode 100644 index 000000000..1e8945efc --- /dev/null +++ b/migrations/20250812102445-backups-add-integrityJson.js @@ -0,0 +1,15 @@ +'use strict'; + +exports.up = function(db, callback) { + db.runSql('ALTER TABLE backups ADD COLUMN integrityJson TEXT', function (error) { + if (error) console.error(error); + callback(error); + }); +}; + +exports.down = function(db, callback) { + db.runSql('ALTER TABLE backups DROP COLUMN integrityJson', function (error) { + if (error) console.error(error); + callback(error); + }); +}; diff --git a/src/backupformat/tgz.js b/src/backupformat/tgz.js index ff838003a..7c9bbc031 100644 --- a/src/backupformat/tgz.js +++ b/src/backupformat/tgz.js @@ -2,7 +2,9 @@ const assert = require('assert'), backupTargets = require('../backuptargets.js'), + blobs = require('../blobs.js'), BoxError = require('../boxerror.js'), + crypto = require('crypto'), DataLayout = require('../datalayout.js'), debug = require('debug')('box:backupformat/tgz'), { DecryptStream, EncryptStream } = require('../hush.js'), @@ -10,6 +12,7 @@ const assert = require('assert'), path = require('path'), ProgressStream = require('../progress-stream.js'), promiseRetry = require('../promise-retry.js'), + { Readable } = require('stream'), safe = require('safetydance'), stream = require('stream/promises'), { Transform } = require('node:stream'), @@ -139,12 +142,14 @@ async function tarPack(dataLayout, encryption, uploader, progressCallback) { const pack = tar.pack(); + const hash = crypto.createHash('sha256'); const hashStream = new Transform({ transform(chunk, encoding, callback) { hash.update(chunk); callback(null, chunk); } }); // crypto.Hash does not pass data through when piped; use a pass-through Transform so the archive reaches the uploader + let pipeline = null; if (encryption) { const encryptStream = new EncryptStream(encryption); - pipeline = safe(stream.pipeline(pack, gzip, encryptStream, ps, uploader.stream)); + pipeline = 
safe(stream.pipeline(pack, gzip, encryptStream, ps, hashStream, uploader.stream)); } else { - pipeline = safe(stream.pipeline(pack, gzip, ps, uploader.stream)); + pipeline = safe(stream.pipeline(pack, gzip, ps, hashStream, uploader.stream)); } for (const localPath of dataLayout.localPaths()) { @@ -159,6 +164,7 @@ async function tarPack(dataLayout, encryption, uploader, progressCallback) { debug(`tarPack: pipeline finished: ${JSON.stringify(ps.stats())}`); await uploader.finish(); + return { size: ps.stats().transferred, sha256: hash.digest('hex') }; } async function tarExtract(inStream, dataLayout, encryption, progressCallback) { @@ -242,11 +248,21 @@ async function upload(backupTarget, remotePath, dataLayout, progressCallback) { debug(`upload: Uploading ${dataLayout.toString()} to ${remotePath}`); - await promiseRetry({ times: 5, interval: 20000, debug }, async () => { + return await promiseRetry({ times: 5, interval: 20000, debug }, async () => { progressCallback({ message: `Uploading backup ${remotePath}` }); const uploader = await backupTargets.storageApi(backupTarget).upload(backupTarget.config, remotePath); - await tarPack(dataLayout, backupTarget.encryption, uploader, progressCallback); + const { size, sha256 } = await tarPack(dataLayout, backupTarget.encryption, uploader, progressCallback); + + const checksumData = [{ filename: path.basename(remotePath), size, sha256 }]; + const checksumDataJsonString = JSON.stringify(checksumData, null, 4); + const checksumDataStream = Readable.from(checksumDataJsonString); + const checksumUploader = await backupTargets.storageApi(backupTarget).upload(backupTarget.config, `${remotePath}.checksum`); + await stream.pipeline(checksumDataStream, checksumUploader.stream); + await checksumUploader.finish(); + + const checksumSignature = crypto.sign(null /* algorithm */, Buffer.from(checksumDataJsonString), backupTarget.integrityKeyPair.privateKey).toString('base64'); // crypto.sign is synchronous and requires Buffer data; base64 so the signature survives JSON.stringify + return { size, sha256, checksumSignature }; }); } diff --git a/src/backups.js b/src/backups.js
index c38f05c72..90605e3a3 100644 --- a/src/backups.js +++ b/src/backups.js @@ -8,6 +8,7 @@ exports = module.exports = { add, update, setState, + setIntegrity, list, del, @@ -29,7 +30,7 @@ const assert = require('assert'), hat = require('./hat.js'), safe = require('safetydance'); -const BACKUPS_FIELDS = [ 'id', 'remotePath', 'label', 'identifier', 'creationTime', 'packageVersion', 'type', 'dependsOnJson', 'state', 'manifestJson', 'preserveSecs', 'encryptionVersion', 'appConfigJson', 'targetId' ].join(','); +const BACKUPS_FIELDS = [ 'id', 'remotePath', 'label', 'identifier', 'creationTime', 'packageVersion', 'type', 'integrityJson', 'dependsOnJson', 'state', 'manifestJson', 'preserveSecs', 'encryptionVersion', 'appConfigJson', 'targetId' ].join(','); function postProcess(result) { assert.strictEqual(typeof result, 'object'); @@ -40,6 +41,9 @@ function postProcess(result) { result.manifest = result.manifestJson ? safe.JSON.parse(result.manifestJson) : null; delete result.manifestJson; + result.integrity = result.integrityJson ? safe.JSON.parse(result.integrityJson) : null; + delete result.integrityJson; + result.appConfig = result.appConfigJson ? safe.JSON.parse(result.appConfigJson) : null; delete result.appConfigJson; @@ -166,6 +170,14 @@ async function setState(id, state) { if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found'); } +async function setIntegrity(id, integrity) { + assert.strictEqual(typeof id, 'string'); + assert.strictEqual(typeof integrity, 'object'); + + const result = await database.query('UPDATE backups SET integrityJson = ? 
WHERE id = ?', [JSON.stringify(integrity), id]); + if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found'); +} + async function list(page, perPage) { assert(typeof page === 'number' && page > 0); assert(typeof perPage === 'number' && perPage > 0); diff --git a/src/backuptargets.js b/src/backuptargets.js index 7ab33db78..deb814bfb 100644 --- a/src/backuptargets.js +++ b/src/backuptargets.js @@ -62,7 +62,7 @@ const assert = require('assert'), // filesystem - backupDir, noHardlinks // mountpoint - mountPoint, prefix, noHardlinks // encryption: 'encryptionPassword' and 'encryptedFilenames' is converted into an 'encryption' object using hush.js. Password is lost forever after conversion. -const BACKUP_TARGET_FIELDS = [ 'id', 'name', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'main', 'creationTime', 'ts' ].join(','); +const BACKUP_TARGET_FIELDS = [ 'id', 'name', 'provider', 'configJson', 'limitsJson', 'retentionJson', 'schedule', 'encryptionJson', 'format', 'main', 'creationTime', 'ts', 'integrityKeyPairJson' ].join(','); function storageApi(backupTarget) { assert.strictEqual(typeof backupTarget, 'object'); @@ -113,6 +113,9 @@ function postProcess(result) { result.encryption = result.encryptionJson ? safe.JSON.parse(result.encryptionJson) : null; delete result.encryptionJson; + result.integrityKeyPair = result.integrityKeyPairJson ? 
safe.JSON.parse(result.integrityKeyPairJson) : null; + delete result.integrityKeyPairJson; + result.primary = !!result.main; // primary is a reserved keyword in mysql delete result.main; @@ -127,6 +130,8 @@ function removePrivateFields(target) { target.encryptionPasswordHint = target.encryption?.encryptionPasswordHint || null; delete target.encryption; + if (target.integrityKeyPair) delete target.integrityKeyPair.privateKey; // postProcess maps a NULL column to null; guard like 'encryption' above + delete target.config.rootPath; target.config = storageApi(target).removePrivateFields(target.config); return target; @@ -500,14 +505,19 @@ async function add(data, auditSource) { encryption.encryptionPasswordHint = encryptionPasswordHint; } + const integrityKeyPair = crypto.generateKeyPairSync('ed25519', { + publicKeyEncoding: { type: 'spki', format: 'pem' }, + privateKeyEncoding: { type: 'pkcs8', format: 'pem' } + }); + const id = `bc-${crypto.randomUUID()}`; if (!safe.fs.mkdirSync(`${paths.BACKUP_INFO_DIR}/${id}`)) throw new BoxError(BoxError.FS_ERROR, `Failed to create info dir: ${safe.error.message}`); debug('add: validating new storage configuration'); const sanitizedConfig = await storageApi({ provider }).verifyConfig({id, provider, config }); - await database.query('INSERT INTO backupTargets (id, name, provider, configJson, limitsJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', - [ id, name, provider, JSON.stringify(sanitizedConfig), JSON.stringify(limits), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, false ]); + await database.query('INSERT INTO backupTargets (id, name, provider, configJson, limitsJson, integrityKeyPairJson, retentionJson, schedule, encryptionJson, format, main) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', + [ id, name, provider, JSON.stringify(sanitizedConfig), JSON.stringify(limits), JSON.stringify(integrityKeyPair), JSON.stringify(retention), schedule, JSON.stringify(encryption), format, false ]); debug('add: setting up new storage configuration'); await storageApi({
provider }).setup(sanitizedConfig); diff --git a/src/backuptask.js b/src/backuptask.js index eae1c5c26..15f82b7d3 100644 --- a/src/backuptask.js +++ b/src/backuptask.js @@ -85,7 +85,7 @@ async function upload(remotePath, targetId, dataLayoutString, progressCallback) await checkPreconditions(backupTarget, dataLayout); - await backupFormat.api(backupTarget.format).upload(backupTarget, remotePath, dataLayout, progressCallback); + return await backupFormat.api(backupTarget.format).upload(backupTarget, remotePath, dataLayout, progressCallback); } async function download(backupTarget, remotePath, dataLayout, progressCallback) { @@ -158,11 +158,11 @@ async function runBackupUpload(uploadConfig, progressCallback) { envCopy.NODE_OPTIONS = `--max-old-space-size=${heapSize}`; } - let result = ''; // the script communicates error result as a string + let lastMessage = null; // the script communicates error result as a string function onMessage(progress) { // this is { message } or { result } if ('message' in progress) return progressCallback({ message: `${progress.message} (${progressTag})` }); debug(`runBackupUpload: result - ${JSON.stringify(progress)}`); - result = progress.result; + lastMessage = progress; } // do not use debug for logging child output because it already has timestamps via it's own debug @@ -171,8 +171,10 @@ debug(`runBackupUpload: backuptask crashed`, error); throw new BoxError(BoxError.INTERNAL_ERROR, 'Backuptask crashed'); } else if (error && error.code === 50) { // exited with error - throw new BoxError(BoxError.EXTERNAL_ERROR, error); + throw new BoxError(BoxError.EXTERNAL_ERROR, lastMessage?.errorMessage); // lastMessage can be null if the child exited before the IPC message was delivered } + + return lastMessage?.result; } async function snapshotBox(progressCallback) { @@ -208,11 +210,13 @@ async function uploadBoxSnapshot(backupTarget, progressCallback) { const startTime = new Date(); - await runBackupUpload(uploadConfig, progressCallback); + const integrity = await 
runBackupUpload(uploadConfig, progressCallback); debug(`uploadBoxSnapshot: took ${(new Date() - startTime)/1000} seconds`); await backupTargets.setSnapshotInfo(backupTarget, 'box', { timestamp: new Date().toISOString() }); + + return integrity; } async function copy(backupTarget, srcRemotePath, destRemotePath, progressCallback) { @@ -228,6 +232,13 @@ async function copy(backupTarget, srcRemotePath, destRemotePath, progressCallbac throw copyError; } debug(`copy: copied successfully to ${destRemotePath}. Took ${(new Date() - startTime)/1000} seconds`); + + const [copyChecksumError] = await safe(backupTargets.storageApi(backupTarget).copy(backupTarget.config, `${srcRemotePath}.checksum`, `${destRemotePath}.checksum`, progressCallback)); + if (copyChecksumError) { + debug(`copy: copied to ${destRemotePath} errored. error: ${copyChecksumError.message}`); + throw copyChecksumError; + } + debug(`copy: copied checksum successfully to ${destRemotePath}.checksum`); } async function rotateBoxBackup(backupTarget, tag, options, dependsOn, progressCallback) { @@ -272,8 +283,10 @@ async function backupBox(backupTarget, dependsOn, tag, options, progressCallback assert.strictEqual(typeof options, 'object'); assert.strictEqual(typeof progressCallback, 'function'); - await uploadBoxSnapshot(backupTarget, progressCallback); - return await rotateBoxBackup(backupTarget, tag, options, dependsOn, progressCallback); + const integrity = await uploadBoxSnapshot(backupTarget, progressCallback); + const id = await rotateBoxBackup(backupTarget, tag, options, dependsOn, progressCallback); + await backups.setIntegrity(id, integrity); + return id; } async function rotateAppBackup(backupTarget, app, tag, options, progressCallback) { @@ -368,11 +381,13 @@ async function uploadAppSnapshot(backupTarget, app, progressCallback) { const startTime = new Date(); - await runBackupUpload(uploadConfig, progressCallback); + const integrity = await runBackupUpload(uploadConfig, progressCallback); 
debug(`uploadAppSnapshot: ${app.fqdn} uploaded to ${remotePath}. ${(new Date() - startTime)/1000} seconds`); await backupTargets.setSnapshotInfo(backupTarget, app.id, { timestamp: new Date().toISOString(), manifest: app.manifest }); + + return integrity; } async function backupAppWithTag(app, backupTarget, tag, options, progressCallback) { @@ -388,8 +403,10 @@ async function backupAppWithTag(app, backupTarget, tag, options, progressCallbac return lastKnownGoodAppBackup.id; } - await uploadAppSnapshot(backupTarget, app, progressCallback); - return await rotateAppBackup(backupTarget, app, tag, options, progressCallback); + const integrity = await uploadAppSnapshot(backupTarget, app, progressCallback); + const id = await rotateAppBackup(backupTarget, app, tag, options, progressCallback); + await backups.setIntegrity(id, integrity); + return id; } async function uploadMailSnapshot(backupTarget, progressCallback) { @@ -412,11 +429,13 @@ async function uploadMailSnapshot(backupTarget, progressCallback) { const startTime = new Date(); - await runBackupUpload(uploadConfig, progressCallback); + const integrity = await runBackupUpload(uploadConfig, progressCallback); debug(`uploadMailSnapshot: took ${(new Date() - startTime)/1000} seconds`); await backupTargets.setSnapshotInfo(backupTarget, 'mail', { timestamp: new Date().toISOString() }); + + return integrity; } async function rotateMailBackup(backupTarget, tag, options, progressCallback) { @@ -461,8 +480,10 @@ async function backupMailWithTag(backupTarget, tag, options, progressCallback) { debug(`backupMailWithTag: backing up mail with tag ${tag}`); - await uploadMailSnapshot(backupTarget, progressCallback); - return await rotateMailBackup(backupTarget, tag, options, progressCallback); + const integrity = await uploadMailSnapshot(backupTarget, progressCallback); + const id = await rotateMailBackup(backupTarget, tag, options, progressCallback); + await backups.setIntegrity(id, integrity); + return id; } async function 
downloadMail(backupTarget, remotePath, progressCallback) { diff --git a/src/platform.js b/src/platform.js index 9f78c197b..1065ab42a 100644 --- a/src/platform.js +++ b/src/platform.js @@ -177,7 +177,7 @@ async function initialize() { // we remove the config as a simple security measure to not expose IP <-> domain const activated = await users.isActivated(); if (!activated) { - debug('start: not activated. generating IP based redirection config'); + debug('initialize: not activated. generating IP based redirection config'); await safe(reverseProxy.writeDefaultConfig({ activated: false }), { debug }); // ok to fail if no disk space } diff --git a/src/scripts/backupupload.js b/src/scripts/backupupload.js index 551ca680f..54ad62223 100755 --- a/src/scripts/backupupload.js +++ b/src/scripts/backupupload.js @@ -44,12 +44,10 @@ function throttledProgressCallback(msecs) { (async function main() { await database.initialize(); - const [uploadError] = await safe(backuptask.upload(remotePath, format, dataLayoutString, throttledProgressCallback(5000))); + const [uploadError, result] = await safe(backuptask.upload(remotePath, format, dataLayoutString, throttledProgressCallback(5000))); debug('upload completed. error: %o', uploadError); - process.send({ result: uploadError ? uploadError.message : '' }); + process.send({ result, errorMessage: uploadError?.message }); - // https://nodejs.org/api/process.html are exit codes used by node. apps.js uses the value below - // to check apptask crashes process.exit(uploadError ? 
50 : 0); })(); diff --git a/src/test/backups-test.js b/src/test/backups-test.js index 276bbfc48..0962202b6 100644 --- a/src/test/backups-test.js +++ b/src/test/backups-test.js @@ -28,7 +28,8 @@ describe('backups', function () { preserveSecs: 0, label: '', appConfig: null, - targetId: null + targetId: null, + integrity: null }; const appBackup = { @@ -44,7 +45,8 @@ describe('backups', function () { preserveSecs: 0, label: '', appConfig: null, - targetId: null + targetId: null, + integrity: null }; let defaultBackupTarget;