backups (tgz): save integrity information

We generate a signing key pair for each backup target. Initially, I had this
as a global key pair. We needed a route to return the public key, and putting it
under the backup target seemed natural. Since we delete the backups when
we delete a target, we lose all the checksum signatures anyway — so it's fine
to lose the key pair on target delete.
This commit is contained in:
Girish Ramakrishnan
2025-08-11 19:30:22 +05:30
parent 25fa999259
commit 47fc9561ab
9 changed files with 110 additions and 30 deletions

View File

@@ -2,7 +2,9 @@
const assert = require('assert'),
backupTargets = require('../backuptargets.js'),
blobs = require('../blobs.js'),
BoxError = require('../boxerror.js'),
crypto = require('crypto'),
DataLayout = require('../datalayout.js'),
debug = require('debug')('box:backupformat/tgz'),
{ DecryptStream, EncryptStream } = require('../hush.js'),
@@ -10,6 +12,7 @@ const assert = require('assert'),
path = require('path'),
ProgressStream = require('../progress-stream.js'),
promiseRetry = require('../promise-retry.js'),
{ Readable } = require('stream'),
safe = require('safetydance'),
stream = require('stream/promises'),
{ Transform } = require('node:stream'),
@@ -139,12 +142,14 @@ async function tarPack(dataLayout, encryption, uploader, progressCallback) {
const pack = tar.pack();
const hash = crypto.createHash('sha256');
let pipeline = null;
if (encryption) {
const encryptStream = new EncryptStream(encryption);
pipeline = safe(stream.pipeline(pack, gzip, encryptStream, ps, uploader.stream));
pipeline = safe(stream.pipeline(pack, gzip, encryptStream, ps, hash, uploader.stream));
} else {
pipeline = safe(stream.pipeline(pack, gzip, ps, uploader.stream));
pipeline = safe(stream.pipeline(pack, gzip, ps, hash, uploader.stream));
}
for (const localPath of dataLayout.localPaths()) {
@@ -159,6 +164,7 @@ async function tarPack(dataLayout, encryption, uploader, progressCallback) {
debug(`tarPack: pipeline finished: ${JSON.stringify(ps.stats())}`);
await uploader.finish();
return { size: ps.stats().transferred, sha256: hash.digest('hex') };
}
async function tarExtract(inStream, dataLayout, encryption, progressCallback) {
@@ -242,11 +248,21 @@ async function upload(backupTarget, remotePath, dataLayout, progressCallback) {
debug(`upload: Uploading ${dataLayout.toString()} to ${remotePath}`);
await promiseRetry({ times: 5, interval: 20000, debug }, async () => {
return await promiseRetry({ times: 5, interval: 20000, debug }, async () => {
progressCallback({ message: `Uploading backup ${remotePath}` });
const uploader = await backupTargets.storageApi(backupTarget).upload(backupTarget.config, remotePath);
await tarPack(dataLayout, backupTarget.encryption, uploader, progressCallback);
const { size, sha256 } = await tarPack(dataLayout, backupTarget.encryption, uploader, progressCallback);
const checksumData = [{ filename: path.basename(remotePath), size, sha256 }];
const checksumDataJsonString = JSON.stringify(checksumData, null, 4);
const checksumDataStream = Readable.from(checksumDataJsonString);
const checksumUploader = await backupTargets.storageApi(backupTarget).upload(backupTarget.config, `${remotePath}.checksum`);
await stream.pipeline(checksumDataStream, checksumUploader.stream);
await checksumUploader.finish();
const checksumSignature = await crypto.sign(null /* algorithm */, checksumDataJsonString, backupTarget.integrityKeyPair.privateKey);
return { size, sha256, checksumSignature };
});
}