backups: encrypted backups must have .enc extension

This commit is contained in:
Girish Ramakrishnan
2023-07-24 22:25:06 +05:30
parent febac9e8ca
commit 3d5c21d9ca
7 changed files with 28 additions and 42 deletions
+14 -5
View File
@@ -176,6 +176,7 @@ const appstore = require('./appstore.js'),
os = require('os'),
path = require('path'),
paths = require('./paths.js'),
PassThrough = require('stream').PassThrough,
reverseProxy = require('./reverseproxy.js'),
safe = require('safetydance'),
semver = require('semver'),
@@ -2669,12 +2670,20 @@ async function getBackupDownloadStream(app, backupId) {
const backupConfig = await settings.getBackupConfig();
return new Promise((resolve, reject) => {
storage.api(backupConfig.provider).download(backupConfig, tgz.getBackupFilePath(backupConfig, backup.remotePath), function (error, sourceStream) {
if (error) return reject(error);
resolve(sourceStream);
});
const ps = new PassThrough();
const stream = await storage.api(backupConfig.provider).download(backupConfig, tgz.getBackupFilePath(backupConfig, backup.remotePath));
stream.on('error', function(error) {
debug(`getBackupDownloadStream: read stream error: ${error.message}`);
ps.emit('error', new BoxError(BoxError.EXTERNAL_ERROR, error));
});
stream.pipe(ps);
const now = (new Date()).toISOString().replace(/:|T/g,'-').replace(/\..*/,'');
const encryptionSuffix = backup.encryptionVersion ? '.enc' : '';
const filename = `app-backup-${now} (${app.fqdn}).tar.gz${encryptionSuffix}`;
return { stream: ps, filename };
}
async function restoreInstalledApps(options, auditSource) {
+2 -2
View File
// Stream an app backup to the HTTP response as a file attachment.
// apps.getBackupDownloadStream() resolves to { stream, filename }; the
// filename already carries the '.enc' suffix when the backup is encrypted,
// so we use it verbatim instead of inventing one from the backup id.
async function downloadBackup(req, res, next) {
    const [error, result] = await safe(apps.getBackupDownloadStream(req.app, req.params.backupId));
    if (error) return next(BoxError.toHttpError(error));

    res.attachment(result.filename); // sets Content-Disposition: attachment; filename=...
    result.stream.pipe(res);
}
async function uploadFile(req, res, next) {
+3 -5
View File
@@ -138,17 +138,15 @@ function upload(apiConfig, backupFilePath, sourceStream, callback) {
});
}
// Open a read stream for a backup file stored on the local filesystem.
// apiConfig: backend configuration object (unused here beyond validation).
// sourceFilePath: absolute path of the backup file.
// Returns a readable stream; throws BoxError.NOT_FOUND when the file is missing.
async function download(apiConfig, sourceFilePath) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof sourceFilePath, 'string');

    debug(`download: ${sourceFilePath}`);

    // check up-front so the caller gets a typed error instead of a late stream 'error' event
    if (!safe.fs.existsSync(sourceFilePath)) throw new BoxError(BoxError.NOT_FOUND, `File not found: ${sourceFilePath}`);

    return fs.createReadStream(sourceFilePath);
}
async function exists(apiConfig, sourceFilePath) {
+3 -19
View File
@@ -31,7 +31,6 @@ const assert = require('assert'),
BoxError = require('../boxerror.js'),
constants = require('../constants.js'),
debug = require('debug')('box:storage/gcs'),
PassThrough = require('stream').PassThrough,
path = require('path'),
safe = require('safetydance'),
util = require('util');
@@ -136,29 +135,14 @@ async function exists(apiConfig, backupFilePath) {
}
}
// Start a download of a backup object from Google Cloud Storage.
// apiConfig: backend configuration (bucket credentials etc.).
// backupFilePath: object path inside the bucket.
// Returns the GCS read stream directly; error mapping (e.g. 404) is now
// handled by the caller, which attaches its own 'error' listener.
async function download(apiConfig, backupFilePath) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof backupFilePath, 'string');

    debug(`Download ${backupFilePath} starting`);

    const file = getBucket(apiConfig).file(backupFilePath);
    return file.createReadStream();
}
function listDir(apiConfig, backupFilePath, batchSize, iteratorCallback, callback) {
+2 -3
View File
@@ -85,13 +85,12 @@ async function exists(apiConfig, backupFilePath) {
throw new BoxError(BoxError.NOT_IMPLEMENTED, 'exists is not implemented');
}
// Download is not supported by the caas backend.
// apiConfig: backend configuration object.
// backupFilePath: object path of the backup.
// Always throws BoxError.NOT_IMPLEMENTED.
async function download(apiConfig, backupFilePath) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof backupFilePath, 'string');

    // Result: download stream
    throw new BoxError(BoxError.NOT_IMPLEMENTED, 'download is not implemented');
}
async function copy(apiConfig, oldFilePath, newFilePath, progressCallback) {
+2 -3
View File
@@ -63,14 +63,13 @@ async function exists(apiConfig, backupFilePath) {
return false;
}
// Download is not supported by the noop backend.
// apiConfig: backend configuration object.
// backupFilePath: object path of the backup.
// Always throws BoxError.NOT_IMPLEMENTED.
async function download(apiConfig, backupFilePath) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof backupFilePath, 'string');

    debug('download: %s', backupFilePath);

    throw new BoxError(BoxError.NOT_IMPLEMENTED, 'Cannot download from noop backend');
}
function listDir(apiConfig, dir, batchSize, iteratorCallback, callback) {
+2 -5
View File
@@ -262,10 +262,9 @@ class S3MultipartDownloadStream extends Readable {
}
}
function download(apiConfig, backupFilePath, callback) {
async function download(apiConfig, backupFilePath) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof backupFilePath, 'string');
assert.strictEqual(typeof callback, 'function');
const credentials = getS3Config(apiConfig);
@@ -275,9 +274,7 @@ function download(apiConfig, backupFilePath, callback) {
};
const s3 = new aws.S3(credentials);
const multipartDownloadStream = new S3MultipartDownloadStream(s3, params, { blockSize: 64 * 1024 * 1024 });
return callback(null, multipartDownloadStream);
return new S3MultipartDownloadStream(s3, params, { blockSize: 64 * 1024 * 1024 });
}
function listDir(apiConfig, dir, batchSize, iteratorCallback, callback) {