Add backuptask.js to work with new storage interface

This commit is contained in:
Johannes Zellner
2017-04-11 11:00:55 +02:00
parent 798c2ff921
commit 7fdf491815
11 changed files with 2885 additions and 351 deletions
+39 -88
View File
@@ -8,7 +8,8 @@ exports = module.exports = {
getPaged: getPaged,
getByAppIdPaged: getByAppIdPaged,
getRestoreUrl: getRestoreUrl,
getDownloadStream: getDownloadStream,
getRestoreConfig: getRestoreConfig,
ensureBackup: ensureBackup,
@@ -19,8 +20,6 @@ exports = module.exports = {
backupBoxAndApps: backupBoxAndApps,
getLocalDownloadPath: getLocalDownloadPath,
removeBackup: removeBackup
};
@@ -147,30 +146,6 @@ function getRestoreConfig(backupId, callback) {
});
}
function getRestoreUrl(backupId, callback) {
assert.strictEqual(typeof backupId, 'string');
assert.strictEqual(typeof callback, 'function');
settings.getBackupConfig(function (error, backupConfig) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
api(backupConfig.provider).getRestoreUrl(backupConfig, backupId, function (error, result) {
if (error) return callback(error);
var obj = {
id: backupId,
url: result.url,
backupKey: backupConfig.key,
sha1: result.sha1 || null // not supported by all backends
};
debug('getRestoreUrl: id:%s url:%s backupKey:%s sha1:%s', obj.id, obj.url, obj.backupKey, obj.sha1);
callback(null, obj);
});
});
}
function copyLastBackup(app, manifest, prefix, callback) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof app.lastBackupId, 'string');
@@ -179,27 +154,17 @@ function copyLastBackup(app, manifest, prefix, callback) {
assert.strictEqual(typeof callback, 'function');
var timestamp = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,'');
var toFilenameArchive = util.format('%s/app_%s_%s_v%s.tar.gz', prefix, app.id, timestamp, manifest.version);
var toFilenameConfig = util.format('%s/app_%s_%s_v%s.json', prefix, app.id, timestamp, manifest.version);
var newBackupId = util.format('%s/app_%s_%s_v%s.tar.gz', prefix, app.id, timestamp, manifest.version);
settings.getBackupConfig(function (error, backupConfig) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
debug('copyLastBackup: copying archive %s to %s', app.lastBackupId, toFilenameArchive);
debug('copyLastBackup: copying backup %s to %s', app.lastBackupId, newBackupId);
api(backupConfig.provider).copyObject(backupConfig, app.lastBackupId, toFilenameArchive, function (error) {
api(backupConfig.provider).copyBackup(backupConfig, app.lastBackupId, newBackupId, function (error) {
if (error) return callback(new BackupsError(BackupsError.EXTERNAL_ERROR, error));
// TODO change that logic by adjusting app.lastBackupId to not contain the file type
var configFileId = app.lastBackupId.slice(0, -'.tar.gz'.length) + '.json';
debug('copyLastBackup: copying config %s to %s', configFileId, toFilenameConfig);
api(backupConfig.provider).copyObject(backupConfig, configFileId, toFilenameConfig, function (error) {
if (error) return callback(new BackupsError(BackupsError.EXTERNAL_ERROR, error));
return callback(null, toFilenameArchive);
});
callback(null, newBackupId);
});
});
}
@@ -209,29 +174,23 @@ function backupBoxWithAppBackupIds(appBackupIds, prefix, callback) {
assert.strictEqual(typeof prefix, 'string');
var timestamp = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,'');
var filebase = util.format('%s/box_%s_v%s', prefix, timestamp, config.version());
var filename = filebase + '.tar.gz';
var backupId = util.format('%s/box_%s_v%s', prefix, timestamp, config.version());
settings.getBackupConfig(function (error, backupConfig) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
api(backupConfig.provider).getBoxBackupDetails(backupConfig, filename, function (error, result) {
if (error) return callback(error);
shell.sudo('backupBox', [ BACKUP_BOX_CMD, backupId ], function (error) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
debug('backupBoxWithAppBackupIds: backup details %j', result);
debug('backupBoxWithAppBackupIds: success');
shell.sudo('backupBox', [ BACKUP_BOX_CMD ].concat(result.backupScriptArguments), function (error) {
backupdb.add({ id: backupId, version: config.version(), type: backupdb.BACKUP_TYPE_BOX, dependsOn: appBackupIds }, function (error) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
debug('backupBoxWithAppBackupIds: success');
backupdb.add({ id: filename, version: config.version(), type: backupdb.BACKUP_TYPE_BOX, dependsOn: appBackupIds }, function (error) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
api(backupConfig.provider).backupDone(filename, null /* app */, appBackupIds, function (error) {
if (error) return callback(error);
callback(null, filename);
});
// FIXME this is only needed for caas, is it really???
api(backupConfig.provider).backupDone(backupId, null /* app */, appBackupIds, function (error) {
if (error) return callback(error);
callback(null, backupId);
});
});
});
@@ -254,31 +213,21 @@ function createNewAppBackup(app, manifest, prefix, callback) {
assert.strictEqual(typeof callback, 'function');
var timestamp = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,'');
var filebase = util.format('%s/app_%s_%s_v%s', prefix, app.id, timestamp, manifest.version);
var configFilename = filebase + '.json', dataFilename = filebase + '.tar.gz';
var backupId = util.format('%s/app_%s_%s_v%s', prefix, app.id, timestamp, config.version());
settings.getBackupConfig(function (error, backupConfig) {
// FIXME move addon backup into backuptask
async.series([
addons.backupAddons.bind(null, app, manifest.addons),
shell.sudo.bind(null, 'backupApp', [ BACKUP_APP_CMD, backupId, app.id ])
], function (error) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
api(backupConfig.provider).getAppBackupDetails(backupConfig, app.id, dataFilename, configFilename, function (error, result) {
if (error) return callback(error);
debugApp(app, 'createNewAppBackup: %s done', backupId);
debug('createNewAppBackup: backup details %j', result);
backupdb.add({ id: backupId, version: manifest.version, type: backupdb.BACKUP_TYPE_APP, dependsOn: [ ] }, function (error) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
async.series([
addons.backupAddons.bind(null, app, manifest.addons),
shell.sudo.bind(null, 'backupApp', [ BACKUP_APP_CMD ].concat(result.backupScriptArguments))
], function (error) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
debugApp(app, 'createNewAppBackup: %s done', dataFilename);
backupdb.add({ id: dataFilename, version: manifest.version, type: backupdb.BACKUP_TYPE_APP, dependsOn: [ ] }, function (error) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
callback(null, dataFilename);
});
});
callback(null, backupId);
});
});
}
@@ -438,32 +387,34 @@ function restoreApp(app, addonsToRestore, backupId, callback) {
assert.strictEqual(typeof callback, 'function');
assert(app.lastBackupId);
getRestoreUrl(backupId, function (error, result) {
if (error) return callback(error);
settings.getBackupConfig(function (error, backupConfig) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
debugApp(app, 'restoreApp: restoreUrl:%s', result.url);
var directoryMapping = [{
source: '/',
destination: path.join(paths.APPS_DATA_DIR, app.id)
}];
shell.sudo('restoreApp', [ RESTORE_APP_CMD, app.id, result.url, result.backupKey, result.sessionToken ], function (error) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
addons.restoreAddons(app, addonsToRestore, callback);
});
async.series([
api(backupConfig.provider).restore.bind(null, backupConfig, backupId, directoryMapping),
addons.restoreAddons.bind(null, app, addonsToRestore)
], callback);
});
}
function getLocalDownloadPath(backupId, callback) {
function getDownloadStream(backupId, callback) {
assert.strictEqual(typeof backupId, 'string');
assert.strictEqual(typeof callback, 'function');
settings.getBackupConfig(function (error, backupConfig) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
api(backupConfig.provider).getLocalFilePath(backupConfig, backupId, function (error, result) {
api(backupConfig.provider).getDownloadStream(backupConfig, backupId, function (error, result) {
if (error) return callback(error);
debug('getLocalDownloadPath: id:%s path:%s', backupId, result.filePath);
debug('getDownloadStream: %s', backupId);
callback(null, result.filePath);
callback(null, result);
});
});
}
+106
View File
@@ -0,0 +1,106 @@
#!/usr/bin/env node

'use strict';

// install the supererror handler (presumably global uncaught-error
// logging; splatchError option semantics — confirm against module docs)
require('supererror')({ splatchError: true });

// remove timestamp from debug() based output
require('debug').formatArgs = function formatArgs(args) {
    args[0] = this.namespace + ' ' + args[0];
};
var assert = require('assert'),
database = require('./database.js'),
debug = require('debug')('box:backuptask'),
path = require('path'),
paths = require('./paths.js'),
filesystem = require('./storage/filesystem.js'),
caas = require('./storage/caas.js'),
s3 = require('./storage/s3.js'),
BackupsError = require('./backups.js').BackupsError,
settings = require('./settings.js');
// choose which storage backend we use; for test purposes we use s3
// Resolves a backup provider name to its storage backend module.
// Returns null when the provider is unknown.
function api(provider) {
    if (provider === 'caas') return caas;
    if (provider === 's3') return s3;
    if (provider === 'filesystem') return filesystem;

    return null;
}
// Sets up the database connection for this task process.
// Must complete successfully before backupApp()/backupBox() are called.
function initialize(callback) {
    assert.strictEqual(typeof callback, 'function');

    database.initialize(callback);
}
// Backs up a single app's data directory under the given backup id.
// Looks up the configured storage backend and hands it one mapping:
// the app's data dir becomes the root ('/') of the backup.
// callback(error) fires when the backend finishes or when the backup
// config cannot be read.
function backupApp(backupId, appId, callback) {
    assert.strictEqual(typeof backupId, 'string');
    assert.strictEqual(typeof appId, 'string');
    assert.strictEqual(typeof callback, 'function');

    debug('Start app backup with id %s for %s', backupId, appId);

    settings.getBackupConfig(function (error, config) {
        if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));

        // the whole app data dir maps to the archive root
        var mapping = [ { source: path.join(paths.APPS_DATA_DIR, appId), destination: '/' } ];

        api(config.provider).backup(config, backupId, mapping, callback);
    });
}
// Backs up the box itself under the given backup id.
// Box data and the mail platform data are archived side by side under
// '/box/' and '/mail/' respectively by the configured storage backend.
// callback(error) fires when the backend finishes or when the backup
// config cannot be read.
function backupBox(backupId, callback) {
    assert.strictEqual(typeof backupId, 'string');
    assert.strictEqual(typeof callback, 'function');

    debug('Start box backup with id %s', backupId);

    settings.getBackupConfig(function (error, config) {
        if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));

        var mapping = [
            { source: paths.BOX_DATA_DIR, destination: '/box/' },
            { source: path.join(paths.PLATFORM_DATA_DIR, 'mail'), destination: '/mail/' }
        ];

        api(config.provider).backup(config, backupId, mapping, callback);
    });
}
// Entry point when run as a standalone task process:
//     backuptask.js <backupId> [appId]
// With an appId a single app is backed up, otherwise the whole box.
if (require.main === module) {
    var backupId = process.argv[2];
    var appId = process.argv[3];

    if (appId) debug('Backuptask for the app %s with id %s', appId, backupId);
    else debug('Backuptask for the whole Cloudron with id %s', backupId);

    // a clean shutdown on SIGTERM so the parent sees exit code 0
    process.on('SIGTERM', function () {
        process.exit(0);
    });

    initialize(function (error) {
        if (error) throw error;

        var finish = function (error) {
            if (error) debug('Backuptask completed with error', error);

            debug('Backuptask completed');

            // https://nodejs.org/api/process.html are exit codes used by node. apps.js uses the value below
            // to check apptask crashes
            process.exit(error ? 50 : 0);
        };

        if (appId) backupApp(backupId, appId, finish);
        else backupBox(backupId, finish);
    });
}
+3 -13
View File
@@ -3,7 +3,6 @@
exports = module.exports = {
get: get,
create: create,
createDownloadUrl: createDownloadUrl,
download: download
};
@@ -44,22 +43,13 @@ function create(req, res, next) {
});
}
function createDownloadUrl(req, res, next) {
assert.strictEqual(typeof req.params.backupId, 'string');
backups.getRestoreUrl(req.params.backupId, function (error, result) {
if (error) return next(new HttpError(500, error));
next(new HttpSuccess(200, result));
});
}
function download(req, res, next) {
assert.strictEqual(typeof req.params.backupId, 'string');
backups.getLocalDownloadPath(req.params.backupId, function (error, result) {
backups.getDownloadStream(req.params.backupId, function (error, result) {
if (error && error.reason === BackupsError.NOT_FOUND) return next(new HttpError(404, error.message));
if (error) return next(new HttpError(500, error));
res.sendFile(result);
result.pipe(res);
});
}
+8 -82
View File
@@ -2,6 +2,8 @@
set -eu -o pipefail
readonly script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [[ $EUID -ne 0 ]]; then
echo "This script should be run as root." >&2
exit 1
@@ -15,92 +17,16 @@ fi
readonly APPS_DATA_DIR="${HOME}/appsdata"
# verify argument count
if [[ "$1" == "s3" && $# -lt 9 ]]; then
echo "Usage: backupapp.sh s3 <appId> <s3 config url> <s3 data url> <access key id> <access key> <region> <endpoint> <password> [session token]"
if [[ $# -lt 2 ]]; then
echo "Usage: backupbox.sh <backupId> <appId>"
exit 1
fi
if [[ "$1" == "filesystem" && $# -lt 6 ]]; then
echo "Usage: backupapp.sh filesystem <appId> <backupFolder> <configFileName> <dataFileName> <password>"
exit 1
fi
# extract arguments
readonly backup_id="$1"
readonly app_id="$2"
if [[ "$1" == "s3" ]]; then
# env vars used by the awscli
readonly s3_config_url="$3"
readonly s3_data_url="$4"
export AWS_ACCESS_KEY_ID="$5"
export AWS_SECRET_ACCESS_KEY="$6"
export AWS_DEFAULT_REGION="$7"
readonly endpoint_url="$8"
readonly password="$9"
echo "Running app backup task"
DEBUG="box*" ${script_dir}/../backuptask.js "${backup_id}" "${app_id}"
if [ $# -gt 9 ]; then
export AWS_SESSION_TOKEN="${10}"
fi
elif [[ "$1" == "filesystem" ]]; then
readonly backup_folder="$3"
readonly backup_config_fileName="$4"
readonly backup_data_fileName="$5"
readonly password="$6"
fi
# perform backup
readonly app_data_dir="${APPS_DATA_DIR}/${app_id}"
readonly tar_bin="/home/yellowtent/box/helper/tarjs"
# will be checked at the end
try=0
if [[ "$1" == "s3" ]]; then
# may be empty
optional_args=""
if [ -n "${endpoint_url}" ]; then
optional_args="--endpoint-url ${endpoint_url}"
fi
# Upload config.json first because uploading tarball might take a lot of time, leading to token expiry
for try in `seq 1 5`; do
echo "Uploading config.json to ${s3_config_url} (try ${try})"
error_log=$(mktemp)
# use aws instead of curl because curl will always read entire stream memory to set Content-Length
# aws will do multipart upload
if cat "${app_data_dir}/config.json" \
| aws ${optional_args} s3 cp - "${s3_config_url}" 2>"${error_log}"; then
break
fi
cat "${error_log}" && rm "${error_log}"
done
for try in `seq 1 5`; do
echo "Uploading backup to ${s3_data_url} (try ${try})"
error_log=$(mktemp)
if ${tar_bin} "${app_data_dir}" . \
| openssl aes-256-cbc -e -pass "pass:${password}" \
| aws ${optional_args} s3 cp - "${s3_data_url}" 2>"${error_log}"; then
break
fi
cat "${error_log}" && rm "${error_log}"
done
elif [[ "$1" == "filesystem" ]]; then
mkdir -p $(dirname "${backup_folder}/${backup_config_fileName}")
echo "Storing backup config to ${backup_folder}/${backup_config_fileName}"
cat "${app_data_dir}/config.json" > "${backup_folder}/${backup_config_fileName}"
echo "Storing backup data to ${backup_folder}/${backup_data_fileName}"
${tar_bin} "${app_data_dir}" . | openssl aes-256-cbc -e -pass "pass:${password}" > "${backup_folder}/${backup_data_fileName}"
fi
if [[ ${try} -eq 5 ]]; then
echo "Backup failed uploading backup tarball"
exit 3
else
echo "Backup successful"
fi
echo "App backup successful"
+10 -65
View File
@@ -2,6 +2,10 @@
set -eu -o pipefail
readonly script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BOX_DATA_DIR="${HOME}/boxdata"
if [[ $EUID -ne 0 ]]; then
echo "This script should be run as root." >&2
exit 1
@@ -12,78 +16,19 @@ if [[ $# == 1 && "$1" == "--check" ]]; then
exit 0
fi
# verify argument count
if [[ "$1" == "s3" && $# -lt 7 ]]; then
echo "Usage: backupbox.sh s3 <s3 url> <access key id> <access key> <region> <endpoint> <password> [session token]"
exit 1
fi
if [[ "$1" == "filesystem" && $# -lt 4 ]]; then
echo "Usage: backupbox.sh filesystem <backupFolder> <fileName> <password>"
if [[ $# -lt 1 ]]; then
echo "Usage: backupbox.sh <backupId>"
exit 1
fi
# extract arguments
if [[ "$1" == "s3" ]]; then
# env vars used by the awscli
readonly s3_url="$2"
export AWS_ACCESS_KEY_ID="$3"
export AWS_SECRET_ACCESS_KEY="$4"
export AWS_DEFAULT_REGION="$5"
readonly endpoint_url="$6"
readonly password="$7"
if [ $# -gt 7 ]; then
export AWS_SESSION_TOKEN="$8"
fi
elif [[ "$1" == "filesystem" ]]; then
readonly backup_folder="$2"
readonly backup_fileName="$3"
readonly password="$4"
fi
# perform backup
BOX_DATA_DIR="${HOME}/boxdata"
readonly backup_id="$1"
echo "Creating MySQL dump"
mysqldump -u root -ppassword --single-transaction --routines --triggers box > "${BOX_DATA_DIR}/box.mysqldump"
# will be checked at the end
try=0
echo "Running backup task"
DEBUG="box*" ${script_dir}/../backuptask.js "${backup_id}"
if [[ "$1" == "s3" ]]; then
for try in `seq 1 5`; do
echo "Uploading backup to ${s3_url} (try ${try})"
error_log=$(mktemp)
# may be empty
optional_args=""
if [ -n "${endpoint_url}" ]; then
optional_args="--endpoint-url ${endpoint_url}"
fi
# use aws instead of curl because curl will always read entire stream memory to set Content-Length
# aws will do multipart upload
if tar -czf - -C "${HOME}" --transform="s,^boxdata/\?,box/," --transform="s,^platformdata/mail/\?,mail/," --show-transformed-names boxdata platformdata/mail \
| openssl aes-256-cbc -e -pass "pass:${password}" \
| aws ${optional_args} s3 cp - "${s3_url}" 2>"${error_log}"; then
break
fi
cat "${error_log}" && rm "${error_log}"
done
elif [[ "$1" == "filesystem" ]]; then
echo "Storing backup to ${backup_folder}/${backup_fileName}"
mkdir -p $(dirname "${backup_folder}/${backup_fileName}")
tar -czf - -C "${HOME}" --transform="s,^boxdata/\?,box/," --transform="s,^platformdata/mail/\?,mail/," --show-transformed-names boxdata platformdata/mail \
| openssl aes-256-cbc -e -pass "pass:${password}" > "${backup_folder}/${backup_fileName}"
fi
if [[ ${try} -eq 5 ]]; then
echo "Backup failed"
exit 3
else
echo "Backup successful"
fi
echo "Backup successful"
+1
View File
@@ -24,6 +24,7 @@ if [[ "${BOX_ENV}" == "cloudron" ]]; then
# only the top level ownership is changed because containers own the subdirectores
# and will chown them as necessary
chown yellowtent:yellowtent "${app_data_dir}"
chown yellowtent:yellowtent "${app_data_dir}/data"
else
readonly app_data_dir="${HOME}/.cloudron_test/appsdata/$1"
mkdir -p "${app_data_dir}/data"
-1
View File
@@ -219,7 +219,6 @@ function initializeExpressSync() {
// backup routes
router.get ('/api/v1/backups', settingsScope, routes.user.requireAdmin, routes.backups.get);
router.post('/api/v1/backups', settingsScope, routes.user.requireAdmin, routes.backups.create);
router.post('/api/v1/backups/:backupId/download_url', appsScope, routes.user.requireAdmin, routes.backups.createDownloadUrl);
router.get ('/api/v1/backups/:backupId/download', appsScope, routes.user.requireAdmin, routes.backups.download);
// disable server socket "idle" timeout. we use the timeout middleware to handle timeouts on a route level
+125 -74
View File
@@ -1,14 +1,13 @@
'use strict';
exports = module.exports = {
getBoxBackupDetails: getBoxBackupDetails,
getAppBackupDetails: getAppBackupDetails,
backup: backup,
restore: restore,
getRestoreUrl: getRestoreUrl,
getAppRestoreConfig: getAppRestoreConfig,
getLocalFilePath: getLocalFilePath,
getDownloadStream: getDownloadStream,
copyObject: copyObject,
copyBackup: copyBackup,
removeBackup: removeBackup,
backupDone: backupDone,
@@ -19,111 +18,167 @@ exports = module.exports = {
var assert = require('assert'),
async = require('async'),
BackupsError = require('../backups.js').BackupsError,
checksum = require('checksum'),
debug = require('debug')('box:storage/filesystem'),
fs = require('fs'),
path = require('path'),
mkdirp = require('mkdirp'),
once = require('once'),
safe = require('safetydance'),
SettingsError = require('../settings.js').SettingsError,
shell = require('../shell.js'),
util = require('util');
tar = require('tar-fs'),
archiver = require('archiver');
var FALLBACK_BACKUP_FOLDER = '/var/backups';
var RMBACKUP_CMD = path.join(__dirname, '../scripts/rmbackup.sh');
function getBoxBackupDetails(apiConfig, id, callback) {
function backup(apiConfig, backupId, sourceDirectories, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof id, 'string');
assert.strictEqual(typeof backupId, 'string');
assert(Array.isArray(sourceDirectories));
assert.strictEqual(typeof callback, 'function');
var backupFolder = apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER;
callback = once(callback);
var details = {
backupScriptArguments: [ 'filesystem', backupFolder, id, apiConfig.key ]
};
var backupFilePath = path.join(apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER, backupId + '.tar.gz');
callback(null, details);
}
debug('[%s] backup: %j -> %s', backupId, sourceDirectories, backupFilePath);
function getAppBackupDetails(apiConfig, appId, dataId, configId, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof appId, 'string');
assert.strictEqual(typeof dataId, 'string');
assert.strictEqual(typeof configId, 'string');
assert.strictEqual(typeof callback, 'function');
mkdirp(path.dirname(backupFilePath), function (error) {
if (error) return callback(error);
var backupFolder = apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER;
var fileStream = fs.createWriteStream(backupFilePath);
var archive = archiver('tar', { gzip: true });
var details = {
backupScriptArguments: [ 'filesystem', appId, backupFolder, configId, dataId, apiConfig.key ]
};
fileStream.on('error', function (error) {
console.error('[%s] backup: out stream error.', error);
});
callback(null, details);
}
archive.on('error', function (error) {
console.error('[%s] backup: archive stream error.', error);
});
function getRestoreUrl(apiConfig, filename, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof filename, 'string');
assert.strictEqual(typeof callback, 'function');
fileStream.on('close', function () {
debug('[%s] backup: done.', backupId);
callback();
});
var backupFolder = apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER;
var restoreUrl = 'file://' + path.join(backupFolder, filename);
checksum.file(path.join(backupFolder, filename), function (error, result) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, util.format('Failed to calculate checksum:', error)));
callback(null, { url: restoreUrl, sha1: result });
archive.pipe(fileStream);
sourceDirectories.forEach(function (directoryMap) {
archive.directory(directoryMap.source, directoryMap.destination);
});
archive.finalize();
});
}
function restore(apiConfig, backupId, destinationDirectories, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof backupId, 'string');
assert(Array.isArray(destinationDirectories));
assert.strictEqual(typeof callback, 'function');
var sourceFilePath = path.join(apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER, backupId + '.tar.gz');
debug('[%s] restore: %s -> %j', backupId, sourceFilePath, destinationDirectories);
if (!fs.existsSync(sourceFilePath)) return callback(new BackupsError(BackupsError.NOT_FOUND, 'backup file does not exist'));
async.eachSeries(destinationDirectories, function (directory, callback) {
debug('[%s] restore: directory %s -> %s', backupId, directory.source, directory.destination);
mkdirp(directory, function (error) {
if (error) return callback(error);
var fileStream = fs.createReadStream(sourceFilePath);
var extract = tar.extract(directory);
fileStream.on('error', function (error) {
console.error('[%s] restore: file stream error.', error);
callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
});
extract.on('error', function (error) {
console.error('[%s] restore: extract stream error.', error);
callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
});
extract.on('end', function () {
debug('[%s] restore: directory %s done.', backupId, directory.source);
callback();
});
fileStream.pipe(extract);
});
}, function (error) {
if (error) return callback(error);
debug('[%s] restore: done', backupId);
callback();
});
}
function getDownloadStream(apiConfig, backupId, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof backupId, 'string');
assert.strictEqual(typeof callback, 'function');
var backupFilePath = path.join(apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER, backupId);
debug('[%s] getDownloadStream: %s %s', backupId, backupId, backupFilePath);
if (!fs.existsSync(backupFilePath)) return callback(new BackupsError(BackupsError.NOT_FOUND, 'backup file does not exist'));
var stream = fs.createReadStream(backupFilePath);
callback(null, stream);
}
function getAppRestoreConfig(apiConfig, backupId, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof backupId, 'string');
assert.strictEqual(typeof callback, 'function');
var backupFolder = apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER;
var configFilename = backupId.replace(/\.tar\.gz$/, '.json');
callback = once(callback);
var restoreConfig = safe.require(path.join(backupFolder, configFilename));
if (!restoreConfig) return callback(new BackupsError(BackupsError.NOT_FOUND, 'No app backup config found for ' + configFilename));
var sourceFilePath = path.join(apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER, backupId + '.json');
debug('[%s] getAppRestoreConfig: %s', backupId, sourceFilePath);
if (!fs.existsSync(sourceFilePath)) return callback(new BackupsError(BackupsError.NOT_FOUND, 'restore config file does not exist'));
var restoreConfig = safe.require(sourceFilePath);
if (!restoreConfig) {
console.error('[%s] getAppRestoreConfig: failed', safe.error);
return callback(new BackupsError(BackupsError.INTERNAL_ERROR, safe.error));
}
callback(null, restoreConfig);
}
function getLocalFilePath(apiConfig, filename, callback) {
function copyBackup(apiConfig, oldBackupId, newBackupId, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof filename, 'string');
assert.strictEqual(typeof oldBackupId, 'string');
assert.strictEqual(typeof newBackupId, 'string');
assert.strictEqual(typeof callback, 'function');
var backupFolder = apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER;
callback = once(callback);
callback(null, { filePath: path.join(backupFolder, filename) });
}
var oldBackupFilePath = path.join(apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER, oldBackupId + '.tar.gz');
var newBackupFilePath = path.join(apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER, newBackupId + '.tar.gz');
function copyObject(apiConfig, from, to, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof from, 'string');
assert.strictEqual(typeof to, 'string');
assert.strictEqual(typeof callback, 'function');
// FIXME this most likely has a permissions issue as this process runs as yellowtent not root
mkdirp(path.dirname(newBackupFilePath), function (error) {
if (error) return callback(new BackupsError(BackupsError.INTERNAL_ERROR, error));
var calledBack = false;
function done (error) {
if (!calledBack) callback(error);
calledBack = true;
}
var readStream = fs.createReadStream(oldBackupFilePath);
var writeStream = fs.createWriteStream(newBackupFilePath);
var backupFolder = apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER;
var readStream = fs.createReadStream(path.join(backupFolder, from));
var writeStream = fs.createWriteStream(path.join(backupFolder, to));
readStream.on('error', callback);
writeStream.on('error', callback);
writeStream.on('close', callback);
readStream.on('error', done);
writeStream.on('error', done);
writeStream.on('close', function () {
// avoid passing arguments
done(null);
readStream.pipe(writeStream);
});
readStream.pipe(writeStream);
}
function removeBackup(apiConfig, backupId, appBackupIds, callback) {
@@ -132,11 +187,8 @@ function removeBackup(apiConfig, backupId, appBackupIds, callback) {
assert(Array.isArray(appBackupIds));
assert.strictEqual(typeof callback, 'function');
var backupFolder = apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER;
var appBackupJSONFiles = appBackupIds.map(function (id) { return id.replace(/\.tar\.gz$/, '.json'); });
async.each([backupId].concat(appBackupIds).concat(appBackupJSONFiles), function (id, callback) {
var filePath = path.join(backupFolder, id);
async.each([backupId].concat(appBackupIds), function (id, callback) {
var filePath = path.join(apiConfig.backupFolder || FALLBACK_BACKUP_FOLDER, id + '.tar.gz');
shell.sudo('deleteBackup', [ RMBACKUP_CMD, filePath ], function (error) {
if (error) console.error('Unable to remove %s. Not fatal.', filePath, safe.error);
@@ -162,4 +214,3 @@ function backupDone(filename, app, appBackupIds, callback) {
callback();
}
+21 -12
View File
@@ -7,8 +7,9 @@
// -------------------------------------------
exports = module.exports = {
getBoxBackupDetails: getBoxBackupDetails,
getAppBackupDetails: getAppBackupDetails,
backupBegin: backupBegin,
backupDirectory: backupDirectory,
backupFinalize: backupFinalize,
getRestoreUrl: getRestoreUrl,
getAppRestoreConfig: getAppRestoreConfig,
@@ -24,26 +25,34 @@ exports = module.exports = {
var assert = require('assert');
function getBoxBackupDetails(apiConfig, id, callback) {
function backupBegin(apiConfig, backupId, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof id, 'string');
assert.strictEqual(typeof backupId, 'string');
assert.strictEqual(typeof callback, 'function');
// Result: { backupScriptArguments: [] }
// The resulting array consists of string passed down 1to1 to the backupbox.sh
// Result: none
callback(new Error('not implemented'));
}
function getAppBackupDetails(apiConfig, appId, dataId, configId, callback) {
function backupDirectory(apiConfig, backupId, source, destination, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof appId, 'string');
assert.strictEqual(typeof dataId, 'string');
assert.strictEqual(typeof configId, 'string');
assert.strictEqual(typeof backupId, 'string');
assert.strictEqual(typeof source, 'string');
assert.strictEqual(typeof destination, 'string');
assert.strictEqual(typeof callback, 'function');
// Result: { backupScriptArguments: [] }
// The resulting array consists of string passed down 1to1 to the backupapp.sh
// Result: none
callback(new Error('not implemented'));
}
function backupFinalize(apiConfig, backupId, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof backupId, 'string');
assert.strictEqual(typeof callback, 'function');
// Result: none
callback(new Error('not implemented'));
}