backups: add remotePath
The main motivation is that the id can be used in REST API routes. Previously, the id was a path which contained a "/", so the /api/v1/backups/:backupId route did not work.
This commit is contained in:
+41
-40
@@ -81,27 +81,27 @@ function applyBackupRetentionPolicy(allBackups, policy, referencedBackupIds) {
|
||||
}
|
||||
}
|
||||
|
||||
async function cleanupBackup(backupConfig, backup, progressCallback) {
|
||||
async function removeBackup(backupConfig, backup, progressCallback) {
|
||||
assert.strictEqual(typeof backupConfig, 'object');
|
||||
assert.strictEqual(typeof backup, 'object');
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
|
||||
const backupFilePath = storage.getBackupFilePath(backupConfig, backup.id, backup.format);
|
||||
const backupFilePath = storage.getBackupFilePath(backupConfig, backup.remotePath, backup.format);
|
||||
|
||||
return new Promise((resolve) => {
|
||||
function done(error) {
|
||||
if (error) {
|
||||
debug('cleanupBackup: error removing backup %j : %s', backup, error.message);
|
||||
debug('removeBackup: error removing backup %j : %s', backup, error.message);
|
||||
return resolve();
|
||||
}
|
||||
|
||||
// prune empty directory if possible
|
||||
storage.api(backupConfig.provider).remove(backupConfig, path.dirname(backupFilePath), async function (error) {
|
||||
if (error) debug('cleanupBackup: unable to prune backup directory %s : %s', path.dirname(backupFilePath), error.message);
|
||||
if (error) debug('removeBackup: unable to prune backup directory %s : %s', path.dirname(backupFilePath), error.message);
|
||||
|
||||
const [delError] = await safe(backups.del(backup.id));
|
||||
if (delError) debug('cleanupBackup: error removing from database', delError);
|
||||
else debug('cleanupBackup: removed %s', backup.id);
|
||||
if (delError) debug(`removeBackup: error removing ${backup.id} from database`, delError);
|
||||
else debug(`removeBackup: removed ${backup.remotePath}`);
|
||||
|
||||
resolve();
|
||||
});
|
||||
@@ -112,18 +112,18 @@ async function cleanupBackup(backupConfig, backup, progressCallback) {
|
||||
storage.api(backupConfig.provider).remove(backupConfig, backupFilePath, done);
|
||||
} else {
|
||||
const events = storage.api(backupConfig.provider).removeDir(backupConfig, backupFilePath);
|
||||
events.on('progress', (message) => progressCallback({ message: `${backup.id}: ${message}` }));
|
||||
events.on('progress', (message) => progressCallback({ message: `${backup.remotePath}: ${message}` }));
|
||||
events.on('done', done);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function cleanupAppBackups(backupConfig, referencedAppBackupIds, progressCallback) {
|
||||
async function cleanupAppBackups(backupConfig, referencedBackupIds, progressCallback) {
|
||||
assert.strictEqual(typeof backupConfig, 'object');
|
||||
assert(Array.isArray(referencedAppBackupIds));
|
||||
assert(Array.isArray(referencedBackupIds));
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
|
||||
let removedAppBackupIds = [];
|
||||
const removedAppBackupPaths = [];
|
||||
|
||||
const allApps = await apps.list();
|
||||
const allAppIds = allApps.map(a => a.id);
|
||||
@@ -140,49 +140,49 @@ async function cleanupAppBackups(backupConfig, referencedAppBackupIds, progressC
|
||||
// apply backup policy per app. keep latest backup only for existing apps
|
||||
let appBackupsToRemove = [];
|
||||
for (const appId of Object.keys(appBackupsById)) {
|
||||
applyBackupRetentionPolicy(appBackupsById[appId], _.extend({ keepLatest: allAppIds.includes(appId) }, backupConfig.retentionPolicy), referencedAppBackupIds);
|
||||
applyBackupRetentionPolicy(appBackupsById[appId], _.extend({ keepLatest: allAppIds.includes(appId) }, backupConfig.retentionPolicy), referencedBackupIds);
|
||||
appBackupsToRemove = appBackupsToRemove.concat(appBackupsById[appId].filter(b => !b.keepReason));
|
||||
}
|
||||
|
||||
for (const appBackup of appBackupsToRemove) {
|
||||
await progressCallback({ message: `Removing app backup (${appBackup.identifier}): ${appBackup.id}`});
|
||||
removedAppBackupIds.push(appBackup.id);
|
||||
await cleanupBackup(backupConfig, appBackup, progressCallback); // never errors
|
||||
removedAppBackupPaths.push(appBackup.remotePath);
|
||||
await removeBackup(backupConfig, appBackup, progressCallback); // never errors
|
||||
}
|
||||
|
||||
debug('cleanupAppBackups: done');
|
||||
|
||||
return removedAppBackupIds;
|
||||
return removedAppBackupPaths;
|
||||
}
|
||||
|
||||
async function cleanupMailBackups(backupConfig, referencedAppBackupIds, progressCallback) {
|
||||
async function cleanupMailBackups(backupConfig, referencedBackupIds, progressCallback) {
|
||||
assert.strictEqual(typeof backupConfig, 'object');
|
||||
assert(Array.isArray(referencedAppBackupIds));
|
||||
assert(Array.isArray(referencedBackupIds));
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
|
||||
let removedMailBackupIds = [];
|
||||
const removedMailBackupPaths = [];
|
||||
|
||||
const mailBackups = await backups.getByTypePaged(backups.BACKUP_TYPE_MAIL, 1, 1000);
|
||||
|
||||
applyBackupRetentionPolicy(mailBackups, _.extend({ keepLatest: true }, backupConfig.retentionPolicy), referencedAppBackupIds);
|
||||
applyBackupRetentionPolicy(mailBackups, _.extend({ keepLatest: true }, backupConfig.retentionPolicy), referencedBackupIds);
|
||||
|
||||
for (const mailBackup of mailBackups) {
|
||||
if (mailBackup.keepReason) continue;
|
||||
await progressCallback({ message: `Removing mail backup ${mailBackup.id}`});
|
||||
removedMailBackupIds.push(mailBackup.id);
|
||||
await cleanupBackup(backupConfig, mailBackup, progressCallback); // never errors
|
||||
await progressCallback({ message: `Removing mail backup ${mailBackup.remotePath}`});
|
||||
removedMailBackupPaths.push(mailBackup.remotePath);
|
||||
await removeBackup(backupConfig, mailBackup, progressCallback); // never errors
|
||||
}
|
||||
|
||||
debug('cleanupMailBackups: done');
|
||||
|
||||
return removedMailBackupIds;
|
||||
return removedMailBackupPaths;
|
||||
}
|
||||
|
||||
async function cleanupBoxBackups(backupConfig, progressCallback) {
|
||||
assert.strictEqual(typeof backupConfig, 'object');
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
|
||||
let referencedAppBackupIds = [], removedBoxBackupIds = [];
|
||||
let referencedBackupIds = [], removedBoxBackupPaths = [];
|
||||
|
||||
const boxBackups = await backups.getByTypePaged(backups.BACKUP_TYPE_BOX, 1, 1000);
|
||||
|
||||
@@ -190,48 +190,49 @@ async function cleanupBoxBackups(backupConfig, progressCallback) {
|
||||
|
||||
for (const boxBackup of boxBackups) {
|
||||
if (boxBackup.keepReason) {
|
||||
referencedAppBackupIds = referencedAppBackupIds.concat(boxBackup.dependsOn);
|
||||
referencedBackupIds = referencedBackupIds.concat(boxBackup.dependsOn);
|
||||
continue;
|
||||
}
|
||||
|
||||
await progressCallback({ message: `Removing box backup ${boxBackup.id}`});
|
||||
await progressCallback({ message: `Removing box backup ${boxBackup.remotePath}`});
|
||||
|
||||
removedBoxBackupIds.push(boxBackup.id);
|
||||
await cleanupBackup(backupConfig, boxBackup, progressCallback);
|
||||
removedBoxBackupPaths.push(boxBackup.remotePath);
|
||||
await removeBackup(backupConfig, boxBackup, progressCallback);
|
||||
}
|
||||
|
||||
debug('cleanupBoxBackups: done');
|
||||
|
||||
return { removedBoxBackupIds, referencedAppBackupIds };
|
||||
return { removedBoxBackupPaths, referencedBackupIds };
|
||||
}
|
||||
|
||||
// cleans up the database by checking if the backup exists in the remote
|
||||
async function cleanupMissingBackups(backupConfig, progressCallback) {
|
||||
assert.strictEqual(typeof backupConfig, 'object');
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
|
||||
const perPage = 1000;
|
||||
let missingBackupIds = [];
|
||||
let missingBackupPaths = [];
|
||||
const backupExists = util.promisify(storage.api(backupConfig.provider).exists);
|
||||
|
||||
if (constants.TEST) return missingBackupIds;
|
||||
if (constants.TEST) return missingBackupPaths;
|
||||
|
||||
let page = 1, result = [];
|
||||
do {
|
||||
result = await backups.list(page, perPage);
|
||||
|
||||
for (const backup of result) {
|
||||
let backupFilePath = storage.getBackupFilePath(backupConfig, backup.id, backup.format);
|
||||
let backupFilePath = storage.getBackupFilePath(backupConfig, backup.remotePath, backup.format);
|
||||
if (backup.format === 'rsync') backupFilePath = backupFilePath + '/'; // add trailing slash to indicate directory
|
||||
|
||||
const [existsError, exists] = await safe(backupExists(backupConfig, backupFilePath));
|
||||
if (existsError || exists) continue;
|
||||
|
||||
await progressCallback({ message: `Removing missing backup ${backup.id}`});
|
||||
await progressCallback({ message: `Removing missing backup ${backup.remotePath}`});
|
||||
|
||||
const [delError] = await safe(backups.del(backup.id));
|
||||
if (delError) debug(`cleanupBackup: error removing ${backup.id} from database`, delError);
|
||||
if (delError) debug(`cleanupMissingBackups: error removing ${backup.id} from database`, delError);
|
||||
|
||||
missingBackupIds.push(backup.id);
|
||||
missingBackupPaths.push(backup.remotePath);
|
||||
}
|
||||
|
||||
++ page;
|
||||
@@ -239,7 +240,7 @@ async function cleanupMissingBackups(backupConfig, progressCallback) {
|
||||
|
||||
debug('cleanupMissingBackups: done');
|
||||
|
||||
return missingBackupIds;
|
||||
return missingBackupPaths;
|
||||
}
|
||||
|
||||
// removes the snapshots of apps that have been uninstalled
|
||||
@@ -298,19 +299,19 @@ async function run(progressCallback) {
|
||||
}
|
||||
|
||||
await progressCallback({ percent: 10, message: 'Cleaning box backups' });
|
||||
const { removedBoxBackupIds, referencedAppBackupIds } = await cleanupBoxBackups(backupConfig, progressCallback);
|
||||
const { removedBoxBackupPaths, referencedBackupIds } = await cleanupBoxBackups(backupConfig, progressCallback); // referencedBackupIds are app or mail backup ids
|
||||
|
||||
await progressCallback({ percent: 20, message: 'Cleaning mail backups' });
|
||||
const removedMailBackupIds = await cleanupMailBackups(backupConfig, referencedAppBackupIds, progressCallback);
|
||||
const removedMailBackupPaths = await cleanupMailBackups(backupConfig, referencedBackupIds, progressCallback);
|
||||
|
||||
await progressCallback({ percent: 40, message: 'Cleaning app backups' });
|
||||
const removedAppBackupIds = await cleanupAppBackups(backupConfig, referencedAppBackupIds, progressCallback);
|
||||
const removedAppBackupPaths = await cleanupAppBackups(backupConfig, referencedBackupIds, progressCallback);
|
||||
|
||||
await progressCallback({ percent: 70, message: 'Cleaning missing backups' });
|
||||
const missingBackupIds = await cleanupMissingBackups(backupConfig, progressCallback);
|
||||
const missingBackupPaths = await cleanupMissingBackups(backupConfig, progressCallback);
|
||||
|
||||
await progressCallback({ percent: 90, message: 'Cleaning snapshots' });
|
||||
await cleanupSnapshots(backupConfig);
|
||||
|
||||
return { removedBoxBackupIds, removedMailBackupIds, removedAppBackupIds, missingBackupIds };
|
||||
return { removedBoxBackupPaths, removedMailBackupPaths, removedAppBackupPaths, missingBackupPaths };
|
||||
}
|
||||
|
||||
+17
-12
@@ -59,11 +59,12 @@ const assert = require('assert'),
|
||||
settings = require('./settings.js'),
|
||||
storage = require('./storage.js'),
|
||||
tasks = require('./tasks.js'),
|
||||
util = require('util');
|
||||
util = require('util'),
|
||||
uuid = require('uuid');
|
||||
|
||||
const COLLECTD_CONFIG_EJS = fs.readFileSync(__dirname + '/collectd/cloudron-backup.ejs', { encoding: 'utf8' });
|
||||
|
||||
const BACKUPS_FIELDS = [ 'id', 'label', 'identifier', 'creationTime', 'packageVersion', 'type', 'dependsOn', 'state', 'manifestJson', 'format', 'preserveSecs', 'encryptionVersion' ];
|
||||
const BACKUPS_FIELDS = [ 'id', 'remotePath', 'label', 'identifier', 'creationTime', 'packageVersion', 'type', 'dependsOnJson', 'state', 'manifestJson', 'format', 'preserveSecs', 'encryptionVersion' ];
|
||||
|
||||
// helper until all storage providers have been ported
|
||||
function maybePromisify(func) {
|
||||
@@ -74,7 +75,8 @@ function maybePromisify(func) {
|
||||
function postProcess(result) {
|
||||
assert.strictEqual(typeof result, 'object');
|
||||
|
||||
result.dependsOn = result.dependsOn ? result.dependsOn.split(',') : [ ];
|
||||
result.dependsOn = result.dependsOnJson ? safe.JSON.parse(result.dependsOnJson) : [];
|
||||
delete result.dependsOnJson;
|
||||
|
||||
result.manifest = result.manifestJson ? safe.JSON.parse(result.manifestJson) : null;
|
||||
delete result.manifestJson;
|
||||
@@ -116,9 +118,9 @@ function generateEncryptionKeysSync(password) {
|
||||
};
|
||||
}
|
||||
|
||||
async function add(id, data) {
|
||||
async function add(data) {
|
||||
assert(data && typeof data === 'object');
|
||||
assert.strictEqual(typeof id, 'string');
|
||||
assert.strictEqual(typeof data.remotePath, 'string');
|
||||
assert(data.encryptionVersion === null || typeof data.encryptionVersion === 'number');
|
||||
assert.strictEqual(typeof data.packageVersion, 'string');
|
||||
assert.strictEqual(typeof data.type, 'string');
|
||||
@@ -130,12 +132,15 @@ async function add(id, data) {
|
||||
|
||||
const creationTime = data.creationTime || new Date(); // allow tests to set the time
|
||||
const manifestJson = JSON.stringify(data.manifest);
|
||||
const id = 'bid-' + uuid.v4();
|
||||
|
||||
const [error] = await safe(database.query('INSERT INTO backups (id, identifier, encryptionVersion, packageVersion, type, creationTime, state, dependsOn, manifestJson, format) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
|
||||
[ id, data.identifier, data.encryptionVersion, data.packageVersion, data.type, creationTime, data.state, data.dependsOn.join(','), manifestJson, data.format ]));
|
||||
const [error] = await safe(database.query('INSERT INTO backups (id, remotePath, identifier, encryptionVersion, packageVersion, type, creationTime, state, dependsOnJson, manifestJson, format) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
|
||||
[ id, data.remotePath, data.identifier, data.encryptionVersion, data.packageVersion, data.type, creationTime, data.state, JSON.stringify(data.dependsOn), manifestJson, data.format ]));
|
||||
|
||||
if (error && error.code === 'ER_DUP_ENTRY') throw new BoxError(BoxError.ALREADY_EXISTS, 'Backup already exists');
|
||||
if (error) throw error;
|
||||
|
||||
return id;
|
||||
}
|
||||
|
||||
async function getByIdentifierAndStatePaged(identifier, state, page, perPage) {
|
||||
@@ -285,14 +290,14 @@ async function startCleanupTask(auditSource) {
|
||||
|
||||
const taskId = await tasks.add(tasks.TASK_CLEAN_BACKUPS, []);
|
||||
|
||||
tasks.startTask(taskId, {}, async (error, result) => { // result is { removedBoxBackupIds, removedAppBackupIds, removedMailBackupIds, missingBackupIds }
|
||||
tasks.startTask(taskId, {}, async (error, result) => { // result is { removedBoxBackupPaths, removedAppBackupPaths, removedMailBackupPaths, missingBackupPaths }
|
||||
await safe(eventlog.add(eventlog.ACTION_BACKUP_CLEANUP_FINISH, auditSource, {
|
||||
taskId,
|
||||
errorMessage: error ? error.message : null,
|
||||
removedBoxBackupIds: result ? result.removedBoxBackupIds : [],
|
||||
removedMailBackupIds: result ? result.removedMailBackupIds : [],
|
||||
removedAppBackupIds: result ? result.removedAppBackupIds : [],
|
||||
missingBackupIds: result ? result.missingBackupIds : []
|
||||
removedBoxBackupPaths: result ? result.removedBoxBackupPaths : [],
|
||||
removedMailBackupPaths: result ? result.removedMailBackupPaths : [],
|
||||
removedAppBackupPaths: result ? result.removedAppBackupPaths : [],
|
||||
missingBackupPaths: result ? result.missingBackupPaths : []
|
||||
}), { debug });
|
||||
});
|
||||
|
||||
|
||||
+64
-60
@@ -315,9 +315,9 @@ function tarPack(dataLayout, encryption, callback) {
|
||||
return callback(null, ps);
|
||||
}
|
||||
|
||||
function sync(backupConfig, backupId, dataLayout, progressCallback, callback) {
|
||||
function sync(backupConfig, remotePath, dataLayout, progressCallback, callback) {
|
||||
assert.strictEqual(typeof backupConfig, 'object');
|
||||
assert.strictEqual(typeof backupId, 'string');
|
||||
assert.strictEqual(typeof remotePath, 'string');
|
||||
assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
assert.strictEqual(typeof callback, 'function');
|
||||
@@ -329,7 +329,7 @@ function sync(backupConfig, backupId, dataLayout, progressCallback, callback) {
|
||||
debug('sync: processing task: %j', task);
|
||||
// the empty task.path is special to signify the directory
|
||||
const destPath = task.path && backupConfig.encryption ? encryptFilePath(task.path, backupConfig.encryption) : task.path;
|
||||
const backupFilePath = path.join(storage.getBackupFilePath(backupConfig, backupId, backupConfig.format), destPath);
|
||||
const backupFilePath = path.join(storage.getBackupFilePath(backupConfig, remotePath, backupConfig.format), destPath);
|
||||
|
||||
if (task.operation === 'removedir') {
|
||||
debug(`Removing directory ${backupFilePath}`);
|
||||
@@ -341,7 +341,7 @@ function sync(backupConfig, backupId, dataLayout, progressCallback, callback) {
|
||||
return storage.api(backupConfig.provider).remove(backupConfig, backupFilePath, iteratorCallback);
|
||||
}
|
||||
|
||||
var retryCount = 0;
|
||||
let retryCount = 0;
|
||||
async.retry({ times: 5, interval: 20000 }, function (retryCallback) {
|
||||
retryCallback = once(retryCallback); // protect again upload() erroring much later after read stream error
|
||||
|
||||
@@ -379,7 +379,7 @@ async function saveFsMetadata(dataLayout, metadataFile) {
|
||||
assert.strictEqual(typeof metadataFile, 'string');
|
||||
|
||||
// contains paths prefixed with './'
|
||||
let metadata = {
|
||||
const metadata = {
|
||||
emptyDirs: [],
|
||||
execFiles: [],
|
||||
symlinks: []
|
||||
@@ -407,14 +407,14 @@ async function saveFsMetadata(dataLayout, metadataFile) {
|
||||
}
|
||||
|
||||
// this function is called via backupupload (since it needs root to traverse app's directory)
|
||||
function upload(backupId, format, dataLayoutString, progressCallback, callback) {
|
||||
assert.strictEqual(typeof backupId, 'string');
|
||||
function upload(remotePath, format, dataLayoutString, progressCallback, callback) {
|
||||
assert.strictEqual(typeof remotePath, 'string');
|
||||
assert.strictEqual(typeof format, 'string');
|
||||
assert.strictEqual(typeof dataLayoutString, 'string');
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
assert.strictEqual(typeof callback, 'function');
|
||||
|
||||
debug(`upload: id ${backupId} format ${format} dataLayout ${dataLayoutString}`);
|
||||
debug(`upload: path ${remotePath} format ${format} dataLayout ${dataLayoutString}`);
|
||||
|
||||
const dataLayout = DataLayout.fromString(dataLayoutString);
|
||||
|
||||
@@ -438,13 +438,13 @@ function upload(backupId, format, dataLayoutString, progressCallback, callback)
|
||||
});
|
||||
tarStream.on('error', retryCallback); // already returns BoxError
|
||||
|
||||
storage.api(backupConfig.provider).upload(backupConfig, storage.getBackupFilePath(backupConfig, backupId, format), tarStream, retryCallback);
|
||||
storage.api(backupConfig.provider).upload(backupConfig, storage.getBackupFilePath(backupConfig, remotePath, format), tarStream, retryCallback);
|
||||
});
|
||||
}, callback);
|
||||
} else {
|
||||
async.series([
|
||||
saveFsMetadata.bind(null, dataLayout, `${dataLayout.localRoot()}/fsmetadata.json`),
|
||||
sync.bind(null, backupConfig, backupId, dataLayout, progressCallback)
|
||||
sync.bind(null, backupConfig, remotePath, dataLayout, progressCallback)
|
||||
], callback);
|
||||
}
|
||||
});
|
||||
@@ -686,8 +686,8 @@ function runBackupUpload(uploadConfig, progressCallback, callback) {
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
assert.strictEqual(typeof callback, 'function');
|
||||
|
||||
const { backupId, backupConfig, dataLayout, progressTag } = uploadConfig;
|
||||
assert.strictEqual(typeof backupId, 'string');
|
||||
const { remotePath, backupConfig, dataLayout, progressTag } = uploadConfig;
|
||||
assert.strictEqual(typeof remotePath, 'string');
|
||||
assert.strictEqual(typeof backupConfig, 'object');
|
||||
assert.strictEqual(typeof progressTag, 'string');
|
||||
assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
|
||||
@@ -702,7 +702,7 @@ function runBackupUpload(uploadConfig, progressCallback, callback) {
|
||||
envCopy.NODE_OPTIONS = `--max-old-space-size=${heapSize}`;
|
||||
}
|
||||
|
||||
shell.sudo(`backup-${backupId}`, [ BACKUP_UPLOAD_CMD, backupId, backupConfig.format, dataLayout.toString() ], { env: envCopy, preserveEnv: true, ipc: true }, function (error) {
|
||||
shell.sudo(`backup-${remotePath}`, [ BACKUP_UPLOAD_CMD, remotePath, backupConfig.format, dataLayout.toString() ], { env: envCopy, preserveEnv: true, ipc: true }, function (error) {
|
||||
if (error && (error.code === null /* signal */ || (error.code !== 0 && error.code !== 50))) { // backuptask crashed
|
||||
return callback(new BoxError(BoxError.INTERNAL_ERROR, 'Backuptask crashed'));
|
||||
} else if (error && error.code === 50) { // exited with error
|
||||
@@ -738,7 +738,7 @@ async function uploadBoxSnapshot(backupConfig, progressCallback) {
|
||||
if (!boxDataDir) throw new BoxError(BoxError.FS_ERROR, `Error resolving boxdata: ${safe.error.message}`);
|
||||
|
||||
const uploadConfig = {
|
||||
backupId: 'snapshot/box',
|
||||
remotePath: 'snapshot/box',
|
||||
backupConfig,
|
||||
dataLayout: new DataLayout(boxDataDir, []),
|
||||
progressTag: 'box'
|
||||
@@ -755,29 +755,22 @@ async function uploadBoxSnapshot(backupConfig, progressCallback) {
|
||||
await backups.setSnapshotInfo('box', { timestamp: new Date().toISOString(), format: backupConfig.format });
|
||||
}
|
||||
|
||||
async function copy(backupConfig, sourceBackupId, destBackupId, options, progressCallback) {
|
||||
async function copy(backupConfig, srcRemotePath, destRemotePath, progressCallback) {
|
||||
assert.strictEqual(typeof backupConfig, 'object');
|
||||
assert.strictEqual(typeof sourceBackupId, 'string');
|
||||
assert.strictEqual(typeof destBackupId, 'string');
|
||||
assert.strictEqual(typeof options, 'object');
|
||||
assert.strictEqual(typeof srcRemotePath, 'string');
|
||||
assert.strictEqual(typeof destRemotePath, 'string');
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
|
||||
const format = backupConfig.format;
|
||||
const { provider, format } = backupConfig;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const startTime = new Date();
|
||||
|
||||
const copyEvents = storage.api(backupConfig.provider).copy(backupConfig, storage.getBackupFilePath(backupConfig, sourceBackupId, format), storage.getBackupFilePath(backupConfig, destBackupId, format));
|
||||
const copyEvents = storage.api(provider).copy(backupConfig, storage.getBackupFilePath(backupConfig, srcRemotePath, format), storage.getBackupFilePath(backupConfig, destRemotePath, format));
|
||||
copyEvents.on('progress', (message) => progressCallback({ message }));
|
||||
copyEvents.on('done', async function (copyBackupError) {
|
||||
const state = copyBackupError ? backups.BACKUP_STATE_ERROR : backups.BACKUP_STATE_NORMAL;
|
||||
|
||||
const [error] = await safe(backups.update(destBackupId, { preserveSecs: options.preserveSecs || 0, state }));
|
||||
if (copyBackupError) return reject(copyBackupError);
|
||||
copyEvents.on('done', function (error) {
|
||||
if (error) return reject(error);
|
||||
|
||||
debug(`copy: copied successfully to id ${destBackupId}. Took ${(new Date() - startTime)/1000} seconds`);
|
||||
|
||||
debug(`copy: copied successfully to ${destRemotePath}. Took ${(new Date() - startTime)/1000} seconds`);
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
@@ -790,12 +783,13 @@ async function rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCa
|
||||
assert(Array.isArray(dependsOn));
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
|
||||
const backupId = `${tag}/box_v${constants.VERSION}`;
|
||||
const remotePath = `${tag}/box_v${constants.VERSION}`;
|
||||
const format = backupConfig.format;
|
||||
|
||||
debug(`rotateBoxBackup: rotating to id ${backupId}`);
|
||||
debug(`rotateBoxBackup: rotating to id ${remotePath}`);
|
||||
|
||||
const data = {
|
||||
remotePath,
|
||||
encryptionVersion: backupConfig.encryption ? 2 : null,
|
||||
packageVersion: constants.VERSION,
|
||||
type: backups.BACKUP_TYPE_BOX,
|
||||
@@ -806,9 +800,13 @@ async function rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCa
|
||||
format
|
||||
};
|
||||
|
||||
await backups.add(backupId, data);
|
||||
await copy(backupConfig, 'snapshot/box', backupId, options, progressCallback);
|
||||
return backupId;
|
||||
const id = await backups.add(data);
|
||||
const [copyBackupError] = await safe(copy(backupConfig, 'snapshot/box', remotePath, progressCallback));
|
||||
const state = copyBackupError ? backups.BACKUP_STATE_ERROR : backups.BACKUP_STATE_NORMAL;
|
||||
await backups.update(id, { preserveSecs: options.preserveSecs || 0, state });
|
||||
if (copyBackupError) throw copyBackupError;
|
||||
|
||||
return id;
|
||||
}
|
||||
|
||||
async function backupBox(dependsOn, tag, options, progressCallback) {
|
||||
@@ -820,9 +818,7 @@ async function backupBox(dependsOn, tag, options, progressCallback) {
|
||||
const backupConfig = await settings.getBackupConfig();
|
||||
|
||||
await uploadBoxSnapshot(backupConfig, progressCallback);
|
||||
|
||||
const backupId = await rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCallback);
|
||||
return backupId;
|
||||
return await rotateBoxBackup(backupConfig, tag, options, dependsOn, progressCallback);
|
||||
}
|
||||
|
||||
async function rotateAppBackup(backupConfig, app, tag, options, progressCallback) {
|
||||
@@ -835,12 +831,13 @@ async function rotateAppBackup(backupConfig, app, tag, options, progressCallback
|
||||
const snapshotInfo = backups.getSnapshotInfo(app.id);
|
||||
|
||||
const manifest = snapshotInfo.restoreConfig ? snapshotInfo.restoreConfig.manifest : snapshotInfo.manifest; // compat
|
||||
const backupId = `${tag}/app_${app.fqdn}_v${manifest.version}`;
|
||||
const remotePath = `${tag}/app_${app.fqdn}_v${manifest.version}`;
|
||||
const format = backupConfig.format;
|
||||
|
||||
debug(`rotateAppBackup: rotating ${app.fqdn} to id ${backupId}`);
|
||||
debug(`rotateAppBackup: rotating ${app.fqdn} to path ${remotePath}`);
|
||||
|
||||
const data = {
|
||||
remotePath,
|
||||
encryptionVersion: backupConfig.encryption ? 2 : null,
|
||||
packageVersion: manifest.version,
|
||||
type: backups.BACKUP_TYPE_APP,
|
||||
@@ -851,9 +848,13 @@ async function rotateAppBackup(backupConfig, app, tag, options, progressCallback
|
||||
format: format
|
||||
};
|
||||
|
||||
await backups.add(backupId, data);
|
||||
await copy(backupConfig, `snapshot/app_${app.id}`, backupId, options, progressCallback);
|
||||
return backupId;
|
||||
const id = await backups.add(data);
|
||||
const [copyBackupError] = await safe(copy(backupConfig, `snapshot/app_${app.id}`, remotePath, progressCallback));
|
||||
const state = copyBackupError ? backups.BACKUP_STATE_ERROR : backups.BACKUP_STATE_NORMAL;
|
||||
await backups.update(id, { preserveSecs: options.preserveSecs || 0, state });
|
||||
if (copyBackupError) throw copyBackupError;
|
||||
|
||||
return id;
|
||||
}
|
||||
|
||||
async function backupApp(app, options, progressCallback) {
|
||||
@@ -890,7 +891,7 @@ async function uploadAppSnapshot(backupConfig, app, progressCallback) {
|
||||
|
||||
await snapshotApp(app, progressCallback);
|
||||
|
||||
const backupId = util.format('snapshot/app_%s', app.id);
|
||||
const remotePath = util.format('snapshot/app_%s', app.id);
|
||||
const appDataDir = safe.fs.realpathSync(path.join(paths.APPS_DATA_DIR, app.id));
|
||||
if (!appDataDir) throw new BoxError(BoxError.FS_ERROR, `Error resolving appsdata: ${safe.error.message}`);
|
||||
|
||||
@@ -899,7 +900,7 @@ async function uploadAppSnapshot(backupConfig, app, progressCallback) {
|
||||
progressCallback({ message: `Uploading app snapshot ${app.fqdn}`});
|
||||
|
||||
const uploadConfig = {
|
||||
backupId,
|
||||
remotePath,
|
||||
backupConfig,
|
||||
dataLayout,
|
||||
progressTag: app.fqdn
|
||||
@@ -909,7 +910,7 @@ async function uploadAppSnapshot(backupConfig, app, progressCallback) {
|
||||
|
||||
await util.promisify(runBackupUpload)(uploadConfig, progressCallback);
|
||||
|
||||
debug(`uploadAppSnapshot: ${app.fqdn} upload with id ${backupId}. ${(new Date() - startTime)/1000} seconds`);
|
||||
debug(`uploadAppSnapshot: ${app.fqdn} upload to ${remotePath}. ${(new Date() - startTime)/1000} seconds`);
|
||||
|
||||
await backups.setSnapshotInfo(app.id, { timestamp: new Date().toISOString(), manifest: app.manifest, format: backupConfig.format });
|
||||
}
|
||||
@@ -930,8 +931,7 @@ async function backupAppWithTag(app, tag, options, progressCallback) {
|
||||
const backupConfig = await settings.getBackupConfig();
|
||||
|
||||
await uploadAppSnapshot(backupConfig, app, progressCallback);
|
||||
const backupId = await rotateAppBackup(backupConfig, app, tag, options, progressCallback);
|
||||
return backupId;
|
||||
return await rotateAppBackup(backupConfig, app, tag, options, progressCallback);
|
||||
}
|
||||
|
||||
async function uploadMailSnapshot(backupConfig, progressCallback) {
|
||||
@@ -942,7 +942,7 @@ async function uploadMailSnapshot(backupConfig, progressCallback) {
|
||||
if (!mailDataDir) throw new BoxError(BoxError.FS_ERROR, `Error resolving maildata: ${safe.error.message}`);
|
||||
|
||||
const uploadConfig = {
|
||||
backupId: 'snapshot/mail',
|
||||
remotePath: 'snapshot/mail',
|
||||
backupConfig,
|
||||
dataLayout: new DataLayout(mailDataDir, []),
|
||||
progressTag: 'mail'
|
||||
@@ -965,12 +965,13 @@ async function rotateMailBackup(backupConfig, tag, options, progressCallback) {
|
||||
assert.strictEqual(typeof options, 'object');
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
|
||||
const backupId = `${tag}/mail_v${constants.VERSION}`;
|
||||
const remotePath = `${tag}/mail_v${constants.VERSION}`;
|
||||
const format = backupConfig.format;
|
||||
|
||||
debug(`rotateMailBackup: rotating to id ${backupId}`);
|
||||
debug(`rotateMailBackup: rotating to ${remotePath}`);
|
||||
|
||||
const data = {
|
||||
remotePath,
|
||||
encryptionVersion: backupConfig.encryption ? 2 : null,
|
||||
packageVersion: constants.VERSION,
|
||||
type: backups.BACKUP_TYPE_MAIL,
|
||||
@@ -981,9 +982,13 @@ async function rotateMailBackup(backupConfig, tag, options, progressCallback) {
|
||||
format: format
|
||||
};
|
||||
|
||||
await backups.add(backupId, data);
|
||||
await copy(backupConfig, 'snapshot/mail', backupId, options, progressCallback);
|
||||
return backupId;
|
||||
const id = await backups.add(data);
|
||||
const [copyBackupError] = await safe(copy(backupConfig, 'snapshot/mail', remotePath, progressCallback));
|
||||
const state = copyBackupError ? backups.BACKUP_STATE_ERROR : backups.BACKUP_STATE_NORMAL;
|
||||
await backups.update(id, { preserveSecs: options.preserveSecs || 0, state });
|
||||
if (copyBackupError) throw copyBackupError;
|
||||
|
||||
return id;
|
||||
}
|
||||
|
||||
async function backupMailWithTag(tag, options, progressCallback) {
|
||||
@@ -995,8 +1000,7 @@ async function backupMailWithTag(tag, options, progressCallback) {
|
||||
|
||||
const backupConfig = await settings.getBackupConfig();
|
||||
await uploadMailSnapshot(backupConfig, progressCallback);
|
||||
const backupId = await rotateMailBackup(backupConfig, tag, options, progressCallback);
|
||||
return backupId;
|
||||
return await rotateMailBackup(backupConfig, tag, options, progressCallback);
|
||||
}
|
||||
|
||||
async function backupMail(options, progressCallback) {
|
||||
@@ -1028,7 +1032,7 @@ async function fullBackup(options, progressCallback) {
|
||||
assert.strictEqual(typeof options, 'object');
|
||||
assert.strictEqual(typeof progressCallback, 'function');
|
||||
|
||||
const tag = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,'');
|
||||
const tag = (new Date()).toISOString().replace(/[T.]/g, '-').replace(/[:Z]/g,''); // unique tag under which all apps/mail/box backs up
|
||||
|
||||
const allApps = await apps.list();
|
||||
|
||||
@@ -1047,19 +1051,19 @@ async function fullBackup(options, progressCallback) {
|
||||
}
|
||||
|
||||
const startTime = new Date();
|
||||
const appBackupId = await backupAppWithTag(app, tag, options, (progress) => progressCallback({ percent: percent, message: progress.message }));
|
||||
const appBackupId = await backupAppWithTag(app, tag, options, (progress) => progressCallback({ percent, message: progress.message }));
|
||||
debug(`fullBackup: app ${app.fqdn} backup finished. Took ${(new Date() - startTime)/1000} seconds`);
|
||||
if (appBackupId) appBackupIds.push(appBackupId); // backupId can be null if in BAD_STATE and never backed up
|
||||
}
|
||||
|
||||
progressCallback({ percent: percent, message: 'Backing up mail' });
|
||||
progressCallback({ percent, message: 'Backing up mail' });
|
||||
percent += step;
|
||||
const mailBackupId = await backupMailWithTag(tag, options, (progress) => progressCallback({ percent: percent, message: progress.message }));
|
||||
const mailBackupId = await backupMailWithTag(tag, options, (progress) => progressCallback({ percent, message: progress.message }));
|
||||
|
||||
progressCallback({ percent: percent, message: 'Backing up system data' });
|
||||
progressCallback({ percent, message: 'Backing up system data' });
|
||||
percent += step;
|
||||
|
||||
const dependsOn = appBackupIds.concat(mailBackupId);
|
||||
const backupId = await backupBox(dependsOn, tag, options, (progress) => progressCallback({ percent: percent, message: progress.message }));
|
||||
const backupId = await backupBox(dependsOn, tag, options, (progress) => progressCallback({ percent, message: progress.message }));
|
||||
return backupId;
|
||||
}
|
||||
|
||||
@@ -92,7 +92,7 @@ describe('Backups API', function () {
|
||||
const response = await superagent.post(`${serverUrl}/api/v1/backups/${someBackup.id}`)
|
||||
.query({ access_token: owner.token })
|
||||
.send({ preserveSecs: 30, label: 'NewOrleans' });
|
||||
expect(response.statusCode).to.equal(400);
|
||||
expect(response.statusCode).to.equal(200);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,7 +2,10 @@
|
||||
|
||||
'use strict';
|
||||
|
||||
if (process.argv[2] === '--check') return console.log('OK');
|
||||
if (process.argv[2] === '--check') {
|
||||
console.log('OK');
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const assert = require('assert'),
|
||||
async = require('async'),
|
||||
@@ -22,11 +25,11 @@ function initialize(callback) {
|
||||
}
|
||||
|
||||
// Main process starts here
|
||||
const backupId = process.argv[2];
|
||||
const remotePath = process.argv[2];
|
||||
const format = process.argv[3];
|
||||
const dataLayoutString = process.argv[4];
|
||||
|
||||
debug(`Backing up ${dataLayoutString} to ${backupId}`);
|
||||
debug(`Backing up ${dataLayoutString} to ${remotePath}`);
|
||||
|
||||
process.on('SIGTERM', function () {
|
||||
process.exit(0);
|
||||
@@ -73,7 +76,7 @@ initialize(function (error) {
|
||||
dumpMemoryInfo();
|
||||
const timerId = setInterval(dumpMemoryInfo, 30000);
|
||||
|
||||
backuptask.upload(backupId, format, dataLayoutString, throttledProgressCallback(5000), function resultHandler(error) {
|
||||
backuptask.upload(remotePath, format, dataLayoutString, throttledProgressCallback(5000), function resultHandler(error) {
|
||||
debug('upload completed. error: ', error);
|
||||
|
||||
process.send({ result: error ? error.message : '' });
|
||||
|
||||
+5
-5
@@ -38,17 +38,17 @@ function api(provider) {
|
||||
}
|
||||
|
||||
// This is not part of the storage api, since we don't want to pull the "format" logistics into that
|
||||
function getBackupFilePath(backupConfig, backupId, format) {
|
||||
function getBackupFilePath(backupConfig, remotePath, format) {
|
||||
assert.strictEqual(typeof backupConfig, 'object');
|
||||
assert.strictEqual(typeof backupId, 'string');
|
||||
assert.strictEqual(typeof remotePath, 'string');
|
||||
assert.strictEqual(typeof format, 'string');
|
||||
|
||||
const basePath = api(backupConfig.provider).getBasePath(backupConfig);
|
||||
const rootPath = api(backupConfig.provider).getRootPath(backupConfig);
|
||||
|
||||
if (format === 'tgz') {
|
||||
const fileType = backupConfig.encryption ? '.tar.gz.enc' : '.tar.gz';
|
||||
return path.join(basePath, backupId+fileType);
|
||||
return path.join(rootPath, remotePath + fileType);
|
||||
} else {
|
||||
return path.join(basePath, backupId);
|
||||
return path.join(rootPath, remotePath);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
exports = module.exports = {
|
||||
getBasePath,
|
||||
getRootPath,
|
||||
checkPreconditions,
|
||||
|
||||
upload,
|
||||
@@ -46,7 +46,7 @@ const assert = require('assert'),
|
||||
shell = require('../shell.js');
|
||||
|
||||
// storage api
|
||||
function getBasePath(apiConfig) {
|
||||
function getRootPath(apiConfig) {
|
||||
assert.strictEqual(typeof apiConfig, 'object');
|
||||
|
||||
switch (apiConfig.provider) {
|
||||
@@ -78,7 +78,7 @@ async function checkPreconditions(apiConfig, dataLayout, callback) {
|
||||
|
||||
debug(`checkPreconditions: ${used} bytes`);
|
||||
|
||||
const [error, result] = await safe(df.file(getBasePath(apiConfig)));
|
||||
const [error, result] = await safe(df.file(getRootPath(apiConfig)));
|
||||
if (error) return callback(new BoxError(BoxError.FS_ERROR, `Error when checking for disk space: ${error.message}`));
|
||||
|
||||
// Check filesystem is mounted so we don't write into the actual folder on disk
|
||||
@@ -318,7 +318,7 @@ function testConfig(apiConfig, callback) {
|
||||
if (!safe.child_process.execSync(`mountpoint -q -- ${apiConfig.mountPoint}`)) return callback(new BoxError(BoxError.BAD_FIELD, `${apiConfig.mountPoint} is not mounted`));
|
||||
}
|
||||
|
||||
const basePath = getBasePath(apiConfig);
|
||||
const basePath = getRootPath(apiConfig);
|
||||
const field = apiConfig.provider === PROVIDER_FILESYSTEM ? 'backupFolder' : 'mountPoint';
|
||||
|
||||
if (!safe.fs.mkdirSync(path.join(basePath, 'snapshot'), { recursive: true }) && safe.error.code !== 'EEXIST') {
|
||||
|
||||
+2
-2
@@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
exports = module.exports = {
|
||||
getBasePath,
|
||||
getRootPath,
|
||||
checkPreconditions,
|
||||
|
||||
upload,
|
||||
@@ -64,7 +64,7 @@ function getBucket(apiConfig) {
|
||||
}
|
||||
|
||||
// storage api
|
||||
function getBasePath(apiConfig) {
|
||||
function getRootPath(apiConfig) {
|
||||
assert.strictEqual(typeof apiConfig, 'object');
|
||||
|
||||
return apiConfig.prefix;
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
// for the other API calls we leave it to the backend to retry. this allows
|
||||
// them to tune the concurrency based on failures/rate limits accordingly
|
||||
exports = module.exports = {
|
||||
getBasePath,
|
||||
getRootPath,
|
||||
checkPreconditions,
|
||||
|
||||
upload,
|
||||
@@ -49,7 +49,7 @@ function injectPrivateFields(newConfig, currentConfig) {
|
||||
// in-place injection of tokens and api keys which came in with constants.SECRET_PLACEHOLDER
|
||||
}
|
||||
|
||||
function getBasePath(apiConfig) {
|
||||
function getRootPath(apiConfig) {
|
||||
assert.strictEqual(typeof apiConfig, 'object');
|
||||
|
||||
// Result: path at the backup storage
|
||||
|
||||
+2
-2
@@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
exports = module.exports = {
|
||||
getBasePath,
|
||||
getRootPath,
|
||||
checkPreconditions,
|
||||
|
||||
upload,
|
||||
@@ -28,7 +28,7 @@ var assert = require('assert'),
|
||||
debug = require('debug')('box:storage/noop'),
|
||||
EventEmitter = require('events');
|
||||
|
||||
function getBasePath(apiConfig) {
|
||||
function getRootPath(apiConfig) {
|
||||
assert.strictEqual(typeof apiConfig, 'object');
|
||||
return '';
|
||||
}
|
||||
|
||||
+2
-2
@@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
exports = module.exports = {
|
||||
getBasePath,
|
||||
getRootPath,
|
||||
checkPreconditions,
|
||||
|
||||
upload,
|
||||
@@ -92,7 +92,7 @@ function getS3Config(apiConfig, callback) {
|
||||
}
|
||||
|
||||
// storage api
|
||||
function getBasePath(apiConfig) {
|
||||
function getRootPath(apiConfig) {
|
||||
assert.strictEqual(typeof apiConfig, 'object');
|
||||
|
||||
return apiConfig.prefix;
|
||||
|
||||
@@ -22,7 +22,8 @@ describe('backup cleaner', function () {
|
||||
after(cleanup);
|
||||
|
||||
const backupTemplate = {
|
||||
id: 'someid',
|
||||
id: null,
|
||||
remotePath: 'somepath',
|
||||
encryptionVersion: 2,
|
||||
packageVersion: '1.0.0',
|
||||
type: backups.BACKUP_TYPE_BOX,
|
||||
@@ -125,7 +126,8 @@ describe('backup cleaner', function () {
|
||||
|
||||
describe('task', function () {
|
||||
const BACKUP_0_BOX = {
|
||||
id: 'backup-box-0',
|
||||
id: null,
|
||||
remotePath: 'backup-box-0',
|
||||
identifier: 'box',
|
||||
encryptionVersion: null,
|
||||
packageVersion: '1.0.0',
|
||||
@@ -137,7 +139,8 @@ describe('backup cleaner', function () {
|
||||
};
|
||||
|
||||
const BACKUP_0_APP_0 = { // backup of installed app
|
||||
id: 'backup-app-00',
|
||||
id: null,
|
||||
remotePath: 'backup-app-00',
|
||||
identifier: app.id,
|
||||
encryptionVersion: null,
|
||||
packageVersion: '1.0.0',
|
||||
@@ -149,7 +152,8 @@ describe('backup cleaner', function () {
|
||||
};
|
||||
|
||||
const BACKUP_0_APP_1 = { // this app is uninstalled
|
||||
id: 'backup-app-01',
|
||||
id: null,
|
||||
remotePath: 'backup-app-01',
|
||||
identifier: 'app1',
|
||||
encryptionVersion: null,
|
||||
packageVersion: '1.0.0',
|
||||
@@ -161,7 +165,8 @@ describe('backup cleaner', function () {
|
||||
};
|
||||
|
||||
const BACKUP_1_BOX = {
|
||||
id: 'backup-box-1',
|
||||
id: null,
|
||||
remotePath: 'backup-box-1',
|
||||
encryptionVersion: null,
|
||||
packageVersion: '1.0.0',
|
||||
type: backups.BACKUP_TYPE_BOX,
|
||||
@@ -173,7 +178,8 @@ describe('backup cleaner', function () {
|
||||
};
|
||||
|
||||
const BACKUP_1_APP_0 = {
|
||||
id: 'backup-app-10',
|
||||
id: null,
|
||||
remotePath: 'backup-app-10',
|
||||
encryptionVersion: null,
|
||||
packageVersion: '1.0.0',
|
||||
type: backups.BACKUP_TYPE_APP,
|
||||
@@ -185,7 +191,8 @@ describe('backup cleaner', function () {
|
||||
};
|
||||
|
||||
const BACKUP_1_APP_1 = {
|
||||
id: 'backup-app-11',
|
||||
id: null,
|
||||
remotePath: 'backup-app-11',
|
||||
encryptionVersion: null,
|
||||
packageVersion: '1.0.0',
|
||||
type: backups.BACKUP_TYPE_APP,
|
||||
@@ -228,14 +235,21 @@ describe('backup cleaner', function () {
|
||||
await cleanupBackups();
|
||||
});
|
||||
|
||||
it('succeeds with box backups, keeps latest', async function () {
|
||||
for (const backup of [[ BACKUP_0_BOX, BACKUP_0_APP_0, BACKUP_0_APP_1 ], [ BACKUP_1_BOX, BACKUP_1_APP_0, BACKUP_1_APP_1 ]]) {
|
||||
await delay(2000); // space out backups
|
||||
for (const b of backup) {
|
||||
await backups.add(b.id, b);
|
||||
}
|
||||
}
|
||||
it('add the backups', async function () {
|
||||
BACKUP_0_APP_0.id = await backups.add(BACKUP_0_APP_0);
|
||||
BACKUP_0_APP_1.id = await backups.add(BACKUP_0_APP_1);
|
||||
BACKUP_0_BOX.dependsOn = [ BACKUP_0_APP_0.id, BACKUP_0_APP_1.id ];
|
||||
BACKUP_0_BOX.id = await backups.add(BACKUP_0_BOX);
|
||||
|
||||
await delay(2000); // space out backups
|
||||
|
||||
BACKUP_1_APP_0.id = await backups.add(BACKUP_1_APP_0);
|
||||
BACKUP_1_APP_1.id = await backups.add(BACKUP_1_APP_1);
|
||||
BACKUP_1_BOX.dependsOn = [ BACKUP_1_APP_0.id, BACKUP_1_APP_1.id ];
|
||||
BACKUP_1_BOX.id = await backups.add(BACKUP_1_BOX);
|
||||
});
|
||||
|
||||
it('succeeds with box backups, keeps latest', async function () {
|
||||
await cleanupBackups();
|
||||
|
||||
const results = await backups.getByTypePaged(backups.BACKUP_TYPE_BOX, 1, 1000);
|
||||
@@ -261,7 +275,7 @@ describe('backup cleaner', function () {
|
||||
it('succeeds for app backups not referenced by a box backup', async function () {
|
||||
// add two dangling app backups not referenced by box backup. app1 is uninstalled. app0 is there
|
||||
for (const backup of [BACKUP_0_APP_0, BACKUP_0_APP_1]) {
|
||||
await backups.add(backup.id, backup);
|
||||
backup.id = await backups.add(backup);
|
||||
}
|
||||
|
||||
await delay(2000); // wait for expiration
|
||||
@@ -270,7 +284,7 @@ describe('backup cleaner', function () {
|
||||
|
||||
let result = await backups.getByTypePaged(backups.BACKUP_TYPE_APP, 1, 1000);
|
||||
expect(result.length).to.equal(3);
|
||||
result = result.sort((r1, r2) => r1.id.localeCompare(r2.id));
|
||||
result = result.sort((r1, r2) => r1.remotePath.localeCompare(r2.remotePath));
|
||||
expect(result[0].id).to.be(BACKUP_0_APP_0.id); // because app is installed, latest backup is preserved
|
||||
expect(result[1].id).to.be(BACKUP_1_APP_0.id); // referenced by box
|
||||
expect(result[2].id).to.be(BACKUP_1_APP_1.id); // referenced by box
|
||||
|
||||
@@ -19,7 +19,8 @@ describe('backups', function () {
|
||||
after(cleanup);
|
||||
|
||||
const boxBackup = {
|
||||
id: 'backup-box',
|
||||
id: null,
|
||||
remotePath: 'backup-box',
|
||||
encryptionVersion: 2,
|
||||
packageVersion: '1.0.0',
|
||||
type: backups.BACKUP_TYPE_BOX,
|
||||
@@ -33,7 +34,8 @@ describe('backups', function () {
|
||||
};
|
||||
|
||||
const appBackup = {
|
||||
id: 'app_appid_123',
|
||||
id: null,
|
||||
remotePath: 'app_appid_123',
|
||||
encryptionVersion: null,
|
||||
packageVersion: '1.0.0',
|
||||
type: backups.BACKUP_TYPE_APP,
|
||||
@@ -47,11 +49,11 @@ describe('backups', function () {
|
||||
};
|
||||
|
||||
it('add succeeds', async function () {
|
||||
await backups.add(boxBackup.id, boxBackup);
|
||||
boxBackup.id = await backups.add(boxBackup);
|
||||
});
|
||||
|
||||
it('fails with duplicating id', async function () {
|
||||
const [error] = await safe(backups.add(boxBackup.id, boxBackup));
|
||||
it('fails with duplicate path', async function () {
|
||||
const [error] = await safe(backups.add(boxBackup));
|
||||
expect(error.reason).to.be(BoxError.ALREADY_EXISTS);
|
||||
});
|
||||
|
||||
@@ -87,7 +89,7 @@ describe('backups', function () {
|
||||
});
|
||||
|
||||
it('add app backup succeeds', async function () {
|
||||
await backups.add(appBackup.id, appBackup);
|
||||
appBackup.id = await backups.add(appBackup);
|
||||
});
|
||||
|
||||
it('get app backup succeeds', async function () {
|
||||
|
||||
Reference in New Issue
Block a user