make startBackupTask async

This commit is contained in:
Girish Ramakrishnan
2021-09-10 12:10:10 -07:00
parent 242fad137c
commit ae13fe60a7
4 changed files with 53 additions and 69 deletions

View File

@@ -44,6 +44,7 @@ const assert = require('assert'),
CronJob = require('cron').CronJob,
crypto = require('crypto'),
database = require('./database.js'),
debug = require('debug')('box:backups'),
ejs = require('ejs'),
eventlog = require('./eventlog.js'),
fs = require('fs'),
@@ -53,10 +54,7 @@ const assert = require('assert'),
safe = require('safetydance'),
settings = require('./settings.js'),
storage = require('./storage.js'),
tasks = require('./tasks.js'),
util = require('util');
const getBackupConfig = util.callbackify(settings.getBackupConfig);
tasks = require('./tasks.js');
const COLLECTD_CONFIG_EJS = fs.readFileSync(__dirname + '/collectd/cloudron-backup.ejs', { encoding: 'utf8' });
@@ -177,31 +175,28 @@ async function update(id, backup) {
if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
}
// Starts a full-backup task. Async (post-commit ae13fe60a7): resolves with the
// new task id, or throws BoxError(BAD_STATE) when a backup is already running.
// auditSource: attribution object recorded in the eventlog entries.
async function startBackupTask(auditSource) {
    // Take the full-backup lock up front; a held lock means a backup (or a
    // conflicting operation) is already in progress.
    let error = locker.lock(locker.OP_FULL_BACKUP);
    if (error) throw new BoxError(BoxError.BAD_STATE, `Cannot backup now: ${error.message}`);

    const backupConfig = await settings.getBackupConfig();
    // memoryLimit is stored in bytes; convert to MB and enforce a 400 MB floor.
    const memoryLimit = 'memoryLimit' in backupConfig ? Math.max(backupConfig.memoryLimit/1024/1024, 400) : 400;

    const taskId = await tasks.add(tasks.TASK_BACKUP, [ { /* options */ } ]);
    await eventlog.add(eventlog.ACTION_BACKUP_START, auditSource, { taskId });

    // Fire-and-forget: the task runs in the background; the completion callback
    // releases the lock and records the outcome. We return the taskId immediately.
    tasks.startTask(taskId, { timeout: 24 * 60 * 60 * 1000 /* 24 hours */, nice: 15, memoryLimit }, async function (error, backupId) {
        locker.unlock(locker.OP_FULL_BACKUP); // always release, success or failure

        const errorMessage = error ? error.message : '';
        const timedOut = error ? error.code === tasks.ETIMEOUT : false;

        // Best-effort eventlog write; safe() swallows the error and logs via debug.
        await safe(eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, errorMessage, timedOut, backupId }), { debug });
    });

    return taskId;
}
async function list(page, perPage) {
@@ -223,12 +218,14 @@ async function del(id) {
}
// Synchronously removes stale '*.sync.cache' files from the backup info dir.
// Best-effort: a missing/unreadable directory (readdirSync returns a falsy
// value via safe.fs) or a failed unlink is silently ignored.
function cleanupCacheFilesSync() {
    const files = safe.fs.readdirSync(path.join(paths.BACKUP_INFO_DIR));
    if (!files) return; // directory missing or unreadable; nothing to clean

    files
        .filter(function (f) { return f.endsWith('.sync.cache'); })
        .forEach(function (f) {
            safe.fs.unlinkSync(path.join(paths.BACKUP_INFO_DIR, f));
        });
}
function getSnapshotInfo(id) {