backup target: create snapshot and cache files per target
The snapshot file tracks the snapshot directory. When the app gets deleted, the cleaner will remove the upstream snapshot directory the next time it runs. Cache files are used in the rsync logic to track what was uploaded into the snapshot in the previous run, without needing to rescan upstream.
This commit is contained in:
@@ -19,6 +19,7 @@ const assert = require('assert'),
|
||||
fs = require('fs'),
|
||||
hush = require('../hush.js'),
|
||||
path = require('path'),
|
||||
paths = require('../paths.js'),
|
||||
ProgressStream = require('../progress-stream.js'),
|
||||
promiseRetry = require('../promise-retry.js'),
|
||||
safe = require('safetydance'),
|
||||
@@ -109,7 +110,8 @@ async function sync(backupTarget, remotePath, dataLayout, progressCallback) {
|
||||
// the number here has to take into account the s3.upload partSize (which is 10MB). So 20=200MB
|
||||
const concurrency = backupTarget.limits?.syncConcurrency || (backupTarget.provider === 's3' ? 20 : 10);
|
||||
|
||||
const changes = await syncer.sync(dataLayout);
|
||||
const cacheFile = path.join(paths.BACKUP_INFO_DIR, backupTarget.id, `${dataLayout.getBasename()}.sync.cache`);
|
||||
const changes = await syncer.sync(dataLayout, { cacheFile });
|
||||
debug(`sync: processing ${changes.delQueue.length} deletes and ${changes.addQueue.length} additions`);
|
||||
|
||||
const [delError] = await safe(async.eachLimit(changes.delQueue, concurrency, async (change) => await processSyncerChange(change, backupTarget, remotePath, dataLayout, progressCallback)));
|
||||
|
||||
Reference in New Issue
Block a user