syncer: simply return the changes

This is easier to test. The initial code wanted to make the changes a stream,
but this never happened since the need never arose.
This commit is contained in:
Girish Ramakrishnan
2025-02-13 17:05:35 +01:00
parent 6a303ae50a
commit b94ce542c3
3 changed files with 181 additions and 271 deletions
+21 -28
View File
@@ -1,18 +1,17 @@
'use strict';
const assert = require('assert'),
async = require('async'),
BoxError = require('./boxerror.js'),
DataLayout = require('./datalayout.js'),
debug = require('debug')('box:syncer'),
fs = require('fs'),
path = require('path'),
paths = require('./paths.js'),
safe = require('safetydance'),
util = require('util');
safe = require('safetydance');
exports = module.exports = {
sync: util.promisify(sync)
sync,
finalize
};
function readCache(cacheFile) {
@@ -64,19 +63,16 @@ function ISFILE(x) {
return (x & fs.constants.S_IFREG) === fs.constants.S_IFREG;
}
function sync(dataLayout, taskProcessor, concurrency, callback) {
async function sync(dataLayout) {
assert(dataLayout instanceof DataLayout, 'Expecting dataLayout to be a DataLayout');
assert.strictEqual(typeof taskProcessor, 'function');
assert.strictEqual(typeof concurrency, 'number');
assert.strictEqual(typeof callback, 'function');
const addQueue = [], delQueue = [];
const addQueue = [], delQueue = []; // separate queues. we have to process the del first and then the add
let curCacheIndex = 0;
const cacheFile = path.join(paths.BACKUP_INFO_DIR, dataLayout.getBasename() + '.sync.cache'),
newCacheFile = path.join(paths.BACKUP_INFO_DIR, dataLayout.getBasename() + '.sync.cache.new');
let cache = [ ];
let cache = [];
// if cache is missing or if we crashed/errored in previous run, start out empty. TODO: do a remote listDir and rebuild
if (!safe.fs.existsSync(cacheFile)) {
@@ -88,7 +84,7 @@ function sync(dataLayout, taskProcessor, concurrency, callback) {
}
const newCacheFd = safe.fs.openSync(newCacheFile, 'w'); // truncates any existing file
if (newCacheFd === -1) return callback(new BoxError(BoxError.FS_ERROR, 'Error opening new cache file: ' + safe.error.message));
if (newCacheFd === -1) throw new BoxError(BoxError.FS_ERROR, 'Error opening new cache file: ' + safe.error.message);
function advanceCache(entryPath) {
let lastRemovedDir = null;
@@ -157,21 +153,18 @@ function sync(dataLayout, taskProcessor, concurrency, callback) {
safe.fs.closeSync(newCacheFd);
debug(`sync: processing ${delQueue.length} deletes and ${addQueue.length} additions`);
async.eachLimit(delQueue, concurrency, taskProcessor, function (error) {
debug('sync: done processing deletes. error: %o', error);
async.eachLimit(addQueue, concurrency, taskProcessor, function (error) {
debug('sync: done processing adds. error: %o', error);
if (error) return callback(error);
safe.fs.unlinkSync(cacheFile);
if (!safe.fs.renameSync(newCacheFile, cacheFile)) debug('Unable to save new cache file');
callback();
});
});
return {
delQueue,
addQueue,
cacheFile,
newCacheFile
};
}
// Commits the result of a completed sync(): removes the previous cache file and
// moves the freshly written cache into its place.
// `changes` is presumably the object returned by sync()
// ({ delQueue, addQueue, cacheFile, newCacheFile }) — TODO confirm against callers.
// NOTE(review): declared async but contains no awaits; callers only rely on the
// returned promise. safe.fs.* swallow errors, so both filesystem steps are best-effort.
async function finalize(changes) {
assert.strictEqual(typeof changes, 'object');
// best-effort delete of the old cache; failure (e.g. first run, file absent) is ignored
safe.fs.unlinkSync(changes.cacheFile);
// if the rename fails, the next sync() run starts with an empty cache (see sync's crash handling)
if (!safe.fs.renameSync(changes.newCacheFile, changes.cacheFile)) debug('Unable to save new cache file');
}