200 lines
8.1 KiB
JavaScript
200 lines
8.1 KiB
JavaScript
'use strict';

// syncer: computes the difference between the local data layout and a
// persisted newline-delimited JSON cache file, producing add/remove queues
// (sync) and then patching integrity information into the cache (finalize).
exports = module.exports = {
    sync,

    finalize
};
|
|
|
|
const assert = require('node:assert'),
|
|
BoxError = require('./boxerror.js'),
|
|
DataLayout = require('./datalayout.js'),
|
|
debug = require('debug')('box:syncer'),
|
|
fs = require('node:fs'),
|
|
path = require('node:path'),
|
|
readline = require('node:readline'),
|
|
safe = require('safetydance'),
|
|
util = require('node:util');
|
|
|
|
// Reads the newline-delimited JSON cache file into an array of entries.
// Returns [] when the file is missing or unreadable (safe returns a falsy
// value on error). Blank lines are skipped so a trailing newline or stray
// empty line cannot make JSON.parse('') throw.
// Note: .map(JSON.parse) is deliberately avoided — Array#map passes the
// element index as the second argument, which JSON.parse interprets as its
// reviver parameter.
function readCache(cacheFile) {
    assert.strictEqual(typeof cacheFile, 'string');

    const cache = safe.fs.readFileSync(cacheFile, 'utf8');
    if (!cache) return [];

    return cache.split('\n')
        .filter((line) => line.length !== 0)
        .map((line) => JSON.parse(line));
}
|
|
|
|
// Lists the directory at dirPath, returning name-sorted entries of
// { stat, absolutePath, name }. Returns [] if the directory cannot be read.
// stat may be null when lstat fails; callers (traverse) skip such entries.
function readTree(dirPath) {
    assert.strictEqual(typeof dirPath, 'string');

    const names = safe.fs.readdirSync(dirPath); // null on error
    // check for error BEFORE sorting. the original called .sort() on the raw
    // result and would throw a TypeError on readdir failure instead of
    // returning []
    if (!names) return [];

    names.sort();

    return names.map((name) => {
        const absolutePath = path.join(dirPath, name);
        return {
            stat: safe.fs.lstatSync(absolutePath),
            absolutePath: absolutePath,
            name: name
        };
    });
}
|
|
|
|
// Builds the top-level entry list for the sync walk: all entries under the
// layout's local root, plus one synthetic entry per extra directory mapping
// (stat'ed at its local path but listed under its remote name). The combined
// list is returned sorted by name, matching the cache's sort order.
function readDataLayoutTree(dataLayout) {
    assert.strictEqual(typeof dataLayout, 'object');

    const entries = readTree(dataLayout.localRoot());

    for (const mapping of dataLayout.directoryMap()) {
        entries.push({
            stat: safe.fs.lstatSync(mapping.localDir),
            absolutePath: mapping.localDir,
            name: mapping.remoteDir
        });
    }

    entries.sort((a, b) => {
        if (a.name < b.name) return -1;
        if (a.name > b.name) return +1;
        return 0;
    });

    return entries;
}
|
|
|
|
// True when the st_mode value `x` encodes a directory.
// The file type is a multi-bit field, so it must be isolated with the S_IFMT
// mask before comparing. The original `(x & S_IFDIR) === S_IFDIR` test also
// matched block devices, because S_IFBLK (0o060000) has the S_IFDIR bit
// (0o040000) set.
function ISDIR(x) {
    return (x & fs.constants.S_IFMT) === fs.constants.S_IFDIR;
}
|
|
|
|
// True when the st_mode value `x` encodes a regular file.
// As with ISDIR, the type field must be isolated with S_IFMT: the original
// `(x & S_IFREG) === S_IFREG` test also matched symlinks (S_IFLNK =
// 0o120000) and sockets (S_IFSOCK = 0o140000), both of which contain the
// S_IFREG bit (0o100000).
function ISFILE(x) {
    return (x & fs.constants.S_IFMT) === fs.constants.S_IFREG;
}
|
|
|
|
// Walks the data layout tree in lockstep with the previously persisted cache
// (both sorted by path) to compute what changed since the last run.
// Side effects: writes a fresh cache to `${cacheFile}.new` as it walks;
// finalize() later patches in integrity info and renames it into place.
// Returns { delQueue, addQueue, integrityMap } where delQueue must be
// processed before addQueue, and integrityMap holds the cached integrity of
// files that did NOT change.
// Fix vs. original: Node's fs.Stats exposes the inode number as `.ino`, not
// `.inode` — the original read `entryStat.inode` (always undefined), so the
// inode-change detection below never fired and the cached `inode` field was
// always dropped by JSON.stringify. First run after this fix will therefore
// see every file as 'changed' once (cached inode undefined vs. real number).
async function sync(dataLayout, cacheFile) {
    assert(dataLayout instanceof DataLayout, 'Expecting dataLayout to be a DataLayout');
    assert.strictEqual(typeof cacheFile, 'string');

    const addQueue = [], delQueue = []; // separate queues. we have to process the del first and then the add
    let curCacheIndex = 0; // cursor into `cache`; both disk walk and cache are path-sorted
    const integrityMap = new Map(); // integrity of unchanged files
    const newCacheFile = `${cacheFile}.new`;
    let cache = [];

    // if cache is missing or if we crashed/errored in previous run, start out empty
    if (!safe.fs.existsSync(cacheFile)) {
        debug(`sync: cache file ${cacheFile} is missing, starting afresh`);
        delQueue.push({ operation: 'removedir', path: '', reason: 'nocache' });
    } else if (safe.fs.existsSync(newCacheFile)) {
        debug(`sync: new cache file ${newCacheFile} exists. previous run crashed, starting afresh`);
        delQueue.push({ operation: 'removedir', path: '', reason: 'crash' });
    } else {
        debug(`sync: loading cache file ${cacheFile}`);
        cache = readCache(cacheFile);
    }

    const newCacheFd = safe.fs.openSync(newCacheFile, 'w'); // truncates any existing file
    if (newCacheFd === -1) throw new BoxError(BoxError.FS_ERROR, 'Error opening new cache file: ' + safe.error.message);

    // Queues removals for every cache entry whose path sorts before entryPath
    // (all remaining entries when entryPath is ''). A removed directory
    // swallows its descendants — 'removedir' is assumed recursive.
    function advanceCache(entryPath) {
        let lastRemovedDir = null;

        for (; curCacheIndex !== cache.length && (entryPath === '' || cache[curCacheIndex].path < entryPath); ++curCacheIndex) {
            // ignore subdirs of lastRemovedDir since it was removed already
            if (lastRemovedDir && cache[curCacheIndex].path.startsWith(lastRemovedDir)) continue;

            if (ISDIR(cache[curCacheIndex].stat.mode)) {
                delQueue.push({ operation: 'removedir', path: cache[curCacheIndex].path, reason: 'missing' });
                lastRemovedDir = cache[curCacheIndex].path;
            } else {
                delQueue.push({ operation: 'remove', path: cache[curCacheIndex].path, reason: 'missing' });
                lastRemovedDir = null;
            }
        }
    }

    // Recursively walks the name-sorted disk entries in lockstep with the
    // sorted cache, classifying each entry as new / changed / unchanged /
    // type-flipped and writing every kept entry into the new cache file.
    function traverse(entries, relpath) {
        for (const entry of entries) {
            const entryPath = path.join(relpath, entry.name);
            const entryStat = entry.stat;

            if (!entryStat) continue; // some stat error. pretend it doesn't exist
            if (!entryStat.isDirectory() && !entryStat.isFile()) continue; // ignore non-files and dirs
            if (entryStat.isSymbolicLink()) continue;

            // the cache field stays named 'inode' for continuity; the value comes from fs.Stats.ino
            safe.fs.appendFileSync(newCacheFd, JSON.stringify({ path: entryPath, stat: { mtime: entryStat.mtime.getTime(), size: entryStat.size, inode: entryStat.ino, mode: entryStat.mode } }) + '\n');

            if (curCacheIndex !== cache.length && cache[curCacheIndex].path < entryPath) { // files disappeared. first advance cache as needed
                advanceCache(entryPath);
            }

            const cachePath = curCacheIndex === cache.length ? null : cache[curCacheIndex].path;
            const cacheStat = curCacheIndex === cache.length ? null : cache[curCacheIndex].stat;

            if (cachePath === null || cachePath > entryPath) { // new files appeared
                if (entryStat.isDirectory()) {
                    traverse(readTree(entry.absolutePath), entryPath);
                } else {
                    addQueue.push({ operation: 'add', path: entryPath, reason: 'new', position: addQueue.length });
                }
            } else if (ISDIR(cacheStat.mode) && entryStat.isDirectory()) { // dir names match
                ++curCacheIndex;
                traverse(readTree(entry.absolutePath), entryPath);
            } else if (ISFILE(cacheStat.mode) && entryStat.isFile()) { // file names match
                if (entryStat.mtime.getTime() !== cacheStat.mtime || entryStat.size !== cacheStat.size || entryStat.ino !== cacheStat.inode) { // file changed
                    addQueue.push({ operation: 'add', path: entryPath, reason: 'changed', position: addQueue.length });
                } else {
                    integrityMap.set(entryPath, cache[curCacheIndex].integrity);
                }
                ++curCacheIndex;
            } else if (entryStat.isDirectory()) { // was a file, now a directory
                delQueue.push({ operation: 'remove', path: cachePath, reason: 'wasfile' });
                ++curCacheIndex;
                traverse(readTree(entry.absolutePath), entryPath);
            } else { // was a dir, now a file
                delQueue.push({ operation: 'removedir', path: cachePath, reason: 'wasdir' });
                // skip all cached descendants of the removed directory
                while (curCacheIndex !== cache.length && cache[curCacheIndex].path.startsWith(cachePath)) ++curCacheIndex;
                addQueue.push({ operation: 'add', path: entryPath, reason: 'wasdir', position: addQueue.length });
            }
        }
    }

    traverse(readDataLayoutTree(dataLayout), '');
    advanceCache(''); // remove rest of the cache entries
    cache = []; // clear cache to clear up some memory

    safe.fs.closeSync(newCacheFd);

    return {
        delQueue,
        addQueue,
        integrityMap
    };
}
|
|
|
|
// Rewrites the freshly generated cache (`${cacheFile}.new`) into the final
// cache file, attaching per-file integrity info ({ size, sha256 }) from
// integrityMap. Streams line-by-line through a temp file, then renames the
// temp file into place after removing the old artifacts.
async function finalize(integrityMap, cacheFile) {
    assert(util.types.isMap(integrityMap), 'integrityMap is not a Map');
    assert.strictEqual(typeof cacheFile, 'string');

    const newCacheFile = `${cacheFile}.new`;
    const tempCacheFile = `${cacheFile}.tmp`;

    debug(`finalize: patching in integrity information into ${cacheFile}`);

    const tempCacheFd = safe.fs.openSync(tempCacheFile, 'w'); // truncates any existing file
    if (tempCacheFd === -1) throw new BoxError(BoxError.FS_ERROR, 'Error opening temp cache file: ' + safe.error.message);

    const lineReader = readline.createInterface({
        input: fs.createReadStream(newCacheFile, { encoding: 'utf8' }),
        crlfDelay: Infinity,
    });

    for await (const rawLine of lineReader) {
        if (!rawLine) continue; // skip blank lines

        const entry = JSON.parse(rawLine);

        // only regular files carry integrity info; every file must have one
        if (ISFILE(entry.stat.mode)) {
            const integrity = integrityMap.get(entry.path); // { size, sha256 }
            if (typeof integrity === 'undefined') throw new BoxError(BoxError.INTERNAL_ERROR, `No integrity information for ${entry.path}`);
            entry.integrity = integrity;
        }

        safe.fs.appendFileSync(tempCacheFd, JSON.stringify(entry) + '\n');
    }

    safe.fs.closeSync(tempCacheFd);

    safe.fs.unlinkSync(cacheFile);
    safe.fs.unlinkSync(newCacheFile);
    if (!safe.fs.renameSync(tempCacheFile, cacheFile)) debug('Unable to save new cache file');
}
|