96dc79cfe6
- Convert all require()/module.exports to import/export across 260+ files - Add "type": "module" to package.json to enable ESM by default - Add migrations/package.json with "type": "commonjs" to keep db-migrate compatible - Convert eslint.config.js to ESM with sourceType: "module" - Replace __dirname/__filename with import.meta.dirname/import.meta.filename - Replace require.main === module with process.argv[1] === import.meta.filename - Remove 'use strict' directives (implicit in ESM) - Convert dynamic require() in switch statements to static import lookup maps (dns.js, domains.js, backupformats.js, backupsites.js, network.js) - Extract self-referencing exports.CONSTANT patterns into standalone const declarations (apps.js, services.js, locks.js, users.js, mail.js, etc.) - Lazify SERVICES object in services.js to avoid circular dependency TDZ issues - Add clearMailQueue() to mailer.js for ESM-safe queue clearing in tests - Add _setMockApp() to ldapserver.js for ESM-safe test mocking - Add _setMockResolve() wrapper to dig.js for ESM-safe DNS mocking in tests - Convert backupupload.js to use dynamic imports so --check exits before loading the module graph (which requires BOX_ENV) - Update check-install to use ESM import for infra_version.js - Convert scripts/ (hotfix, release, remote_hotfix.js, find-unused-translations) - All 1315 tests passing Migration stats (AI-assisted using Cursor with Claude): - Wall clock time: ~3-4 hours - Assistant completions: ~80-100 - Estimated token usage: ~1-2M tokens Co-authored-by: Cursor <cursoragent@cursor.com>
106 lines
4.1 KiB
JavaScript
106 lines
4.1 KiB
JavaScript
/* jslint node:true */
|
|
|
|
import * as common from './common.js';
|
|
import DataLayout from '../datalayout.js';
|
|
import * as tgz from '../backupformat/tgz.js';
|
|
const EnsureFileSizeStream = tgz._EnsureFileSizeStream;
|
|
import expect from 'expect.js';
|
|
import fs from 'node:fs';
|
|
import os from 'node:os';
|
|
import path from 'node:path';
|
|
import * as rsync from '../backupformat/rsync.js';
|
|
import safe from 'safetydance';
|
|
import stream from 'node:stream/promises';
|
|
|
|
/* global it:false */
|
|
/* global describe:false */
|
|
/* global before:false */
|
|
/* global after:false */
|
|
|
|
describe('backuptask', function () {
    const { setup, cleanup, createTree } = common;

    before(setup);
    after(cleanup);

    describe('EnsureFileSizeStream', function () {
        const name = 'eberswalde.txt';
        const data = Buffer.from('This file has 22 bytes');
        const inputPath = `/tmp/${name}`;
        const outputPath = '/tmp/out.txt';

        before(function () {
            fs.writeFileSync(inputPath, data);
        });

        after(function () {
            fs.rmSync(inputPath);
            fs.rmSync(outputPath, { force: true }); // fix: output file was previously left behind in /tmp
        });

        // Pipes the fixture file through an EnsureFileSizeStream with the given
        // declared size and returns [pipelineError, outputBuffer].
        async function pipeWithSize(size) {
            const efs = new EnsureFileSizeStream({ name, size });
            const ins = fs.createReadStream(inputPath);
            const outs = fs.createWriteStream(outputPath);
            const [error] = await safe(stream.pipeline(ins, efs, outs));
            return [error, fs.readFileSync(outputPath)];
        }

        it('correct size', async function () {
            const [error, out] = await pipeWithSize(22);
            expect(error).to.be(null);
            expect(out).to.eql(data);
        });

        it('overflow', async function () {
            // input larger than declared size: stream truncates without erroring
            const [error, out] = await pipeWithSize(20);
            expect(error).to.be(null);
            expect(out).to.eql(data.subarray(0, 20));
        });

        it('underflow', async function () {
            // input smaller than declared size: stream zero-pads without erroring
            const [error, out] = await pipeWithSize(30);
            expect(error).to.be(null);
            expect(out).to.eql(Buffer.concat([data, Buffer.alloc(8)]));
        });
    });

    describe('fs meta data', function () {
        let tmpdir;

        before(function () {
            tmpdir = fs.mkdtempSync(path.join(os.tmpdir(), 'backups-test'));
        });

        after(function () {
            fs.rmSync(tmpdir, { recursive: true, force: true });
        });

        // NOTE: 'restores special files' depends on the metadata file and dir2/
        // created by 'saves special files'; the two tests must run in order.
        it('saves special files', async function () {
            createTree(tmpdir, { 'data': { 'subdir': { 'emptydir': { } } }, 'dir2': { 'file': 'stuff' } });
            fs.chmodSync(path.join(tmpdir, 'dir2/file'), parseInt('0755', 8));

            const dataLayout = new DataLayout(tmpdir, []);

            await rsync._saveFsMetadata(dataLayout, `${dataLayout.localRoot()}/fsmetadata.json`);

            // fix: parse the metadata file once instead of reading/parsing it twice
            const metadata = JSON.parse(fs.readFileSync(path.join(tmpdir, 'fsmetadata.json'), 'utf8'));

            expect(metadata.emptyDirs).to.eql(['./data/subdir/emptydir']);
            expect(metadata.execFiles).to.eql(['./dir2/file']);
        });

        it('restores special files', async function () {
            fs.rmSync(path.join(tmpdir, 'data'), { recursive: true, force: true });

            expect(fs.existsSync(path.join(tmpdir, 'data/subdir/emptydir'))).to.be(false); // just make sure rimraf worked

            const dataLayout = new DataLayout(tmpdir, []);

            await rsync._restoreFsMetadata(dataLayout, `${dataLayout.localRoot()}/fsmetadata.json`);

            expect(fs.existsSync(path.join(tmpdir, 'data/subdir/emptydir'))).to.be(true);
            const mode = fs.statSync(path.join(tmpdir, 'dir2/file')).mode;
            // mask off the S_IFREG file-type bit to compare only the permission bits
            expect(mode & ~fs.constants.S_IFREG).to.be(parseInt('0755', 8));
        });
    });
});
|