Migrate codebase from CommonJS to ES Modules
- Convert all require()/module.exports to import/export across 260+ files
- Add "type": "module" to package.json to enable ESM by default
- Add migrations/package.json with "type": "commonjs" to keep db-migrate compatible (sketch below)
- Convert eslint.config.js to ESM with sourceType: "module"
- Replace __dirname/__filename with import.meta.dirname/import.meta.filename
- Replace require.main === module with process.argv[1] === import.meta.filename (sketch below)
- Remove 'use strict' directives (implicit in ESM)
- Convert dynamic require() in switch statements to static import lookup maps (dns.js, domains.js, backupformats.js, backupsites.js, network.js; sketch below)
- Extract self-referencing exports.CONSTANT patterns into standalone const declarations (apps.js, services.js, locks.js, users.js, mail.js, etc.; sketch below)
- Lazify the SERVICES object in services.js to avoid temporal dead zone (TDZ) issues from circular dependencies (sketch below)
- Add clearMailQueue() to mailer.js for ESM-safe queue clearing in tests
- Add _setMockApp() to ldapserver.js for ESM-safe test mocking
- Add _setMockResolve() wrapper to dig.js for ESM-safe DNS mocking in tests (sketch below)
- Convert backupupload.js to use dynamic imports so --check exits before loading the module graph (which requires BOX_ENV; sketch below)
- Update check-install to use an ESM import for infra_version.js
- Convert scripts/ (hotfix, release, remote_hotfix.js, find-unused-translations)
- All 1315 tests passing

Migration stats (AI-assisted using Cursor with Claude):
- Wall clock time: ~3-4 hours
- Assistant completions: ~80-100
- Estimated token usage: ~1-2M tokens

Co-authored-by: Cursor <cursoragent@cursor.com>
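Node determines each .js file's module format from the nearest enclosing package.json, so a one-line nested manifest at migrations/package.json keeps db-migrate's CommonJS migration files loading as CJS even though the root package.json now declares "type": "module". The nested file is just:

```json
{
    "type": "commonjs"
}
```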
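The entry-point guard maps over directly: import.meta.filename (Node 20.11+) is the absolute path of the current module, compared against the script path Node was started with. A minimal sketch:

```js
function main() {
    console.log('running as a script');
}

// ESM replacement for the CJS `if (require.main === module) main();` guard.
// Caveat: process.argv[1] reflects the path given on the command line, so
// this assumes scripts are invoked by their full file path (with extension).
if (process.argv[1] === import.meta.filename) {
    main();
}
```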
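The static import lookup maps replace the old pattern of calling require() inside a switch to pick a backend at runtime, which ESM cannot express synchronously. All backends are imported up front and selected by key. A sketch of the shape; the backend module names are illustrative, not the actual contents of dns.js:

```js
import * as route53 from './dns/route53.js';       // illustrative backends,
import * as cloudflare from './dns/cloudflare.js'; // not the real dns.js list
import * as manual from './dns/manual.js';

// One static map replaces the per-case dynamic require().
const BACKENDS = {
    route53,
    cloudflare,
    manual
};

export function backend(provider) {
    const b = BACKENDS[provider];
    if (!b) throw new Error(`unknown provider: ${provider}`);
    return b;
}
```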
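The self-referencing exports pattern is a CJS idiom where a module reads its own exports object (exports.CONSTANT) later in the same file. ESM has no mutable exports object, so the value becomes a plain const shared by the module body and the export statement. A sketch with a hypothetical constant, not the real locks.js contents:

```js
// Before (CJS), the module read its own exports object:
//   exports.LOCK_FULL = 'full';
//   exports.acquire = function () { return locks.get(exports.LOCK_FULL); };

// After (ESM): declare once, reference the local binding, then export it.
const LOCK_FULL = 'full'; // hypothetical name

const locks = new Map();

export function acquire() {
    return locks.get(LOCK_FULL);
}

export { LOCK_FULL };
```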
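Lazifying SERVICES matters because ESM evaluates circularly imported modules in dependency order: a top-level object literal that reads a binding from a circular partner can throw a ReferenceError while that partner's binding is still in its temporal dead zone. Building the object on first access defers the read until after the whole graph has evaluated. A sketch, with the partner module and constant invented for illustration:

```js
import * as apps from './apps.js'; // hypothetical circular partner module

let gServices = null;

export function getServices() {
    // Built on first call, which happens after module evaluation, so
    // reading apps.* here can no longer hit the temporal dead zone.
    if (gServices === null) {
        gServices = {
            mysql: { memoryLimit: apps.DEFAULT_MEMORY_LIMIT }
        };
    }
    return gServices;
}
```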
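The _setMockResolve()/_setMockApp()/clearMailQueue() additions all exist for the same reason: ESM namespace objects are frozen, so tests can no longer monkey-patch a module's exports the way they could with CJS module.exports. Each module instead exposes an explicit test hook that swaps an internal indirection. A sketch of the shape (the real dig.js signatures may differ):

```js
import dns from 'node:dns/promises';

// The real resolver sits behind an indirection that tests can replace.
let resolveImpl = (hostname, rrtype) => dns.resolve(hostname, rrtype);

export function resolve(hostname, rrtype) {
    return resolveImpl(hostname, rrtype);
}

// Test-only hook; calling it with no argument restores the real resolver.
export function _setMockResolve(fn) {
    resolveImpl = fn || ((hostname, rrtype) => dns.resolve(hostname, rrtype));
}
```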
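Moving backupupload.js to dynamic imports lets --check return before anything that needs BOX_ENV is evaluated; static imports are hoisted and loaded before any code runs, regardless of argv. A sketch under that assumption (module and function names are illustrative):

```js
// Handle --check before touching the module graph, so the flag works
// even in an environment where BOX_ENV is not set.
if (process.argv.includes('--check')) {
    console.log('OK');
    process.exit(0);
}

// Only now load the heavy dependencies; top-level await is valid in ESM.
const { uploadBackup } = await import('./backuptask.js'); // illustrative
await uploadBackup(process.argv[2]);
```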
@@ -1,26 +1,25 @@
 /* global it:false */
+
+import * as backupSites from '../backupsites.js';
+import BoxError from '../boxerror.js';
+import * as common from './common.js';
+import consumers from 'node:stream/consumers';
+import { execSync } from 'node:child_process';
+import expect from 'expect.js';
+import * as filesystem from '../storage/filesystem.js';
+import fs from 'node:fs';
+import * as gcs from '../storage/gcs.js';
+import os from 'node:os';
+import path from 'node:path';
+import * as s3 from '../storage/s3.js';
+import safe from 'safetydance';
+import stream from 'stream/promises';
+
 /* global describe:false */
 /* global before:false */
 /* global after:false */
 /* global xit:false */
 
-'use strict';
-
-const backupSites = require('../backupsites.js'),
-    BoxError = require('../boxerror.js'),
-    common = require('./common.js'),
-    consumers = require('node:stream/consumers'),
-    execSync = require('node:child_process').execSync,
-    expect = require('expect.js'),
-    filesystem = require('../storage/filesystem.js'),
-    fs = require('node:fs'),
-    gcs = require('../storage/gcs.js'),
-    os = require('node:os'),
-    path = require('node:path'),
-    s3 = require('../storage/s3.js'),
-    safe = require('safetydance'),
-    stream = require('stream/promises');
-
 describe('Storage', function () {
     const { setup, cleanup, getDefaultBackupSite, auditSource } = common;
@@ -62,7 +61,7 @@ describe('Storage', function () {
     });
 
     it('can upload', async function () {
-        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
+        const sourceFile = path.join(import.meta.dirname, 'storage/data/test.txt');
         const sourceStream = fs.createReadStream(sourceFile);
         const destFile = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, '/uploadtest/test.txt');
         const uploader = await filesystem.upload(gBackupConfig, {}, 'uploadtest/test.txt');
@@ -73,7 +72,7 @@ describe('Storage', function () {
     });
 
     xit('upload waits for empty file to be created', async function () {
-        const sourceFile = path.join(__dirname, 'storage/data/empty');
+        const sourceFile = path.join(import.meta.dirname, 'storage/data/empty');
         const sourceStream = fs.createReadStream(sourceFile);
         const destFile = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, '/uploadtest/empty');
         const uploader = await filesystem.upload(gBackupConfig, {}, destFile);
@@ -84,7 +83,7 @@ describe('Storage', function () {
     });
 
     it('upload unlinks old file', async function () {
-        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
+        const sourceFile = path.join(import.meta.dirname, 'storage/data/test.txt');
         const sourceStream = fs.createReadStream(sourceFile);
         const destFile = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, '/uploadtest/test.txt');
         const oldStat = fs.statSync(destFile);
@@ -111,7 +110,7 @@ describe('Storage', function () {
     });
 
     it('list dir lists the source dir', async function () {
-        const sourceDir = path.join(__dirname, 'storage');
+        const sourceDir = path.join(import.meta.dirname, 'storage');
         execSync(`cp -r ${sourceDir} ${gBackupConfig.backupDir}/${gBackupConfig.prefix}`, { encoding: 'utf8' });
 
         let allFiles = [], marker = null;
@@ -242,7 +241,7 @@ describe('Storage', function () {
     });
 
     it('can upload', async function () {
-        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
+        const sourceFile = path.join(import.meta.dirname, 'storage/data/test.txt');
         const sourceStream = fs.createReadStream(sourceFile);
         const destKey = 'uploadtest/test.txt';
         const uploader = await s3.upload(backupConfig, {}, destKey);
@@ -276,7 +275,7 @@ describe('Storage', function () {
         fs.writeFileSync(path.join(bucketPath, 'uploadtest/C++.gitignore'), 'special', 'utf8');
 
         await s3.copyDir(backupConfig, {}, 'uploadtest', 'uploadtest-copy', () => {});
-        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
+        const sourceFile = path.join(import.meta.dirname, 'storage/data/test.txt');
         expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/test.txt')).size).to.be(fs.statSync(sourceFile).size);
         expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/C++.gitignore')).size).to.be(7);
     });
@@ -379,7 +378,7 @@ describe('Storage', function () {
     });
 
     it('can upload', async function () {
-        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
+        const sourceFile = path.join(import.meta.dirname, 'storage/data/test.txt');
         const sourceStream = fs.createReadStream(sourceFile);
         const destKey = 'uploadtest/test.txt';
         const uploader = await gcs.upload(backupConfig, {}, destKey);
@@ -413,7 +412,7 @@ describe('Storage', function () {
         fs.writeFileSync(path.join(bucketPath, 'uploadtest/C++.gitignore'), 'special', 'utf8');
 
         await gcs.copyDir(backupConfig, {}, 'uploadtest', 'uploadtest-copy', () => {});
-        const sourceFile = path.join(__dirname, 'storage/data/test.txt');
+        const sourceFile = path.join(import.meta.dirname, 'storage/data/test.txt');
         expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/test.txt')).size).to.be(fs.statSync(sourceFile).size);
         expect(fs.statSync(path.join(bucketPath, 'uploadtest-copy/C++.gitignore')).size).to.be(7);
     });