eslint: add no-shadow

This commit is contained in:
Girish Ramakrishnan
2026-02-18 08:18:37 +01:00
parent 4d3e9dc49b
commit 4ed6fbbd74
40 changed files with 250 additions and 249 deletions

View File

@@ -15,7 +15,8 @@ export default [
rules: {
semi: "error",
"prefer-const": "error",
"no-use-before-define": "error"
"no-use-before-define": "error",
"no-shadow": "error"
}
}
];

View File

@@ -413,7 +413,7 @@ function postProcess(result) {
delete result.servicesConfigJson;
const subdomains = JSON.parse(result.subdomains),
domains = JSON.parse(result.domains),
parsedDomains = JSON.parse(result.domains),
subdomainTypes = JSON.parse(result.subdomainTypes),
subdomainEnvironmentVariables = JSON.parse(result.subdomainEnvironmentVariables),
subdomainCertificateJsons = JSON.parse(result.subdomainCertificateJsons);
@@ -428,7 +428,7 @@ function postProcess(result) {
result.redirectDomains = [];
result.aliasDomains = [];
for (let i = 0; i < subdomainTypes.length; i++) {
const subdomain = subdomains[i], domain = domains[i], certificate = safe.JSON.parse(subdomainCertificateJsons[i]);
const subdomain = subdomains[i], domain = parsedDomains[i], certificate = safe.JSON.parse(subdomainCertificateJsons[i]);
if (subdomainTypes[i] === Location.TYPE_PRIMARY) {
result.subdomain = subdomain;
@@ -504,14 +504,14 @@ function accessLevel(app, user) {
return canAccess(app, user) ? ACCESS_LEVEL_USER : ACCESS_LEVEL_NONE;
}
function pickFields(app, accessLevel) {
function pickFields(app, level) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof accessLevel, 'string');
assert.strictEqual(typeof level, 'string');
if (accessLevel === ACCESS_LEVEL_NONE) return null; // cannot happen!
if (level === ACCESS_LEVEL_NONE) return null; // cannot happen!
let result;
if (accessLevel === ACCESS_LEVEL_USER) {
if (level === ACCESS_LEVEL_USER) {
result = _.pick(app, [
'id', 'appStoreId', 'versionsUrl', 'installationState', 'error', 'runState', 'health', 'taskId', 'accessRestriction',
'secondaryDomains', 'redirectDomains', 'aliasDomains', 'sso', 'subdomain', 'domain', 'fqdn', 'certificate',
@@ -632,10 +632,10 @@ async function add(id, appStoreId, versionsUrl, manifest, subdomain, domain, por
args: [ id, domain, subdomain, Location.TYPE_PRIMARY ]
});
Object.keys(portBindings).forEach(function (env) {
Object.keys(portBindings).forEach(function (portEnv) {
queries.push({
query: 'INSERT INTO appPortBindings (environmentVariable, hostPort, type, appId, count) VALUES (?, ?, ?, ?, ?)',
args: [ env, portBindings[env].hostPort, portBindings[env].type, id, portBindings[env].count ]
args: [ portEnv, portBindings[portEnv].hostPort, portBindings[portEnv].type, id, portBindings[portEnv].count ]
});
});
@@ -1531,7 +1531,7 @@ async function downloadFile(app, filePath) {
for (;;) {
if (this._buffer.length < 8) break; // header is 8 bytes
const type = this._buffer.readUInt8(0);
const streamType = this._buffer.readUInt8(0);
const len = this._buffer.readUInt32BE(4);
if (this._buffer.length < (8 + len)) break; // not enough
@@ -1540,7 +1540,7 @@ async function downloadFile(app, filePath) {
this._buffer = this._buffer.slice(8+len); // consumed
if (type === 1) this.push(payload);
if (streamType === 1) this.push(payload);
}
callback();
@@ -2444,12 +2444,12 @@ async function restore(app, backupId, auditSource) {
if (error) throw error;
// for empty or null backupId, use existing manifest to mimic a reinstall
const backup = backupId ? await backups.get(backupId) : { manifest: app.manifest };
if (!backup) throw new BoxError(BoxError.BAD_FIELD, 'No such backup');
const manifest = backup.manifest;
const restoreBackup = backupId ? await backups.get(backupId) : { manifest: app.manifest };
if (!restoreBackup) throw new BoxError(BoxError.BAD_FIELD, 'No such backup');
const manifest = restoreBackup.manifest;
if (!manifest) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Could not get restore manifest');
if (backup.encryptionVersion === 1) throw new BoxError(BoxError.BAD_FIELD, 'This encrypted backup was created with an older Cloudron version and has to be restored using the CLI tool');
if (restoreBackup.encryptionVersion === 1) throw new BoxError(BoxError.BAD_FIELD, 'This encrypted backup was created with an older Cloudron version and has to be restored using the CLI tool');
// re-validate because this new box version may not accept old configs
error = await checkManifest(manifest);
@@ -2468,7 +2468,7 @@ async function restore(app, backupId, auditSource) {
values.inboxName = values.inboxDomain = null;
}
const restoreConfig = { backupId: backup.id };
const restoreConfig = { backupId: restoreBackup.id };
const task = {
args: {
@@ -2482,7 +2482,7 @@ async function restore(app, backupId, auditSource) {
const taskId = await addTask(appId, ISTATE_PENDING_RESTORE, task, auditSource);
await eventlog.add(eventlog.ACTION_APP_RESTORE, auditSource, { app, backupId: backup.id, remotePath: backup.remotePath, fromManifest: app.manifest, toManifest: manifest, taskId });
await eventlog.add(eventlog.ACTION_APP_RESTORE, auditSource, { app, backupId: restoreBackup.id, remotePath: restoreBackup.remotePath, fromManifest: app.manifest, toManifest: manifest, taskId });
return { taskId };
}
@@ -2573,13 +2573,13 @@ async function clone(app, data, user, auditSource) {
assert.strictEqual(typeof subdomain, 'string');
assert.strictEqual(typeof domain, 'string');
const backup = await backups.get(backupId);
const cloneBackup = await backups.get(backupId);
if (!backup) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
if (!backup.manifest) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Could not detect restore manifest');
if (backup.encryptionVersion === 1) throw new BoxError(BoxError.BAD_FIELD, 'This encrypted backup was created with an older Cloudron version and cannot be cloned');
if (!cloneBackup) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
if (!cloneBackup.manifest) throw new BoxError(BoxError.EXTERNAL_ERROR, 'Could not detect restore manifest');
if (cloneBackup.encryptionVersion === 1) throw new BoxError(BoxError.BAD_FIELD, 'This encrypted backup was created with an older Cloudron version and cannot be cloned');
const manifest = backup.manifest, appStoreId = app.appStoreId, versionsUrl = app.versionsUrl;
const manifest = cloneBackup.manifest, appStoreId = app.appStoreId, versionsUrl = app.versionsUrl;
let error = validateSecondaryDomains(data.secondaryDomains || {}, manifest);
if (error) throw error;
@@ -2606,7 +2606,7 @@ async function clone(app, data, user, auditSource) {
const newAppId = crypto.randomUUID();
// label, checklist intentionally omitted. icon is loaded in apptask from the backup
const dolly = _.pick(backup.appConfig || app, ['memoryLimit', 'cpuQuota', 'crontab', 'reverseProxyConfig', 'env', 'servicesConfig', 'tags', 'devices',
const dolly = _.pick(cloneBackup.appConfig || app, ['memoryLimit', 'cpuQuota', 'crontab', 'reverseProxyConfig', 'env', 'servicesConfig', 'tags', 'devices',
'enableMailbox', 'mailboxDisplayName', 'mailboxName', 'mailboxDomain', 'enableInbox', 'inboxName', 'inboxDomain', 'debugMode',
'enableTurn', 'enableRedis', 'mounts', 'enableBackup', 'enableAutomaticUpdate', 'accessRestriction', 'operators', 'sso',
'notes', 'checklist']);
@@ -2628,7 +2628,7 @@ async function clone(app, data, user, auditSource) {
if (addError && addError.reason === BoxError.ALREADY_EXISTS) throw getDuplicateErrorDetails(addError.message, locations, portBindings);
if (addError) throw addError;
const restoreConfig = { backupId: backup.id };
const restoreConfig = { backupId: cloneBackup.id };
const task = {
args: { restoreConfig, overwriteDns, skipDnsSetup, oldManifest: null },
values: {},
@@ -2642,24 +2642,24 @@ async function clone(app, data, user, auditSource) {
newApp.redirectDomains.forEach(function (ad) { ad.fqdn = dns.fqdn(ad.subdomain, ad.domain); });
newApp.aliasDomains.forEach(function (ad) { ad.fqdn = dns.fqdn(ad.subdomain, ad.domain); });
await eventlog.add(eventlog.ACTION_APP_CLONE, auditSource, { appId: newAppId, oldAppId: app.id, backupId, remotePath: backup.remotePath, oldApp: app, newApp, taskId });
await eventlog.add(eventlog.ACTION_APP_CLONE, auditSource, { appId: newAppId, oldAppId: app.id, backupId, remotePath: cloneBackup.remotePath, oldApp: app, newApp, taskId });
return { id: newAppId, taskId };
}
async function unarchive(archive, data, auditSource) {
assert.strictEqual(typeof archive, 'object');
async function unarchive(archiveEntry, data, auditSource) {
assert.strictEqual(typeof archiveEntry, 'object');
assert.strictEqual(typeof data, 'object');
assert(auditSource && typeof auditSource === 'object');
const backup = await backups.get(archive.backupId);
const restoreConfig = { backupId: backup.id };
const archiveBackup = await backups.get(archiveEntry.backupId);
const restoreConfig = { backupId: archiveBackup.id };
const subdomain = data.subdomain.toLowerCase(),
domain = data.domain.toLowerCase(),
overwriteDns = 'overwriteDns' in data ? data.overwriteDns : false;
const manifest = backup.manifest, appStoreId = backup.manifest.id, versionsUrl = backup.appConfig?.versionsUrl || '';
const manifest = archiveBackup.manifest, appStoreId = archiveBackup.manifest.id, versionsUrl = archiveBackup.appConfig?.versionsUrl || '';
let error = validateSecondaryDomains(data.secondaryDomains || {}, manifest);
if (error) throw error;
@@ -2682,7 +2682,7 @@ async function unarchive(archive, data, auditSource) {
const appId = crypto.randomUUID();
// appConfig is null for pre-8.2 backups
const dolly = _.pick(backup.appConfig || {}, ['memoryLimit', 'cpuQuota', 'crontab', 'reverseProxyConfig', 'env', 'servicesConfig',
const dolly = _.pick(archiveBackup.appConfig || {}, ['memoryLimit', 'cpuQuota', 'crontab', 'reverseProxyConfig', 'env', 'servicesConfig',
'tags', 'label', 'enableMailbox', 'mailboxDisplayName', 'mailboxName', 'mailboxDomain', 'enableInbox', 'inboxName', 'inboxDomain', 'devices',
'enableTurn', 'enableRedis', 'mounts', 'enableBackup', 'enableAutomaticUpdate', 'accessRestriction', 'operators', 'sso',
'notes', 'checklist']);
@@ -2695,9 +2695,9 @@ async function unarchive(archive, data, auditSource) {
mailboxDomain: data.domain, // archive's mailboxDomain may not exist
runState: RSTATE_RUNNING,
installationState: ISTATE_PENDING_INSTALL,
sso: backup.appConfig ? backup.appConfig.sso : true // when no appConfig take a blind guess
sso: archiveBackup.appConfig ? archiveBackup.appConfig.sso : true // when no appConfig take a blind guess
});
obj.icon = (await archives.getIcons(archive.id))?.icon;
obj.icon = (await archives.getIcons(archiveEntry.id))?.icon;
const [addError] = await safe(add(appId, appStoreId, versionsUrl, manifest, subdomain, domain, portBindings, obj));
if (addError && addError.reason === BoxError.ALREADY_EXISTS) throw getDuplicateErrorDetails(addError.message, locations, portBindings);
@@ -2831,8 +2831,8 @@ async function backup(app, backupSiteId, auditSource) {
// background
tasks.startTask(taskId, { timeout: 24 * 60 * 60 * 1000 /* 24 hours */, nice: 15, memoryLimit, oomScoreAdjust: -999 })
.then(async (backupId) => {
const backup = await backups.get(backupId); // if task crashed, no result
await eventlog.add(eventlog.ACTION_APP_BACKUP_FINISH, auditSource, { app, success: !!backup, errorMessage: '', remotePath: backup?.remotePath, backupId: backupId });
const completedBackup = await backups.get(backupId); // if task crashed, no result
await eventlog.add(eventlog.ACTION_APP_BACKUP_FINISH, auditSource, { app, success: !!completedBackup, errorMessage: '', remotePath: completedBackup?.remotePath, backupId: backupId });
})
.catch(async (error) => {
await eventlog.add(eventlog.ACTION_APP_BACKUP_FINISH, auditSource, { app, success: false, errorMessage: error.message });
@@ -2851,28 +2851,28 @@ async function updateBackup(app, backupId, data) {
assert.strictEqual(typeof backupId, 'string');
assert.strictEqual(typeof data, 'object');
const backup = await backups.get(backupId);
if (!backup) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
if (backup.identifier !== app.id) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found'); // some other app's backup
const appBackup = await backups.get(backupId);
if (!appBackup) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
if (appBackup.identifier !== app.id) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found'); // some other app's backup
await backups.update(backup, data);
await backups.update(appBackup, data);
}
async function getBackupDownloadStream(app, backupId) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof backupId, 'string');
const backup = await backups.get(backupId);
if (!backup) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
if (backup.identifier !== app.id) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found'); // some other app's backup
const downloadBackup = await backups.get(backupId);
if (!downloadBackup) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found');
if (downloadBackup.identifier !== app.id) throw new BoxError(BoxError.NOT_FOUND, 'Backup not found'); // some other app's backup
const backupSite = await backupSites.get(backup.siteId);
const backupSite = await backupSites.get(downloadBackup.siteId);
if (!backupSite) throw new BoxError(BoxError.NOT_FOUND, 'Backup site not found'); // not possible
if (backupSite.format !== 'tgz') throw new BoxError(BoxError.BAD_STATE, 'only tgz backups can be downloaded');
const ps = new PassThrough();
const stream = await backupSites.storageApi(backupSite).download(backupSite.config, backup.remotePath);
const stream = await backupSites.storageApi(backupSite).download(backupSite.config, downloadBackup.remotePath);
stream.on('error', function(error) {
debug(`getBackupDownloadStream: read stream error: ${error.message}`);
ps.emit('error', new BoxError(BoxError.EXTERNAL_ERROR, error));
@@ -2880,7 +2880,7 @@ async function getBackupDownloadStream(app, backupId) {
stream.pipe(ps);
const now = (new Date()).toISOString().replace(/:|T/g,'-').replace(/\..*/,'');
const encryptionSuffix = backup.encryptionVersion ? '.enc' : '';
const encryptionSuffix = downloadBackup.encryptionVersion ? '.enc' : '';
const filename = `app-backup-${now} (${app.fqdn}).tar.gz${encryptionSuffix}`;
return { stream: ps, filename };

View File

@@ -102,8 +102,8 @@ async function deleteAppDir(app, options) {
// besides, we cannot delete those dirs anyway because of perms
for (const entry of entries) {
const fullPath = path.join(resolvedAppDataDir, entry);
const stat = safe.fs.statSync(fullPath);
if (stat && !stat.isDirectory()) {
const entryStat = safe.fs.statSync(fullPath);
if (entryStat && !entryStat.isDirectory()) {
safe.fs.unlinkSync(fullPath);
debug(`deleteAppDir - ${fullPath} ${safe.error?.message || ''}`);
}

View File

@@ -342,9 +342,9 @@ async function startBackupTask(site, auditSource) {
.then(async (result) => { // this can be an array or string depending on site.contents
await eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, result, siteId: site.id, siteName: site.name });
})
.catch(async (error) => {
const timedOut = error.code === tasks.ETIMEOUT;
await safe(eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, errorMessage: error.message, timedOut, siteId: site.id, siteName: site.name }));
.catch(async (backupError) => {
const timedOut = backupError.code === tasks.ETIMEOUT;
await safe(eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, errorMessage: backupError.message, timedOut, siteId: site.id, siteName: site.name }));
})
.finally(async () => {
await locks.release(`${locks.TYPE_FULL_BACKUP_TASK_PREFIX}${site.id}`);

View File

@@ -554,11 +554,11 @@ async function fullBackup(backupSiteId, options, progressCallback) {
progressCallback({ percent, message: `Backing up ${app.fqdn} (${i+1}/${allApps.length}). Waiting for lock` });
await locks.wait(`${locks.TYPE_APP_BACKUP_PREFIX}${app.id}`);
const startTime = new Date();
const [appBackupError, appBackup] = await safe(backupAppWithTag(app, backupSite, tag, options, (progress) => progressCallback({ percent, message: progress.message })));
const [appBackupError, appBackupResult] = await safe(backupAppWithTag(app, backupSite, tag, options, (progress) => progressCallback({ percent, message: progress.message })));
debug(`fullBackup: app ${app.fqdn} backup finished. Took ${(new Date() - startTime)/1000} seconds`);
await locks.release(`${locks.TYPE_APP_BACKUP_PREFIX}${app.id}`);
if (appBackupError) throw appBackupError;
if (appBackup) appBackupsMap.set(appBackup.id, appBackup.stats); // backupId can be null if in BAD_STATE and never backed up
if (appBackupResult) appBackupsMap.set(appBackupResult.id, appBackupResult.stats); // backupId can be null if in BAD_STATE and never backed up
}
if (!backupSites.hasContent(backupSite, 'box')) return [...appBackupsMap.keys()];

View File

@@ -45,17 +45,17 @@ async function transaction(queries) {
try {
await connection.beginTransaction();
const results = [];
for (const query of queries) {
const [rows /*, fields */] = await connection.query(query.query, query.args);
for (const queryItem of queries) {
const [rows /*, fields */] = await connection.query(queryItem.query, queryItem.args);
results.push(rows);
}
await connection.commit();
connection.release(); // no await!
return results;
} catch (error) {
} catch (txError) {
await safe(connection.rollback(), { debug });
connection.release(); // no await!
throw new BoxError(BoxError.DATABASE_ERROR, error, { sqlCode: error.code, sqlMessage: error.sqlMessage || null });
throw new BoxError(BoxError.DATABASE_ERROR, txError, { sqlCode: txError.code, sqlMessage: txError.sqlMessage || null });
}
}
@@ -118,20 +118,20 @@ async function runInTransaction(callback) {
try {
await connection.beginTransaction();
const query = async (...args) => {
const [error, result] = await safe(connection.query(...args)); // this is same as getConnection/query/release
if (error) throw new BoxError(BoxError.DATABASE_ERROR, error, { sqlCode: error.code, sqlMessage: error.sqlMessage || null });
const queryFn = async (...args) => {
const [queryError, result] = await safe(connection.query(...args)); // this is same as getConnection/query/release
if (queryError) throw new BoxError(BoxError.DATABASE_ERROR, queryError, { sqlCode: queryError.code, sqlMessage: queryError.sqlMessage || null });
return result[0]; // the promise version returns a tuple of [rows, fields]
};
const result = await callback(query);
const result = await callback(queryFn);
await connection.commit();
connection.release(); // no await!
return result;
} catch (error) {
} catch (txError) {
await safe(connection.rollback(), { debug });
connection.release(); // no await!
throw new BoxError(BoxError.DATABASE_ERROR, error, { sqlCode: error.code, sqlMessage: error.sqlMessage || null });
throw new BoxError(BoxError.DATABASE_ERROR, txError, { sqlCode: txError.code, sqlMessage: txError.sqlMessage || null });
}
}

View File

@@ -145,12 +145,12 @@ function finalSend(results, req, res, next) {
}
});
function sendPagedResults(start, end) {
start = (start < min) ? min : start;
function sendPagedResults(pageStart, end) {
pageStart = (pageStart < min) ? min : pageStart;
end = (end > max || end < min) ? max : end;
let i;
for (i = start; i < end; i++) {
for (i = pageStart; i < end; i++) {
res.send(results[i]);
}

View File

@@ -199,11 +199,11 @@ async function waitForLocations(locations, progressCallback) {
}
}
function makeWildcard(fqdn) {
assert.strictEqual(typeof fqdn, 'string');
function makeWildcard(recordFqdn) {
assert.strictEqual(typeof recordFqdn, 'string');
// if the fqdn is like *.example.com, this function will do nothing
const parts = fqdn.split('.');
const parts = recordFqdn.split('.');
parts[0] = '*';
return parts.join('.');
}
@@ -262,15 +262,15 @@ async function registerLocations(locations, options, progressCallback) {
if (ipv4) {
await registerLocation(location, options, 'A', ipv4);
} else {
const [error, values] = await safe(getDnsRecords(location.subdomain, location.domain, 'A'));
if (!error && values.length) await safe(removeDnsRecords(location.subdomain, location.domain, 'A', values), { debug });
const [aError, aValues] = await safe(getDnsRecords(location.subdomain, location.domain, 'A'));
if (!aError && aValues.length) await safe(removeDnsRecords(location.subdomain, location.domain, 'A', aValues), { debug });
}
if (ipv6) {
await registerLocation(location, options, 'AAAA', ipv6);
} else {
const [error, values] = await safe(getDnsRecords(location.subdomain, location.domain, 'AAAA'));
if (!error && values.length) await safe(removeDnsRecords(location.subdomain, location.domain, 'AAAA', values), { debug });
const [aaaaError, aaaaValues] = await safe(getDnsRecords(location.subdomain, location.domain, 'AAAA'));
if (!aaaaError && aaaaValues.length) await safe(removeDnsRecords(location.subdomain, location.domain, 'AAAA', aaaaValues), { debug });
}
});
}

View File

@@ -49,7 +49,7 @@ async function getZoneId(domainConfig, zoneName) {
if (response.status !== 200) throw new BoxError(BoxError.EXTERNAL_ERROR, formatError(response));
if (!Array.isArray(response.body.Items)) throw new BoxError(BoxError.EXTERNAL_ERROR, `Invalid records in response: ${response.text}`);
const item = response.body.Items.filter(item => item.Domain === zoneName);
const item = response.body.Items.filter(entry => entry.Domain === zoneName);
if (item.length === 0) throw new BoxError(BoxError.NOT_FOUND, 'Domain not found');
return item[0].Id;

View File

@@ -59,7 +59,7 @@ async function getZone(domainConfig, zoneName) {
if (!Array.isArray(response.body.data)) throw new BoxError(BoxError.EXTERNAL_ERROR, `Invalid data in response: ${JSON.stringify(response.body)}`);
const item = response.body.data.filter(item => item.name === zoneName);
const item = response.body.data.filter(entry => entry.name === zoneName);
if (item.length === 0) throw new BoxError(BoxError.NOT_FOUND, 'Domain not found');
return { accountId, zoneId: item[0].id };
}

View File

@@ -47,8 +47,8 @@ async function getZoneByName(domainConfig, zoneName) {
if (error && error.code === 404) throw new BoxError(BoxError.NOT_FOUND, error.message);
if (error) throw new BoxError(BoxError.EXTERNAL_ERROR, error);
const zone = result[0].filter(function (zone) {
return zone.metadata.dnsName.slice(0, -1) === zoneName; // the zone name contains a '.' at the end
const zone = result[0].filter(function (entry) {
return entry.metadata.dnsName.slice(0, -1) === zoneName; // the zone name contains a '.' at the end
})[0];
if (!zone) throw new BoxError(BoxError.NOT_FOUND, 'no such zone');

View File

@@ -59,8 +59,8 @@ async function getZoneByName(domainConfig, zoneName) {
if (error && error.code === 'InvalidClientTokenId') throw new BoxError(BoxError.ACCESS_DENIED, error.message);
if (error) throw new BoxError(BoxError.EXTERNAL_ERROR, error.message);
const zone = result.HostedZones.filter(function (zone) {
return zone.Name.slice(0, -1) === zoneName; // aws zone name contains a '.' at the end
const zone = result.HostedZones.filter(function (entry) {
return entry.Name.slice(0, -1) === zoneName; // aws zone name contains a '.' at the end
})[0];
if (!zone) throw new BoxError(BoxError.NOT_FOUND, 'no such zone');

View File

@@ -124,9 +124,9 @@ async function pullImage(imageRef) {
reject(new BoxError(layerError.message.includes('no space') ? BoxError.FS_ERROR : BoxError.DOCKER_ERROR, layerError.message));
});
stream.on('error', function (error) { // this is only hit for stream error and not for some download error
debug(`error pulling image ${imageRef}: %o`, error);
reject(new BoxError(BoxError.DOCKER_ERROR, error.message));
stream.on('error', function (streamError) { // this is only hit for stream error and not for some download error
debug(`error pulling image ${imageRef}: %o`, streamError);
reject(new BoxError(BoxError.DOCKER_ERROR, streamError.message));
});
});
}
@@ -166,9 +166,9 @@ async function buildImage(dockerImage, sourceArchiveFilePath) {
resolve();
});
stream.on('error', (error) => {
debug(`buildImage: error building image ${dockerImage}: %o`, error);
reject(new BoxError(BoxError.DOCKER_ERROR, error.message));
stream.on('error', (streamError) => {
debug(`buildImage: error building image ${dockerImage}: %o`, streamError);
reject(new BoxError(BoxError.DOCKER_ERROR, streamError.message));
});
});
}
@@ -255,8 +255,8 @@ async function getAddressesForPort53() {
const addresses = [];
for (const phy of physicalDevices) {
const [error, output] = await safe(shell.spawn('ip', ['-f', 'inet', '-j', 'addr', 'show', 'dev', phy.name, 'scope', 'global'], { encoding: 'utf8' }));
if (error) continue;
const [ipError, output] = await safe(shell.spawn('ip', ['-f', 'inet', '-j', 'addr', 'show', 'dev', phy.name, 'scope', 'global'], { encoding: 'utf8' }));
if (ipError) continue;
const inet = safe.JSON.parse(output) || [];
for (const r of inet) {
const address = safe.query(r, 'addr_info[0].local');
@@ -584,7 +584,7 @@ async function createSubcontainer(app, name, cmd, options) {
}
const appEnv = [];
Object.keys(app.env).forEach(function (name) { appEnv.push(`${name}=${app.env[name]}`); });
Object.keys(app.env).forEach(function (envName) { appEnv.push(`${envName}=${app.env[envName]}`); });
let memoryLimit = apps.getMemoryLimit(app);

View File

@@ -110,10 +110,10 @@ async function clientSearch(client, dn, searchOptions) {
const ldapObjects = [];
result.on('searchEntry', entry => ldapObjects.push(entry.object));
result.on('error', error => reject(new BoxError(BoxError.EXTERNAL_ERROR, `search error: ${error.message}`)));
result.on('error', searchError => reject(new BoxError(BoxError.EXTERNAL_ERROR, `search error: ${searchError.message}`)));
result.on('end', function (result) {
if (result.status !== 0) return reject(new BoxError(BoxError.EXTERNAL_ERROR, 'Server returned status ' + result.status));
result.on('end', function (searchResult) {
if (searchResult.status !== 0) return reject(new BoxError(BoxError.EXTERNAL_ERROR, 'Server returned status ' + searchResult.status));
resolve(ldapObjects);
});
@@ -481,7 +481,7 @@ async function syncGroupMembers(config, progressCallback) {
const userIds = [];
for (const memberDn of ldapGroupMembers) {
const [ldapError, result] = await safe(ldapGetByDN(config, memberDn));
const [ldapError, memberResult] = await safe(ldapGetByDN(config, memberDn));
if (ldapError) {
debug(`syncGroupMembers: Group ${ldapGroup.name} failed to get ${memberDn}: %o`, ldapError);
continue;
@@ -489,7 +489,7 @@ async function syncGroupMembers(config, progressCallback) {
debug(`syncGroupMembers: Group ${ldapGroup.name} has member object ${memberDn}`);
const username = result[config.usernameField]?.toLowerCase();
const username = memberResult[config.usernameField]?.toLowerCase();
if (!username) continue;
const [getError, userObject] = await safe(users.getByUsername(username));

View File

@@ -35,7 +35,7 @@ async function cleanupTmpVolume(containerInfo) {
gConnection.modem.demuxStream(stream, process.stdout, process.stderr);
return new Promise((resolve, reject) => {
stream.on('error', (error) => reject(new BoxError(BoxError.DOCKER_ERROR, `Failed to cleanup in exec container: ${error.message}`)));
stream.on('error', (streamError) => reject(new BoxError(BoxError.DOCKER_ERROR, `Failed to cleanup in exec container: ${streamError.message}`)));
stream.on('end', resolve);
});
}

View File

@@ -96,12 +96,12 @@ function finalSend(results, req, res, next) {
}
});
function sendPagedResults(start, end) {
start = (start < min) ? min : start;
function sendPagedResults(startIdx, end) {
startIdx = (startIdx < min) ? min : startIdx;
end = (end > max || end < min) ? max : end;
let i;
for (i = start; i < end; i++) {
for (i = startIdx; i < end; i++) {
res.send(results[i]);
}
@@ -312,14 +312,14 @@ async function mailboxSearch(req, res, next) {
for (const mailbox of mailboxes) {
if (!mailbox.active) continue;
const dn = ldap.parseDN(`cn=${mailbox.name}@${mailbox.domain},ou=mailboxes,dc=cloudron`);
const mailboxDn = ldap.parseDN(`cn=${mailbox.name}@${mailbox.domain},ou=mailboxes,dc=cloudron`);
if (mailbox.ownerType === mail.OWNERTYPE_APP) continue; // cannot login with app mailbox anyway
const [error, ownerObject] = await safe(mailbox.ownerType === mail.OWNERTYPE_USER ? users.get(mailbox.ownerId) : groups.get(mailbox.ownerId));
if (error || !ownerObject) continue; // skip mailboxes with unknown user
const [ownerError, ownerObject] = await safe(mailbox.ownerType === mail.OWNERTYPE_USER ? users.get(mailbox.ownerId) : groups.get(mailbox.ownerId));
if (ownerError || !ownerObject) continue; // skip mailboxes with unknown user
const obj = {
dn: dn.toString(),
dn: mailboxDn.toString(),
attributes: {
objectclass: ['mailbox'],
objectcategory: 'mailbox',

View File

@@ -520,8 +520,8 @@ async function checkRbl(type, mailDomain) {
for (const rblServer of RBL_LIST) {
if (type === 'ipv6' && rblServer[type] !== true) continue; // all support ipv4
const [error, records] = await safe(dig.resolve(`${flippedIp}.${rblServer.dns}`, 'A', DNS_OPTIONS));
if (error || records.length === 0) continue; // not listed
const [rblError, records] = await safe(dig.resolve(`${flippedIp}.${rblServer.dns}`, 'A', DNS_OPTIONS));
if (rblError || records.length === 0) continue; // not listed
debug(`checkRbl (${domain}) flippedIp: ${flippedIp} is in the blocklist of ${rblServer.dns}: ${JSON.stringify(records)}`);
@@ -753,8 +753,8 @@ async function setMailRelay(domain, relay, options) {
}
if (!options.skipVerify) {
const result = await checkSmtpRelay(relay);
if (result.status === 'failed') throw new BoxError(BoxError.BAD_FIELD, result.message);
const relayResult = await checkSmtpRelay(relay);
if (relayResult.status === 'failed') throw new BoxError(BoxError.BAD_FIELD, relayResult.message);
}
await updateDomain(domain, { relay });
@@ -993,16 +993,16 @@ async function setAliases(name, domain, aliases, auditSource) {
assert.strictEqual(typeof auditSource, 'object');
for (let i = 0; i < aliases.length; i++) {
const name = aliases[i].name.toLowerCase();
const domain = aliases[i].domain.toLowerCase();
const aliasName = aliases[i].name.toLowerCase();
const aliasDomain = aliases[i].domain.toLowerCase();
const error = validateAlias(name);
const error = validateAlias(aliasName);
if (error) throw error;
const mailDomain = await getDomain(domain);
if (!mailDomain) throw new BoxError(BoxError.NOT_FOUND, `mail domain ${domain} not found`);
const mailDomain = await getDomain(aliasDomain);
if (!mailDomain) throw new BoxError(BoxError.NOT_FOUND, `mail domain ${aliasDomain} not found`);
aliases[i] = { name, domain };
aliases[i] = { name: aliasName, domain: aliasDomain };
}
const results = await database.query('SELECT ' + MAILBOX_FIELDS + ' FROM mailboxes WHERE name = ? AND domain = ?', [ name, domain ]);

View File

@@ -286,7 +286,7 @@ async function startChangeLocation(subdomain, domain, auditSource) {
.then(async () => {
await platform.onMailServerLocationChanged(auditSource);
})
.catch((error) => debug(`startChangeLocation`, error));
.catch((taskError) => debug(`startChangeLocation`, taskError));
await eventlog.add(eventlog.ACTION_MAIL_LOCATION, auditSource, { subdomain, domain, taskId });
return taskId;

View File

@@ -129,8 +129,8 @@ class StorageAdapter {
if (client.grant_types) tmp.grant_types = client.grant_types;
if (client.appId) {
const [error, app] = await safe(apps.get(client.appId));
if (error || !app) {
const [appError, app] = await safe(apps.get(client.appId));
if (appError || !app) {
debug(`find: Unknown app for client with appId ${client.appId}`);
return null;
}
@@ -695,8 +695,8 @@ async function start() {
const [error, response] = await safe(superagent.get(`http://127.0.0.1:${constants.OIDC_PORT}${ROUTE_PREFIX}/jwks`));
if (error) return res.send(`Internal error: ${error?.message}`);
if (response.status !== 200) return res.send(`Internal error, unexpected status: ${response.status}`);
const jwksKeys = safe.JSON.parse(response.body.toString('utf8'));
const rsaKeys = jwksKeys?.keys?.filter(k => k.kty === 'RSA') || [];
const jwksResponse = safe.JSON.parse(response.body.toString('utf8'));
const rsaKeys = jwksResponse?.keys?.filter(k => k.kty === 'RSA') || [];
res.set('content-type', req.get('content-type')); // application/jwk-set+json; charset=utf-8
res.send({ keys: rsaKeys }); // https://github.com/panva/jose/discussions/654
});

View File

@@ -827,16 +827,16 @@ async function startExecWebSocket(req, res, next) {
res.handleUpgrade(function (ws) {
duplexStream.on('end', function () { ws.close(); });
duplexStream.on('close', function () { ws.close(); });
duplexStream.on('error', function (error) {
debug('duplexStream error: %o', error);
duplexStream.on('error', function (streamError) {
debug('duplexStream error: %o', streamError);
});
duplexStream.on('data', function (data) {
if (ws.readyState !== WebSocket.OPEN) return;
ws.send(data.toString());
});
ws.on('error', function (error) {
debug('websocket error: %o', error);
ws.on('error', function (wsError) {
debug('websocket error: %o', wsError);
});
ws.on('message', function (msg) {
duplexStream.write(msg);

View File

@@ -42,7 +42,7 @@ function proxy(kind) {
// sftpRes.on('end', () => next());
sftpRes.pipe(res);
});
sftpReq.on('error', (error) => next(new HttpError(424, `Unable to connect to filemanager: ${error.message} ${error.code}`)));
sftpReq.on('error', (reqError) => next(new HttpError(424, `Unable to connect to filemanager: ${reqError.message} ${reqError.code}`)));
if (!req.readable) {
sftpReq.end();
} else {

View File

@@ -32,11 +32,11 @@ async function proxyToMailContainer(port, pathname, req, res, next) {
const mailReq = http.request(opts, function (mailRes) {
res.writeHead(mailRes.statusCode, mailRes.headers);
mailRes.on('error', (error) => next(new HttpError(500, `mailserver error: ${error.message} ${error.code}`)));
mailRes.on('error', (resError) => next(new HttpError(500, `mailserver error: ${resError.message} ${resError.code}`)));
mailRes.on('end', () => next());
mailRes.pipe(res);
});
mailReq.on('error', (error) => next(new HttpError(424, `Unable to connect to mailserver: ${error.message} ${error.code}`)));
mailReq.on('error', (reqError) => next(new HttpError(424, `Unable to connect to mailserver: ${reqError.message} ${reqError.code}`)));
if (!req.readable) {
mailReq.end();
} else {

View File

@@ -96,11 +96,11 @@ async function getMetrics(req, res, next) {
const fromSecs = parseInt(req.query.fromSecs, 10);
const intervalSecs = parseInt(req.query.intervalSecs, 10);
const noNullPoints = typeof req.query.noNullPoints === 'string' ? (req.query.noNullPoints === '1' || req.query.noNullPoints === 'true') : false;
const system = req.query.system === 'true';
const includeSystem = req.query.system === 'true';
const appIds = 'appId' in req.query ? (Array.isArray(req.query.appId) ? req.query.appId : [ req.query.appId ]) : [];
const serviceIds = 'serviceId' in req.query ? (Array.isArray(req.query.serviceId) ? req.query.serviceId : [ req.query.serviceId ]) : [];
const [error, result] = await safe(metrics.get({ fromSecs, intervalSecs, noNullPoints, system, appIds, serviceIds }));
const [error, result] = await safe(metrics.get({ fromSecs, intervalSecs, noNullPoints, system: includeSystem, appIds, serviceIds }));
if (error) return next(new HttpError(500, error));
next(new HttpSuccess(200, result));
@@ -109,11 +109,11 @@ async function getMetrics(req, res, next) {
async function getMetricStream(req, res, next) {
if (req.headers.accept !== 'text/event-stream') return next(new HttpError(400, 'This API call requires EventStream'));
const system = req.query.system === 'true';
const includeSystem = req.query.system === 'true';
const appIds = 'appId' in req.query ? (Array.isArray(req.query.appId) ? req.query.appId : [ req.query.appId ]) : [];
const serviceIds = 'serviceId' in req.query ? (Array.isArray(req.query.serviceId) ? req.query.serviceId : [ req.query.serviceId ]) : [];
const [error, metricStream] = await safe(metrics.getStream({ system, appIds, serviceIds }));
const [error, metricStream] = await safe(metrics.getStream({ system: includeSystem, appIds, serviceIds }));
if (error) return next(BoxError.toHttpError(error));
res.writeHead(200, {
@@ -175,8 +175,8 @@ async function getFilesystemUsage(req, res, next) {
const sse = `data: ${JSON.stringify(obj)}\n\n`;
res.write(sse);
});
task.on('done', function (error) {
const obj = { type: 'done', ...error };
task.on('done', function (doneError) {
const obj = { type: 'done', ...doneError };
const sse = `data: ${JSON.stringify(obj)}\n\n`;
res.write(sse);
res.end();

View File

@@ -420,7 +420,7 @@ describe('Profile API', function () {
});
describe('avatar', function () {
let customAvatarSize = 0;
let avatarSize = 0;
it('empty by default', async function () {
const response = await superagent.get(`${serverUrl}/api/v1/profile/avatar/${user.id}`).ok(() => true);
@@ -432,7 +432,7 @@ describe('Profile API', function () {
.query({ access_token: user.token })
.attach('avatar', './logo.png');
customAvatarSize = fs.readFileSync('./logo.png').length;
avatarSize = fs.readFileSync('./logo.png').length;
expect(response.status).to.be(204);
});
@@ -441,7 +441,7 @@ describe('Profile API', function () {
const response = await superagent.get(`${serverUrl}/api/v1/profile/avatar/${user.id}`)
.ok(() => true);
expect(parseInt(response.headers['content-length'])).to.equal(customAvatarSize);
expect(parseInt(response.headers['content-length'])).to.equal(avatarSize);
expect(response.status).to.equal(200);
});

View File

@@ -270,8 +270,8 @@ describe('Users API', function () {
expect(response.status).to.equal(200);
expect(response.body.users).to.be.an('array');
response.body.users.forEach(function (user) {
expect('groupIds' in user).to.be(true);
response.body.users.forEach(function (u) {
expect('groupIds' in u).to.be(true);
});
});
});
@@ -293,14 +293,14 @@ describe('Users API', function () {
expect(response.body.users).to.be.an('array');
expect(response.body.users.length).to.be.greaterThan(3);
response.body.users.forEach(function (user) {
expect(user).to.be.an('object');
expect(user.id).to.be.ok();
expect(user.email).to.be.ok();
expect(user.role).to.be.ok();
if (!user.email.startsWith('unnamed')) expect(user.username).to.be.ok();
expect(user.password).to.not.be.ok();
expect(user.salt).to.not.be.ok();
response.body.users.forEach(function (u) {
expect(u).to.be.an('object');
expect(u.id).to.be.ok();
expect(u.email).to.be.ok();
expect(u.role).to.be.ok();
if (!u.email.startsWith('unnamed')) expect(u.username).to.be.ok();
expect(u.password).to.not.be.ok();
expect(u.salt).to.not.be.ok();
});
});
});

View File

@@ -81,8 +81,8 @@ async function createJobs(app, schedulerConfig) {
const cronJob = CronJob.from({
cronTime,
onTick: async () => {
const [error] = await safe(runTask(appId, taskName)); // put the app id in closure, so we don't use the outdated app object by mistake
if (error) debug(`could not run task ${taskName} : ${error.message}`);
const [taskError] = await safe(runTask(appId, taskName)); // put the app id in closure, so we don't use the outdated app object by mistake
if (taskError) debug(`could not run task ${taskName} : ${taskError.message}`);
},
start: true,
timeZone: tz

View File

@@ -246,7 +246,7 @@ async function importDatabase(addon) {
debug(`importDatabase: importing addon ${addon} of app ${app.id}`);
const [error] = await safe(importAppDatabase(app, addon));
const [error] = await safe(importAppDatabase(app, addon)); // eslint-disable-line no-use-before-define
if (!error) continue;
debug(`importDatabase: error importing ${addon} of app ${app.id}. Marking as errored. %o`, error);

View File

@@ -340,9 +340,9 @@ async function listDir(config, remotePath, batchSize, marker) {
// https://github.com/aws/aws-sdk-js/blob/2b6bcbdec1f274fe931640c1b61ece999aae7a19/lib/util.js#L41
// https://github.com/GeorgePhillips/node-s3-url-encode/blob/master/index.js
// See aws-sdk-js/issues/1302
function encodeCopySource(bucket, path) {
function encodeCopySource(bucket, filePath) {
// AWS percent-encodes some extra non-standard characters in a URI
const output = encodeURI(path).replace(/[+!"#$@&'()*,:;=?@]/g, function(ch) {
const output = encodeURI(filePath).replace(/[+!"#$@&'()*,:;=?@]/g, function(ch) {
return '%' + ch.charCodeAt(0).toString(16).toUpperCase();
});
@@ -440,10 +440,10 @@ async function cleanup(config, progressCallback) {
progressCallback({ message: `Cleaning up any aborted multi-part uploads. count:${uploads.Uploads?.length || 0} truncated:${uploads.IsTruncated}` });
if (!uploads.Uploads) return;
for (const upload of uploads.Uploads) {
if (Date.now() - new Date(upload.Initiated) < 3 * 24 * 60 * 60 * 1000) continue; // 3 days ago
progressCallback({ message: `Cleaning up multi-part upload uploadId:${upload.UploadId} key:${upload.Key}` });
await safe(s3.abortMultipartUpload({ Bucket: config.bucket, Key: upload.Key, UploadId: upload.UploadId }), { debug }); // ignore error
for (const multipartUpload of uploads.Uploads) {
if (Date.now() - new Date(multipartUpload.Initiated) < 3 * 24 * 60 * 60 * 1000) continue; // 3 days ago
progressCallback({ message: `Cleaning up multi-part upload uploadId:${multipartUpload.UploadId} key:${multipartUpload.Key}` });
await safe(s3.abortMultipartUpload({ Bucket: config.bucket, Key: multipartUpload.Key, UploadId: multipartUpload.UploadId }), { debug }); // ignore error
}
}

View File

@@ -106,8 +106,8 @@ async function getFilesystems() {
];
for (const stdPath of standardPaths) {
const [dfError, diskInfo] = await safe(df.file(stdPath.path));
if (dfError) throw new BoxError(BoxError.FS_ERROR, `Error getting std path: ${dfError.message}`);
const [dfPathError, diskInfo] = await safe(df.file(stdPath.path));
if (dfPathError) throw new BoxError(BoxError.FS_ERROR, `Error getting std path: ${dfPathError.message}`);
filesystems[diskInfo.filesystem].contents.push(stdPath);
}
@@ -240,9 +240,9 @@ class FilesystemUsageTask extends AsyncTask {
if (!dockerDf) dockerDf = await docker.df({ abortSignal });
content.usage = content.id === 'docker' ? dockerDf.LayersSize : dockerDf.Volumes.map((v) => v.UsageData.Size).reduce((a,b) => a + b, 0);
} else {
const [error, usage] = await safe(du(content.path, { abortSignal }));
const [error, duResult] = await safe(du(content.path, { abortSignal }));
if (error) debug(`du error ${content.path}: ${error.message}`); // can happen if app is installing etc
content.usage = usage || 0;
content.usage = duResult || 0;
}
usage += content.usage;
this.emitData({ content });

View File

@@ -242,8 +242,8 @@ describe('backup cleaner', function () {
await backupSites.setSchedule(site, '00 00 23 * * *', auditSource);
});
async function cleanupBackups(site) {
const taskId = await backupSites.startCleanupTask(site, auditSource);
async function cleanupBackups(targetSite) {
const taskId = await backupSites.startCleanupTask(targetSite, auditSource);
console.log('started task', taskId);

View File

@@ -70,18 +70,18 @@ describe('backups', function () {
});
it('can get backup site', async function () {
const backupSite = await backupSites.get(defaultBackupSite.id);
expect(backupSite.provider).to.be('filesystem');
expect(backupSite.config.backupDir).to.be.ok(); // the test sets this to some tmp location
expect(backupSite.format).to.be('tgz');
expect(backupSite.encryption).to.be(null);
expect(backupSite.contents).to.be(null);
expect(backupSite.enableForUpdates).to.be(true);
const result = await backupSites.get(defaultBackupSite.id);
expect(result.provider).to.be('filesystem');
expect(result.config.backupDir).to.be.ok(); // the test sets this to some tmp location
expect(result.format).to.be('tgz');
expect(result.encryption).to.be(null);
expect(result.contents).to.be(null);
expect(result.enableForUpdates).to.be(true);
});
it('cannot get random backup site', async function () {
const backupSite = await backupSites.get('random');
expect(backupSite).to.be(null);
const result = await backupSites.get('random');
expect(result).to.be(null);
});
it('can set backup config', async function () {
@@ -100,8 +100,8 @@ describe('backups', function () {
it('can set valid schedule', async function () {
for (const pattern of [ '00 * * * * *', constants.CRON_PATTERN_NEVER ]) {
await backupSites.setSchedule(defaultBackupSite, pattern, auditSource);
const backupSite = await backupSites.get(defaultBackupSite.id);
expect(backupSite.schedule).to.be(pattern);
const result = await backupSites.get(defaultBackupSite.id);
expect(result.schedule).to.be(pattern);
}
});
@@ -113,15 +113,15 @@ describe('backups', function () {
it('can set valid retention', async function () {
for (const retention of [ { keepWithinSecs: 1 }, { keepYearly: 3 }, { keepMonthly: 14 } ]) {
await backupSites.setRetention(defaultBackupSite, retention, auditSource);
const backupSite = await backupSites.get(defaultBackupSite.id);
expect(backupSite.retention).to.eql(retention);
const result = await backupSites.get(defaultBackupSite.id);
expect(result.retention).to.eql(retention);
}
});
it('cannot disable for update', async function () {
await backupSites.setEnabledForUpdates(defaultBackupSite, false, auditSource);
const backupSite = await backupSites.get(defaultBackupSite.id);
expect(backupSite.enableForUpdates).to.eql(false);
const result = await backupSites.get(defaultBackupSite.id);
expect(result.enableForUpdates).to.eql(false);
});
it('can delete all the backup sites', async function () {

View File

@@ -51,17 +51,17 @@ async function ldapSearch(dn, opts, auth) {
bindAuth(function (error) {
if (error) return done(error);
client.search(dn, opts, function (error, result) {
if (error) return done(error);
client.search(dn, opts, function (searchError, searchResult) {
if (searchError) return done(searchError);
const entries = [];
result.on('searchEntry', function (entry) { entries.push(entry.object); });
searchResult.on('searchEntry', function (entry) { entries.push(entry.object); });
result.on('error', done);
searchResult.on('error', done);
result.on('end', function (result) {
if (result.status !== 0) return done(new Error(`Unexpected status: ${result.status}`));
searchResult.on('end', function (endResult) {
if (endResult.status !== 0) return done(new Error(`Unexpected status: ${endResult.status}`));
done(null, entries);
});
});

View File

@@ -131,9 +131,9 @@ describe('Eventlog', function () {
await eventlog._clear();
for (const e of [ eventlog.ACTION_USER_LOGIN, eventlog.ACTION_USER_LOGIN_GHOST, eventlog.ACTION_USER_LOGOUT, eventlog.ACTION_USER_LOGIN ]) {
const eventId = await eventlog.add(e, { ip: '1.2.3.4' }, { appId: 'thatapp' });
const loopEventId = await eventlog.add(e, { ip: '1.2.3.4' }, { appId: 'thatapp' });
await notifications._add(notifications.TYPE_APP_UPDATED, 'title', 'some message', { eventId });
await notifications._add(notifications.TYPE_APP_UPDATED, 'title', 'some message', { eventId: loopEventId });
}
await timers.setTimeout(3000);

View File

@@ -43,12 +43,12 @@ class LdapServer {
this.#provider = provider;
}
setUsers(users) {
this.#users = users;
setUsers(userList) {
this.#users = userList;
}
setGroups(groups) {
this.#groups = groups;
setGroups(groupList) {
this.#groups = groupList;
}
setProvider(provider) {
@@ -187,7 +187,7 @@ class LdapServer {
const commonName = req.dn.rdns[0].attrs[attributeName].value;
if (!commonName) return next(new ldap.NoSuchObjectError('Missing CN'));
const u = this.#users.find(function (u) { return u.username === commonName; });
const u = this.#users.find(function (candidate) { return candidate.username === commonName; });
if (!u) return next(new ldap.NoSuchObjectError('No such user'));
if (req.credentials !== LDAP_SHARED_PASSWORD) return next(new ldap.InvalidCredentialsError('Bad password'));

View File

@@ -50,8 +50,8 @@ async function ldapSearch(dn, opts) {
result.on('error', done);
result.on('end', function (result) {
if (result.status !== 0) return done(new Error(`Unexpected status: ${result.status}`));
result.on('end', function (endResult) {
if (endResult.status !== 0) return done(new Error(`Unexpected status: ${endResult.status}`));
done(null, entries);
});
});

View File

@@ -97,10 +97,10 @@ describe('Storage', function () {
it('can download file', async function () {
const sourceFile = path.join(gBackupConfig.backupDir, gBackupConfig.prefix, '/uploadtest/test.txt');
const [error, stream] = await safe(filesystem.download(gBackupConfig, 'uploadtest/test.txt'));
const [error, downloadStream] = await safe(filesystem.download(gBackupConfig, 'uploadtest/test.txt'));
expect(error).to.be(null);
expect(stream).to.be.an('object');
const data = await consumers.buffer(stream);
expect(downloadStream).to.be.an('object');
const data = await consumers.buffer(downloadStream);
expect(fs.readFileSync(sourceFile)).to.eql(data); // buffer compare
});
@@ -314,8 +314,8 @@ describe('Storage', function () {
}
file(key) { // already has prefix
// console.log('gcs file object:', key);
function getFullWritablePath(key) {
const fullPath = path.join(bucketPathNoPrefix, key);
function getFullWritablePath(keyPath) {
const fullPath = path.join(bucketPathNoPrefix, keyPath);
fs.mkdirSync(path.dirname(fullPath), { recursive: true });
return fullPath;
}
@@ -390,9 +390,9 @@ describe('Storage', function () {
it('can download file', async function () {
const sourceKey = 'uploadtest/test.txt';
const [error, stream] = await safe(gcs.download(backupConfig, sourceKey));
const [error, downloadStream] = await safe(gcs.download(backupConfig, sourceKey));
expect(error).to.be(null);
expect(stream).to.be.an('object');
expect(downloadStream).to.be.an('object');
});
it('list dir lists contents of source dir', async function () {

View File

@@ -82,42 +82,42 @@ describe('task', function () {
});
it('can run valid task - success', async function () {
const taskId = await tasks.add(tasks._TASK_IDENTITY, [ 'ping' ]);
const successTaskId = await tasks.add(tasks._TASK_IDENTITY, [ 'ping' ]);
const [error, result] = await safe(tasks.startTask(taskId, {}));
const [error, result] = await safe(tasks.startTask(successTaskId, {}));
if (error) throw error;
expect(result).to.equal('ping');
});
it('can run valid task - error', async function () {
const taskId = await tasks.add(tasks._TASK_ERROR, [ 'ping' ]);
const errorTaskId = await tasks.add(tasks._TASK_ERROR, [ 'ping' ]);
const [error, result] = await safe(tasks.startTask(taskId, {}));
const [error, result] = await safe(tasks.startTask(errorTaskId, {}));
if (!error) throw new Error('expecting task to fail');
expect(error.message).to.be('Task crashed. Failed for arg: ping');
expect(result).to.not.be.ok();
});
it('can get logs of crash', async function () {
const taskId = await tasks.add(tasks._TASK_CRASH, [ 'ping' ]);
const crashTaskId = await tasks.add(tasks._TASK_CRASH, [ 'ping' ]);
const [error, result] = await safe(tasks.startTask(taskId, {}));
const [error, result] = await safe(tasks.startTask(crashTaskId, {}));
if (!error) throw new Error('expecting task to crash');
expect(error.message).to.contain(`Task ${taskId} crashed`);
expect(error.message).to.contain(`Task ${crashTaskId} crashed`);
expect(result).to.not.be.ok();
const logs = fs.readFileSync(`${paths.TASKS_LOG_DIR}/${taskId}.log`, 'utf8');
const logs = fs.readFileSync(`${paths.TASKS_LOG_DIR}/${crashTaskId}.log`, 'utf8');
expect(logs).to.contain('Crashing for arg: ping');
});
it('can stop task', async function () {
const taskId = await tasks.add(tasks._TASK_SLEEP, [ 10000 ]);
const sleepTaskId = await tasks.add(tasks._TASK_SLEEP, [ 10000 ]);
setTimeout(async function () {
await tasks.stopTask(taskId);
await tasks.stopTask(sleepTaskId);
}, 2000);
const [error, result] = await safe(tasks.startTask(taskId, {}));
const [error, result] = await safe(tasks.startTask(sleepTaskId, {}));
if (!error) throw new Error('expecting task to stop');
expect(error.message).to.contain('stopped');
expect(error.code).to.be(tasks.ESTOPPED);
@@ -125,9 +125,9 @@ describe('task', function () {
});
it('task timesout', async function () {
const taskId = await tasks.add(tasks._TASK_SLEEP, [ 10000 ]);
const timeoutTaskId = await tasks.add(tasks._TASK_SLEEP, [ 10000 ]);
const [error, result] = await safe(tasks.startTask(taskId, { timeout: 2000 }));
const [error, result] = await safe(tasks.startTask(timeoutTaskId, { timeout: 2000 }));
if (!error) throw new Error('expecting task to timeout');
expect(error.code).to.be(tasks.ETIMEOUT);
expect(error.message).to.contain('timed out');

View File

@@ -42,50 +42,50 @@ describe('User', function () {
describe('add', function () {
it('fails due to short password', async function () {
const user = Object.assign({}, admin, { password: 'Fo$%23' });
const [error] = await safe(users.add(user.email, user, auditSource));
const testUser = Object.assign({}, admin, { password: 'Fo$%23' });
const [error] = await safe(users.add(testUser.email, testUser, auditSource));
expect(error.reason).to.equal(BoxError.BAD_FIELD);
});
it('fails due to reserved username', async function () {
const user = Object.assign({}, admin, { username: 'admin' });
const [error] = await safe(users.add(user.email, user, auditSource));
const testUser = Object.assign({}, admin, { username: 'admin' });
const [error] = await safe(users.add(testUser.email, testUser, auditSource));
expect(error.reason).to.equal(BoxError.BAD_FIELD);
});
it('fails due to invalid username', async function () {
const user = Object.assign({}, admin, { username: 'moo+daemon' });
const [error] = await safe(users.add(user.email, user, auditSource));
const testUser = Object.assign({}, admin, { username: 'moo+daemon' });
const [error] = await safe(users.add(testUser.email, testUser, auditSource));
expect(error.reason).to.equal(BoxError.BAD_FIELD);
});
it('fails due to empty username', async function () {
const user = Object.assign({}, admin, { username: '' });
const [error] = await safe(users.add(user.email, user, auditSource));
const testUser = Object.assign({}, admin, { username: '' });
const [error] = await safe(users.add(testUser.email, testUser, auditSource));
expect(error.reason).to.equal(BoxError.BAD_FIELD);
});
it('fails due to long username', async function () {
const user = Object.assign({}, admin, { username: new Array(257).fill('Z').join('') });
const [error] = await safe(users.add(user.email, user, auditSource));
const testUser = Object.assign({}, admin, { username: new Array(257).fill('Z').join('') });
const [error] = await safe(users.add(testUser.email, testUser, auditSource));
expect(error.reason).to.equal(BoxError.BAD_FIELD);
});
it('fails due to reserved app pattern', async function () {
const user = Object.assign({}, admin, { username: 'maybe.app' });
const [error] = await safe(users.add(user.email, user, auditSource));
const testUser = Object.assign({}, admin, { username: 'maybe.app' });
const [error] = await safe(users.add(testUser.email, testUser, auditSource));
expect(error.reason).to.equal(BoxError.BAD_FIELD);
});
it('fails because password is empty', async function () {
const user = Object.assign({}, admin, { password: '' });
const [error] = await safe(users.add(user.email, user, auditSource));
const testUser = Object.assign({}, admin, { password: '' });
const [error] = await safe(users.add(testUser.email, testUser, auditSource));
expect(error.reason).to.equal(BoxError.BAD_FIELD);
});
it('fails because fallbackEmail is not an email', async function () {
const user = Object.assign({}, admin, { fallbackEmail: 'notanemail' });
const [error] = await safe(users.add(user.email, user, auditSource));
const testUser = Object.assign({}, admin, { fallbackEmail: 'notanemail' });
const [error] = await safe(users.add(testUser.email, testUser, auditSource));
expect(error.reason).to.equal(BoxError.BAD_FIELD);
});
@@ -207,8 +207,8 @@ describe('User', function () {
before(createOwner);
it('fails due to unknown userid', async function () {
const user = Object.assign({}, admin, { id: 'random' });
const [error] = await safe(users.update(user, { displayName: 'full name' }, auditSource));
const unknownUser = Object.assign({}, admin, { id: 'random' });
const [error] = await safe(users.update(unknownUser, { displayName: 'full name' }, auditSource));
expect(error.reason).to.equal(BoxError.NOT_FOUND);
});
@@ -234,9 +234,9 @@ describe('User', function () {
it('can update the user', async function () {
await users.update(admin, { email: 'some@thing.com', displayName: 'Heiter' }, auditSource);
const user = await users.get(admin.id);
expect(user.email).to.equal('some@thing.com');
expect(user.displayName).to.equal('Heiter');
const updatedUser = await users.get(admin.id);
expect(updatedUser.email).to.equal('some@thing.com');
expect(updatedUser.displayName).to.equal('Heiter');
});
});
@@ -427,19 +427,19 @@ describe('User', function () {
});
it('verify succeeds with relaxed 2fa', async function () {
const user = await users.verifyWithUsername(admin.username, admin.password, users.AP_WEBADMIN, { skipTotpCheck: true });
expect(user.id).to.be(admin.id);
const verifiedUser = await users.verifyWithUsername(admin.username, admin.password, users.AP_WEBADMIN, { skipTotpCheck: true });
expect(verifiedUser.id).to.be(admin.id);
});
it('verify succeeds with relaxed 2fa but incorrect totp (totp is ignored)', async function () {
const user = await users.verifyWithUsername(admin.username, admin.password, users.AP_WEBADMIN, { totpToken: 'schlecht', skipTotpCheck: true });
expect(user.id).to.be(admin.id);
const verifiedUser = await users.verifyWithUsername(admin.username, admin.password, users.AP_WEBADMIN, { totpToken: 'schlecht', skipTotpCheck: true });
expect(verifiedUser.id).to.be(admin.id);
});
it('verify succeeds with valid 2fa', async function () {
const totpToken = speakeasy.totp({ secret: twofa.secret, encoding: 'base32' });
const user = await users.verifyWithUsername(admin.username, admin.password, users.AP_WEBADMIN, { totpToken });
expect(user.id).to.be(admin.id);
const verifiedUser = await users.verifyWithUsername(admin.username, admin.password, users.AP_WEBADMIN, { totpToken });
expect(verifiedUser.id).to.be(admin.id);
});
});
@@ -496,8 +496,8 @@ describe('User', function () {
before(createOwner);
it('fails due to unknown user', async function () {
const user = Object.assign({}, admin, { id: 'doesnotexist' });
const [error] = await safe(users.setPassword(user, 'newpassword', auditSource));
const unknownUser = Object.assign({}, admin, { id: 'doesnotexist' });
const [error] = await safe(users.setPassword(unknownUser, 'newpassword', auditSource));
expect(error.reason).to.be(BoxError.NOT_FOUND);
});
@@ -606,7 +606,7 @@ describe('User', function () {
describe('invite', function () {
before(createOwner);
let user;
let invitedUser;
it('get link fails as alreayd been used', async function () {
const [error] = await safe(users.getInviteLink(admin, auditSource));
@@ -615,11 +615,11 @@ describe('User', function () {
it('can get link', async function () {
const userId = await users.add('some@mail.com', { username: 'someoneinvited', displayName: 'some one', password: 'unsafe1234' }, auditSource);
user = await users.get(userId);
invitedUser = await users.get(userId);
const inviteLink = await users.getInviteLink(user, auditSource);
const inviteLink = await users.getInviteLink(invitedUser, auditSource);
expect(inviteLink).to.be.a('string');
expect(inviteLink).to.contain(user.inviteToken);
expect(inviteLink).to.contain(invitedUser.inviteToken);
});
it('cannot send mail for already active user', async function () {
@@ -628,13 +628,13 @@ describe('User', function () {
});
it('cannot send mail with empty receipient', async function () {
const [error] = await safe(users.sendInviteEmail(user, '', auditSource));
const [error] = await safe(users.sendInviteEmail(invitedUser, '', auditSource));
expect(error.reason).to.be(BoxError.BAD_FIELD);
});
it('can send mail', async function () {
await clearMailQueue();
await users.sendInviteEmail(user, 'custom@mail.com', auditSource);
await users.sendInviteEmail(invitedUser, 'custom@mail.com', auditSource);
const emails = await checkMails(1);
expect(emails[0].to).to.equal('custom@mail.com');
});
@@ -644,8 +644,8 @@ describe('User', function () {
before(createOwner);
it('fails for unknown user', async function () {
const user = Object.assign({}, admin, { id: 'doesnotexist' });
const [error] = await safe(users.del(user, auditSource));
const unknownUser = Object.assign({}, admin, { id: 'doesnotexist' });
const [error] = await safe(users.del(unknownUser, auditSource));
expect(error.reason).to.be(BoxError.NOT_FOUND);
});

View File

@@ -224,14 +224,14 @@ async function startBoxUpdateTask(options, auditSource) {
// background
tasks.startTask(taskId, { timeout: 20 * 60 * 60 * 1000 /* 20 hours */, nice: 15, memoryLimit })
.then(() => debug('startBoxUpdateTask: update task completed'))
.catch(async (error) => {
debug('Update failed with error. %o', error);
.catch(async (updateError) => {
debug('Update failed with error. %o', updateError);
await locks.release(locks.TYPE_BOX_UPDATE_TASK);
await locks.releaseByTaskId(taskId);
const timedOut = error.code === tasks.ETIMEOUT;
await eventlog.add(eventlog.ACTION_UPDATE_FINISH, auditSource, { taskId, errorMessage: error.message, timedOut });
const timedOut = updateError.code === tasks.ETIMEOUT;
await eventlog.add(eventlog.ACTION_UPDATE_FINISH, auditSource, { taskId, errorMessage: updateError.message, timedOut });
});
return taskId;

View File

@@ -608,8 +608,8 @@ async function verify(user, password, identifier, options) {
localTotpCheck = user.twoFactorAuthenticationEnabled && !externalLdap.supports2FA(externalLdapConfig);
} else {
const saltBinary = Buffer.from(user.salt, 'hex');
const [error, derivedKey] = await safe(pbkdf2Async(password, saltBinary, CRYPTO_ITERATIONS, CRYPTO_KEY_LENGTH, CRYPTO_DIGEST));
if (error) throw new BoxError(BoxError.CRYPTO_ERROR, error);
const [cryptoError, derivedKey] = await safe(pbkdf2Async(password, saltBinary, CRYPTO_ITERATIONS, CRYPTO_KEY_LENGTH, CRYPTO_DIGEST));
if (cryptoError) throw new BoxError(BoxError.CRYPTO_ERROR, cryptoError);
const derivedKeyHex = Buffer.from(derivedKey, 'binary').toString('hex');
if (derivedKeyHex !== user.password) {