diff --git a/src/acme2.js b/src/acme2.js index 826317dfb..786197539 100644 --- a/src/acme2.js +++ b/src/acme2.js @@ -19,8 +19,9 @@ const assert = require('assert'), path = require('path'), paths = require('./paths.js'), promiseRetry = require('./promise-retry.js'), - superagent = require('superagent'), safe = require('safetydance'), + shell = require('./shell.js'), + superagent = require('superagent'), users = require('./users.js'); const CA_PROD_DIRECTORY_URL = 'https://acme-v02.api.letsencrypt.org/directory', @@ -70,13 +71,12 @@ function b64(str) { return urlBase64Encode(buf.toString('base64')); } -function getModulus(pem) { +async function getModulus(pem) { assert.strictEqual(typeof pem, 'string'); - const stdout = safe.child_process.execSync('openssl rsa -modulus -noout', { input: pem, encoding: 'utf8' }); - if (!stdout) return null; + const stdout = await shell.promises.exec('getModulus', 'openssl rsa -modulus -noout', { input: pem, encoding: 'utf8' }); const match = stdout.match(/Modulus=([0-9a-fA-F]+)$/m); - if (!match) return null; + if (!match) throw new BoxError(BoxError.OPENSSL_ERROR, 'Could not get modulus'); return Buffer.from(match[1], 'hex'); } @@ -98,7 +98,7 @@ Acme2.prototype.sendSignedRequest = async function (url, payload) { header.jwk = { e: b64(Buffer.from([0x01, 0x00, 0x01])), // exponent - 65537 kty: 'RSA', - n: b64(getModulus(this.accountKey)) + n: b64(await getModulus(this.accountKey)) }; } @@ -153,8 +153,7 @@ Acme2.prototype.updateContact = async function (registrationUri) { }; async function generateAccountKey() { - const acmeAccountKey = safe.child_process.execSync('openssl genrsa 4096', { encoding: 'utf8' }); - if (!acmeAccountKey) throw new BoxError(BoxError.OPENSSL_ERROR, `Could not generate acme account key: ${safe.error.message}`); + const acmeAccountKey = await shell.promises.exec('generateAccountKey', 'openssl genrsa 4096', { encoding: 'utf8' }); return acmeAccountKey; } @@ -237,13 +236,13 @@ Acme2.prototype.waitForOrder = async 
function (orderUrl) { }); }; -Acme2.prototype.getKeyAuthorization = function (token) { +Acme2.prototype.getKeyAuthorization = async function (token) { assert(typeof this.accountKey, 'string'); const jwk = { e: b64(Buffer.from([0x01, 0x00, 0x01])), // Exponent - 65537 kty: 'RSA', - n: b64(getModulus(this.accountKey)) + n: b64(await getModulus(this.accountKey)) }; const shasum = crypto.createHash('sha256'); @@ -257,7 +256,7 @@ Acme2.prototype.notifyChallengeReady = async function (challenge) { debug(`notifyChallengeReady: ${challenge.url} was met`); - const keyAuthorization = this.getKeyAuthorization(challenge.token); + const keyAuthorization = await this.getKeyAuthorization(challenge.token); const payload = { resource: 'challenge', @@ -295,8 +294,7 @@ Acme2.prototype.signCertificate = async function (finalizationUrl, csrPem) { assert.strictEqual(typeof finalizationUrl, 'string'); assert.strictEqual(typeof csrPem, 'string'); - const csrDer = safe.child_process.execSync('openssl req -inform pem -outform der', { input: csrPem }); - if (!csrDer) throw new BoxError(BoxError.OPENSSL_ERROR, safe.error); + const csrDer = await shell.promises.exec('signCertificate', 'openssl req -inform pem -outform der', { input: csrPem }); const payload = { csr: b64(csrDer) @@ -318,8 +316,7 @@ Acme2.prototype.ensureKey = async function () { debug(`ensureKey: generating new key for ${this.cn}`); // same as prime256v1. openssl ecparam -list_curves. 
we used to use secp384r1 but it doesn't seem to be accepted by few mail servers - const newKey = safe.child_process.execSync('openssl ecparam -genkey -name secp256r1', { encoding: 'utf8' }); - if (!newKey) throw new BoxError(BoxError.OPENSSL_ERROR, safe.error); + const newKey = await shell.promises.exec('ensureKey', 'openssl ecparam -genkey -name secp256r1', { encoding: 'utf8' }); return newKey; }; @@ -347,9 +344,7 @@ Acme2.prototype.createCsr = async function (key) { if (!safe.fs.writeFileSync(opensslConfigFile, conf)) throw new BoxError(BoxError.FS_ERROR, `Failed to write openssl config: ${safe.error.message}`); // while we pass the CN anyways, subjectAltName takes precedence - const csrPem = safe.child_process.execSync(`openssl req -new -key ${keyFilePath} -outform PEM -subj /CN=${this.cn} -config ${opensslConfigFile}`, { encoding: 'utf8' }); - if (!csrPem) throw new BoxError(BoxError.OPENSSL_ERROR, safe.error); - + const csrPem = await shell.promises.exec('createCsr', `openssl req -new -key ${keyFilePath} -outform PEM -subj /CN=${this.cn} -config ${opensslConfigFile}`, { encoding: 'utf8' }); await safe(fs.promises.rm(tmpdir, { recursive: true, force: true })); debug(`createCsr: csr file created for ${this.cn}`); return csrPem; // inspect with openssl req -text -noout -in hostname.csr -inform pem @@ -375,7 +370,7 @@ Acme2.prototype.prepareHttpChallenge = async function (challenge) { debug(`prepareHttpChallenge: preparing for challenge ${JSON.stringify(challenge)}`); - const keyAuthorization = this.getKeyAuthorization(challenge.token); + const keyAuthorization = await this.getKeyAuthorization(challenge.token); const challengeFilePath = path.join(paths.ACME_CHALLENGES_DIR, challenge.token); debug(`prepareHttpChallenge: writing ${keyAuthorization} to ${challengeFilePath}`); @@ -414,7 +409,7 @@ Acme2.prototype.prepareDnsChallenge = async function (cn, challenge) { debug(`prepareDnsChallenge: preparing for challenge: ${JSON.stringify(challenge)}`); - const 
keyAuthorization = this.getKeyAuthorization(challenge.token); + const keyAuthorization = await this.getKeyAuthorization(challenge.token); const shasum = crypto.createHash('sha256'); shasum.update(keyAuthorization); @@ -432,7 +427,7 @@ Acme2.prototype.cleanupDnsChallenge = async function (cn, challenge) { assert.strictEqual(typeof cn, 'string'); assert.strictEqual(typeof challenge, 'object'); - const keyAuthorization = this.getKeyAuthorization(challenge.token); + const keyAuthorization = await this.getKeyAuthorization(challenge.token); const shasum = crypto.createHash('sha256'); shasum.update(keyAuthorization); diff --git a/src/apps.js b/src/apps.js index 05d688f3c..dc45f2949 100644 --- a/src/apps.js +++ b/src/apps.js @@ -2835,7 +2835,7 @@ async function uploadFile(app, sourceFilePath, destFilePath) { // the built-in bash printf understands "%q" but not /usr/bin/printf. // ' gets replaced with '\'' . the first closes the quote and last one starts a new one - const escapedDestFilePath = safe.child_process.execSync(`printf %q '${destFilePath.replace(/'/g, '\'\\\'\'')}'`, { shell: '/bin/bash', encoding: 'utf8' }); + const escapedDestFilePath = await shell.promises.exec('uploadFile', `printf %q '${destFilePath.replace(/'/g, '\'\\\'\'')}'`, { shell: '/bin/bash', encoding: 'utf8' }); debug(`uploadFile: ${sourceFilePath} -> ${escapedDestFilePath}`); const execId = await createExec(app, { cmd: [ 'bash', '-c', `cat - > ${escapedDestFilePath}` ], tty: false }); diff --git a/src/appstore.js b/src/appstore.js index b911e45a2..6bfb3d23c 100644 --- a/src/appstore.js +++ b/src/appstore.js @@ -47,6 +47,7 @@ const apps = require('./apps.js'), safe = require('safetydance'), semver = require('semver'), settings = require('./settings.js'), + shell = require('./shell.js'), superagent = require('superagent'), support = require('./support.js'); @@ -395,8 +396,8 @@ async function createTicket(info, auditSource) { const logPaths = await apps.getLogPaths(info.app); for (const logPath of 
logPaths) { - const logs = safe.child_process.execSync(`tail --lines=1000 ${logPath}`); - if (logs) request.attach(path.basename(logPath), logs, path.basename(logPath)); + const [error, logs] = await safe(shell.promises.exec('createTicket', `tail --lines=1000 ${logPath}`, {})); + if (!error && logs) request.attach(path.basename(logPath), logs, path.basename(logPath)); } } else { request.send(info); } diff --git a/src/backupformat/rsync.js b/src/backupformat/rsync.js index baa2b60e0..58b988b31 100644 --- a/src/backupformat/rsync.js +++ b/src/backupformat/rsync.js @@ -22,6 +22,7 @@ const assert = require('assert'), ProgressStream = require('../progress-stream.js'), promiseRetry = require('../promise-retry.js'), safe = require('safetydance'), + shell = require('../shell.js'), storage = require('../storage.js'), stream = require('stream'), syncer = require('../syncer.js'), @@ -109,17 +110,15 @@ async function saveFsMetadata(dataLayout, metadataFile) { // we assume small number of files. spawnSync will raise a ENOBUFS error after maxBuffer for (let lp of dataLayout.localPaths()) { - const emptyDirs = safe.child_process.execSync(`find ${lp} -type d -empty`, { encoding: 'utf8', maxBuffer: 1024 * 1024 * 80 }); - if (emptyDirs === null) throw new BoxError(BoxError.FS_ERROR, `Error finding empty dirs: ${safe.error.message}`); - if (emptyDirs.length) metadata.emptyDirs = metadata.emptyDirs.concat(emptyDirs.trim().split('\n').map((ed) => dataLayout.toRemotePath(ed))); + const emptyDirs = await shell.promises.exec('saveFsMetadata', `find ${lp} -type d -empty`, { encoding: 'utf8', maxBuffer: 1024 * 1024 * 80 }); + metadata.emptyDirs = metadata.emptyDirs.concat(emptyDirs.trim().split('\n').filter(Boolean).map((ed) => dataLayout.toRemotePath(ed))); - const execFiles = safe.child_process.execSync(`find ${lp} -type f -executable`, { encoding: 'utf8', maxBuffer: 1024 * 1024 * 80 }); - if (execFiles === null) throw new BoxError(BoxError.FS_ERROR, `Error finding executables: ${safe.error.message}`); -
if (execFiles.length) metadata.execFiles = metadata.execFiles.concat(execFiles.trim().split('\n').map((ef) => dataLayout.toRemotePath(ef))); + const execFiles = await shell.promises.exec('saveFsMetadata', `find ${lp} -type f -executable`, { encoding: 'utf8', maxBuffer: 1024 * 1024 * 80 }); + metadata.execFiles = metadata.execFiles.concat(execFiles.trim().split('\n').filter(Boolean).map((ef) => dataLayout.toRemotePath(ef))); - const symlinks = safe.child_process.execSync(`find ${lp} -type l`, { encoding: 'utf8', maxBuffer: 1024 * 1024 * 30 }); - if (symlinks === null) throw new BoxError(BoxError.FS_ERROR, `Error finding symlinks: ${safe.error.message}`); - if (symlinks.length) metadata.symlinks = metadata.symlinks.concat(symlinks.trim().split('\n').map((sl) => { + const symlinks = await shell.promises.exec('saveFsMetadata', `find ${lp} -type l`, { encoding: 'utf8', maxBuffer: 1024 * 1024 * 30 }); + // filter(Boolean) drops the empty entry that split('\n') yields when find prints nothing + metadata.symlinks = metadata.symlinks.concat(symlinks.trim().split('\n').filter(Boolean).map((sl) => { const target = safe.fs.readlinkSync(sl); return { path: dataLayout.toRemotePath(sl), target }; })); diff --git a/src/backuptask.js b/src/backuptask.js index ca10abb88..6ac8b782e 100644 --- a/src/backuptask.js +++ b/src/backuptask.js @@ -60,8 +60,7 @@ async function checkPreconditions(backupConfig, dataLayout) { let used = 0; for (const localPath of dataLayout.localPaths()) { debug(`checkPreconditions: getting disk usage of ${localPath}`); - const result = safe.child_process.execSync(`du -Dsb --exclude='*.lock' --exclude='dovecot.list.index.log.*' "${localPath}"`, { encoding: 'utf8' }); - if (!result) throw new BoxError(BoxError.FS_ERROR, `du error: ${safe.error.message}`); + const result = await shell.promises.exec('checkPreconditions', `du -Dsb --exclude='*.lock' --exclude='dovecot.list.index.log.*' "${localPath}"`, { encoding: 'utf8' }); used += parseInt(result, 10); } diff --git a/src/boxerror.js b/src/boxerror.js index 121e93499..02025c553 100644 --- a/src/boxerror.js +++
b/src/boxerror.js @@ -63,7 +63,7 @@ BoxError.NOT_SIGNED = 'Not Signed'; BoxError.NOT_SUPPORTED = 'Not Supported'; BoxError.OPENSSL_ERROR = 'OpenSSL Error'; BoxError.PLAN_LIMIT = 'Plan Limit'; -BoxError.SPAWN_ERROR = 'Spawn Error'; +BoxError.SHELL_ERROR = 'Shell Error'; // exec or spawn cmd failed BoxError.TASK_ERROR = 'Task Error'; BoxError.TIMEOUT = 'Timeout'; BoxError.TRY_AGAIN = 'Try Again'; diff --git a/src/database.js b/src/database.js index 1db563d4c..756733eb0 100644 --- a/src/database.js +++ b/src/database.js @@ -144,8 +144,7 @@ async function exportToFile(file) { assert.strictEqual(typeof file, 'string'); // latest mysqldump enables column stats by default which is not present in 5.7 util - const mysqlDumpHelp = safe.child_process.execSync('/usr/bin/mysqldump --help', { encoding: 'utf8' }); - if (!mysqlDumpHelp) throw new BoxError(BoxError.DATABASE_ERROR, safe.error); + const mysqlDumpHelp = await shell.promises.exec('exportToFile', '/usr/bin/mysqldump --help', { encoding: 'utf8' }); const hasColStats = mysqlDumpHelp.includes('column-statistics'); const colStats = hasColStats ? 
'--column-statistics=0' : ''; diff --git a/src/docker.js b/src/docker.js index 535f0d1cf..e01b92369 100644 --- a/src/docker.js +++ b/src/docker.js @@ -41,6 +41,7 @@ const apps = require('./apps.js'), dashboard = require('./dashboard.js'), debug = require('debug')('box:docker'), Docker = require('dockerode'), + fs = require('fs'), paths = require('./paths.js'), promiseRetry = require('./promise-retry.js'), services = require('./services.js'), @@ -237,16 +238,18 @@ async function getMounts(app) { // This only returns ipv4 addresses // We dont bind to ipv6 interfaces, public prefix changes and container restarts wont work -function getAddressesForPort53() { - const deviceLinks = safe.fs.readdirSync('/sys/class/net'); // https://man7.org/linux/man-pages/man5/sysfs.5.html - if (!deviceLinks) return []; +async function getAddressesForPort53() { + const [error, deviceLinks] = await safe(fs.promises.readdir('/sys/class/net')); // https://man7.org/linux/man-pages/man5/sysfs.5.html + if (error) return []; const devices = deviceLinks.map(d => { return { name: d, link: safe.fs.readlinkSync(`/sys/class/net/${d}`) }; }); const physicalDevices = devices.filter(d => d.link && !d.link.includes('virtual')); const addresses = []; for (const phy of physicalDevices) { - const inet = safe.JSON.parse(safe.child_process.execSync(`ip -f inet -j addr show dev ${phy.name} scope global`, { encoding: 'utf8' })); + const [error, output] = await safe(shell.promises.exec('getAddressesForPort53', `ip -f inet -j addr show dev ${phy.name} scope global`, { encoding: 'utf8' })); + if (error) continue; + const inet = safe.JSON.parse(output) || []; for (const r of inet) { const address = safe.query(r, 'addr_info[0].local'); if (address) addresses.push(address); @@ -290,7 +293,7 @@ async function createSubcontainer(app, name, cmd, options) { const ports = portType == 'tcp' ? 
manifest.tcpPorts : manifest.udpPorts; const containerPort = ports[portName].containerPort || hostPort; const portCount = ports[portName].portCount || 1; - const hostIps = hostPort === 53 ? getAddressesForPort53() : [ '0.0.0.0', '::0' ]; // port 53 is special because it is possibly taken by systemd-resolved + const hostIps = hostPort === 53 ? await getAddressesForPort53() : [ '0.0.0.0', '::0' ]; // port 53 is special because it is possibly taken by systemd-resolved portEnv.push(`${portName}=${hostPort}`); if (portCount > 1) portEnv.push(`${portName}_COUNT=${portCount}`); diff --git a/src/mailserver.js b/src/mailserver.js index 5cf680165..0b737185c 100644 --- a/src/mailserver.js +++ b/src/mailserver.js @@ -53,8 +53,8 @@ async function generateDkimKey() { const privateKeyFilePath = path.join(os.tmpdir(), `dkim-${crypto.randomBytes(4).readUInt32LE(0)}.private`); // https://www.unlocktheinbox.com/dkim-key-length-statistics/ and https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-email-authentication-dkim-easy.html for key size - if (!safe.child_process.execSync(`openssl genrsa -out ${privateKeyFilePath} 1024`)) return new BoxError(BoxError.OPENSSL_ERROR, safe.error); - if (!safe.child_process.execSync(`openssl rsa -in ${privateKeyFilePath} -out ${publicKeyFilePath} -pubout -outform PEM`)) return new BoxError(BoxError.OPENSSL_ERROR, safe.error); + await shell.promises.exec('generateDkimKey', `openssl genrsa -out ${privateKeyFilePath} 1024`, {}); + await shell.promises.exec('generateDkimKey', `openssl rsa -in ${privateKeyFilePath} -out ${publicKeyFilePath} -pubout -outform PEM`, {}); const publicKey = safe.fs.readFileSync(publicKeyFilePath, 'utf8'); if (!publicKey) throw new BoxError(BoxError.FS_ERROR, safe.error.message); @@ -153,7 +153,8 @@ async function configureMail(mailFqdn, mailDomain, serviceConfig) { const mailCertFilePath = `${paths.MAIL_CONFIG_DIR}/tls_cert.pem`; const mailKeyFilePath = `${paths.MAIL_CONFIG_DIR}/tls_key.pem`; - if 
(!safe.child_process.execSync(`cp ${paths.DHPARAMS_FILE} ${dhparamsFilePath}`)) throw new BoxError(BoxError.FS_ERROR, `Could not copy dhparams: ${safe.error.message}`); + const [copyError] = await safe(shell.promises.exec('configureMail', `cp ${paths.DHPARAMS_FILE} ${dhparamsFilePath}`, {})); + if (copyError) throw new BoxError(BoxError.FS_ERROR, `Could not copy dhparams: ${copyError.message}`); if (!safe.fs.writeFileSync(mailCertFilePath, certificate.cert)) throw new BoxError(BoxError.FS_ERROR, `Could not create cert file: ${safe.error.message}`); if (!safe.fs.writeFileSync(mailKeyFilePath, certificate.key)) throw new BoxError(BoxError.FS_ERROR, `Could not create key file: ${safe.error.message}`); diff --git a/src/mounts.js b/src/mounts.js index d5726f28f..0d1b4ddc9 100644 --- a/src/mounts.js +++ b/src/mounts.js @@ -71,7 +71,7 @@ function isManagedProvider(provider) { // nfs - no_root_squash is mode on server to map all root to 'nobody' user. all_squash does this for all users (making it like ftp) // sshfs - supports users/permissions // cifs - does not support permissions -function renderMountFile(mount) { +async function renderMountFile(mount) { assert.strictEqual(typeof mount, 'object'); const { name, hostPath, mountType, mountOptions } = mount; @@ -79,8 +79,7 @@ { let options, what, type; switch (mountType) { case 'cifs': { - const out = safe.child_process.execSync(`systemd-escape -p '${hostPath}'`, { encoding: 'utf8' }); // this ensures uniqueness of creds file - if (!out) throw new BoxError(BoxError.FS_ERROR, `Could not determine credentials file name: ${safe.error.message}`); + const out = await shell.promises.exec('renderMountFile', `systemd-escape -p '${hostPath}'`, { encoding: 'utf8' }); // this ensures uniqueness of creds file const credentialsFilePath = path.join(paths.CIFS_CREDENTIALS_DIR, `${out.trim()}.cred`); if (!safe.fs.writeFileSync(credentialsFilePath,
`username=${mountOptions.username}\npassword=${mountOptions.password}\n`, { mode: 0o600 })) throw new BoxError(BoxError.FS_ERROR, `Could not write credentials file: ${safe.error.message}`); @@ -139,8 +138,8 @@ async function removeMount(mount) { const keyFilePath = path.join(paths.SSHFS_KEYS_DIR, `id_rsa_${mountOptions.host}`); safe.fs.unlinkSync(keyFilePath); } else if (mountType === 'cifs') { - const out = safe.child_process.execSync(`systemd-escape -p '${hostPath}'`, { encoding: 'utf8' }); - if (!out) return; + const [outError, out] = await safe(shell.promises.exec('removeMount', `systemd-escape -p '${hostPath}'`, { encoding: 'utf8' })); + if (outError) return; // cleanup stays best-effort, as before const credentialsFilePath = path.join(paths.CIFS_CREDENTIALS_DIR, `${out.trim()}.cred`); safe.fs.unlinkSync(credentialsFilePath); } @@ -161,7 +160,7 @@ async function getStatus(mountType, hostPath) { let message; if (state !== 'active') { // find why it failed - const logsJson = safe.child_process.execSync(`journalctl -u $(systemd-escape -p --suffix=mount ${hostPath}) -n 10 --no-pager -o json`, { encoding: 'utf8' }); + const [, logsJson] = await safe(shell.promises.exec('getStatus', `journalctl -u $(systemd-escape -p --suffix=mount ${hostPath}) -n 10 --no-pager -o json`, { encoding: 'utf8' })); if (logsJson) { const lines = logsJson.trim().split('\n').map(l => JSON.parse(l)); // array of json @@ -196,7 +195,8 @@ async function tryAddMount(mount, options) { if (constants.TEST) return; - const [error] = await safe(shell.promises.sudo('addMount', [ ADD_MOUNT_CMD, renderMountFile(mount), options.timeout ], {})); + const mountFileContents = await renderMountFile(mount); + const [error] = await safe(shell.promises.sudo('addMount', [ ADD_MOUNT_CMD, mountFileContents, options.timeout ], {})); if (error && error.code === 2) throw new BoxError(BoxError.MOUNT_ERROR, 'Failed to unmount existing mount'); // at this point, the old mount config is still there if (options.skipCleanup) return; diff --git a/src/platform.js b/src/platform.js index a2e1e043a..d27fc1838 100644
--- a/src/platform.js +++ b/src/platform.js @@ -49,10 +49,10 @@ async function pruneInfraImages() { // cannot blindly remove all unused images since redis image may not be used const imageNames = Object.keys(infra.images).map(addon => infra.images[addon]); - const output = safe.child_process.execSync('docker images --digests --format "{{.ID}} {{.Repository}} {{.Tag}} {{.Digest}}"', { encoding: 'utf8' }); - if (output === null) { - debug(`Failed to list images ${safe.error.message}`); - throw safe.error; + const [error, output] = await safe(shell.promises.exec('pruneInfraImages', 'docker images --digests --format "{{.ID}} {{.Repository}} {{.Tag}} {{.Digest}}"', { encoding: 'utf8' })); + if (error) { + debug(`Failed to list images ${error.message}`); + throw error; } const lines = output.trim().split('\n'); @@ -69,8 +69,8 @@ async function pruneInfraImages() { const imageIdToPrune = tag === '' ? `${repo}@${digest}` : `${repo}:${tag}`; // untagged, use digest console.log(`pruneInfraImages: removing unused image of ${imageName}: ${imageIdToPrune}`); - const result = safe.child_process.execSync(`docker rmi '${imageIdToPrune}'`, { encoding: 'utf8' }); - if (result === null) console.log(`Error removing image ${imageIdToPrune}: ${safe.error.mesage}`); + const [error] = await safe(shell.promises.exec('pruneInfraImages', `docker rmi '${imageIdToPrune}'`, { encoding: 'utf8' })); + if (error) console.log(`Error removing image ${imageIdToPrune}: ${error.message}`); } } } diff --git a/src/provision.js b/src/provision.js index 300276a72..a91b2a874 100644 --- a/src/provision.js +++ b/src/provision.js @@ -24,6 +24,7 @@ const assert = require('assert'), platform = require('./platform.js'), reverseProxy = require('./reverseproxy.js'), safe = require('safetydance'), + shell = require('./shell.js'), semver = require('semver'), paths = require('./paths.js'), system = require('./system.js'), @@ -57,8 +58,7 @@ function setProgress(task, message) { async function ensureDhparams() { if
(fs.existsSync(paths.DHPARAMS_FILE)) return; debug('ensureDhparams: generating dhparams'); - const dhparams = safe.child_process.execSync('openssl dhparam -dsaparam 2048'); - if (!dhparams) throw new BoxError(BoxError.OPENSSL_ERROR, safe.error); + const dhparams = await shell.promises.exec('ensureDhparams', 'openssl dhparam -dsaparam 2048', {}); if (!safe.fs.writeFileSync(paths.DHPARAMS_FILE, dhparams)) throw new BoxError(BoxError.FS_ERROR, `Could not save dhparams.pem: ${safe.error.message}`); } diff --git a/src/shell.js b/src/shell.js index f8852431f..2f55e9d32 100644 --- a/src/shell.js +++ b/src/shell.js @@ -30,13 +30,19 @@ function exec(tag, cmd, options, callback) { debug(`${tag} exec: ${cmd}`); - child_process.exec(cmd, options, function (error, stdout, stderr) { + const cp = child_process.exec(cmd, options, function (error, stdout, stderr) { - const stdoutResult = stdout ? stdout.toString('utf8') : null; - const stderrResult = stderr ? stderr.toString('utf8') : null; + let e = null; + if (error) { + e = new BoxError(BoxError.SHELL_ERROR, `${tag} errored with code ${error.code} message ${error.message}`); + e.stdout = stdout; // when promisified, this is the way to get stdout + e.stderr = stderr; // when promisified, this is the way to get stderr - if (error) error.stdout = stdoutResult; // when promisified, this is the way to get stdout - if (error) error.stderr = stderrResult; // when promisified, this is the way to get stderr + debug(e.message); + } - callback(error, stdoutResult); + callback(e, stdout); }); + + // unlike execSync, async child_process.exec has no 'input' option; feed stdin for callers that pass one (openssl rsa/req would otherwise hang) + if (options && options.input) cp.stdin.end(options.input); } @@ -73,7 +79,7 @@ function spawn(tag, file, args, options, callback) { if (code || signal) debug(tag + ' code: %s, signal: %s', code, signal); if (code === 0) return callback(null, stdoutResult); - let e = new BoxError(BoxError.SPAWN_ERROR, `${tag} exited with code ${code} signal ${signal}`); + let e = new BoxError(BoxError.SHELL_ERROR, `${tag} exited with code ${code} signal ${signal}`); e.code = code; e.signal = signal; callback(e); @@ -81,7 +87,7 @@ function spawn(tag, file, args, options, callback) { cp.on('error', function
(error) { debug(tag + ' code: %s, signal: %s', error.code, error.signal); - let e = new BoxError(BoxError.SPAWN_ERROR, `${tag} errored with code ${error.code} message ${error.message}`); + let e = new BoxError(BoxError.SHELL_ERROR, `${tag} errored with code ${error.code} message ${error.message}`); callback(e); }); diff --git a/src/system.js b/src/system.js index 6f5e51fcc..639a12626 100644 --- a/src/system.js +++ b/src/system.js @@ -329,8 +329,7 @@ async function getLogs(unit, options) { } async function getBlockDevices() { - const [error, result] = await safe(shell.promises.exec('getBlockDevices', 'lsblk --paths --json --list --fs', { encoding: 'utf8' })); - if (error) throw new BoxError(BoxError.INTERNAL_ERROR, `lsblk failed: ${error.message}`); + const result = await shell.promises.exec('getBlockDevices', 'lsblk --paths --json --list --fs', { encoding: 'utf8' }); const info = safe.JSON.parse(result); if (!info) throw new BoxError(BoxError.INTERNAL_ERROR, `failed to parse lsblk: ${safe.error.message}`); diff --git a/src/test/shell-test.js b/src/test/shell-test.js index e84dd80ce..3c8e56bc8 100644 --- a/src/test/shell-test.js +++ b/src/test/shell-test.js @@ -4,7 +4,8 @@ 'use strict'; -const expect = require('expect.js'), +const BoxError = require('../boxerror.js'), + expect = require('expect.js'), path = require('path'), safe = require('safetydance'), shell = require('../shell.js'); @@ -78,6 +79,6 @@ describe('shell', function () { it('exec times out properly', async function () { const [error] = await safe(shell.promises.exec('sleeping', 'sleep 20', { timeout: 1000 })); - expect(error.signal).to.be('SIGTERM'); // somtimes code is ETIMEOUT + expect(error.reason).to.be(BoxError.SHELL_ERROR); }); });