Compare commits

..

2 Commits

Author SHA1 Message Date
Girish Ramakrishnan 52d2fe6909 data dir: allow sameness of old and new dir
this makes it easy to migrate to a new volume setup

(cherry picked from commit a32166bc9d)
2022-06-10 09:39:24 -07:00
Girish Ramakrishnan 61a1ac6983 7.2.4 changes 2022-06-10 09:33:12 -07:00
93 changed files with 804 additions and 2711 deletions
-40
View File
@@ -2502,43 +2502,3 @@
* volumes: Ensure long volume names do not overflow the table
* Move all appstore filter to the left
* app data: allow sameness of old and new dir
[7.2.5]
* Fix storage volume migration
* Fix issue where only 25 group members were returned
* Fix eventlog display
[7.3.0]
* Proxied apps
* Applinks - app bookmarks in dashboard
* backups: optional encryption of backup file names
* eventlog: add event for impersonated user login
* ldap & user directory: Remove virtual user and admin groups
* Randomize certificate generation cronjob to lighten load on Let's Encrypt servers
* mail: catch all address can be any domain
* mail: accept only STARTTLS servers for relay
* graphs: cgroup v2 support
* mail: fix issue where signature was appended to text attachments
* redis: restart button will now rebuild if the container is missing
* backups: allow space in label name
* mail: fix crash when solr is enabled on Ubuntu 22 (cgroup v2 detection fix)
* mail: fix issue where certificate renewal did not restart the mail container properly
* notification: Fix crash when backupId is null
* IPv6: initial support for ipv6 only server
* User directory: Cloudron connector uses 2FA auth
* port bindings: add read only flag
* mail: add storage quota support
* mail: allow aliases to have wildcard
* proxyAuth: add supportsBearerAuth flag
* backups: Fix precondition check which was not erroring if mount is missing
* mail: add queue management API and UI
* graphs: show app disk usage graphs
* UI: fix issue where mailbox display name was not init correctly
* wasabi: add singapore and sydney regions
* filemanager: add split view
* nginx: fix zero length certs when out of disk space
* read only API tokens
[7.3.1]
* Add cloudlare R2
+5 -5
View File
@@ -9,7 +9,7 @@ const fs = require('fs'),
safe = require('safetydance'),
server = require('./src/server.js'),
settings = require('./src/settings.js'),
directoryServer = require('./src/directoryserver.js');
userdirectory = require('./src/userdirectory.js');
let logFd;
@@ -38,8 +38,8 @@ async function startServers() {
await proxyAuth.start();
await ldap.start();
const conf = await settings.getDirectoryServerConfig();
if (conf.enabled) await directoryServer.start();
const conf = await settings.getUserDirectoryConfig();
if (conf.enabled) await userdirectory.start();
}
async function main() {
@@ -54,7 +54,7 @@ async function main() {
await proxyAuth.stop();
await server.stop();
await directoryServer.stop();
await userdirectory.stop();
await ldap.stop();
setTimeout(process.exit.bind(process), 3000);
});
@@ -64,7 +64,7 @@ async function main() {
await proxyAuth.stop();
await server.stop();
await directoryServer.stop();
await userdirectory.stop();
await ldap.stop();
setTimeout(process.exit.bind(process), 3000);
});
@@ -13,16 +13,14 @@ function getMountPoint(dataDir) {
}
exports.up = async function(db) {
// use safe() here because this migration failed midway in 7.2.4
await safe(db.runSql('ALTER TABLE apps ADD storageVolumeId VARCHAR(128), ADD FOREIGN KEY(storageVolumeId) REFERENCES volumes(id)'));
await safe(db.runSql('ALTER TABLE apps ADD storageVolumePrefix VARCHAR(128)'));
await safe(db.runSql('ALTER TABLE apps ADD CONSTRAINT apps_storageVolume UNIQUE (storageVolumeId, storageVolumePrefix)'));
await db.runSql('ALTER TABLE apps ADD storageVolumeId VARCHAR(128), ADD FOREIGN KEY(storageVolumeId) REFERENCES volumes(id)');
await db.runSql('ALTER TABLE apps ADD storageVolumePrefix VARCHAR(128)');
await db.runSql('ALTER TABLE apps ADD CONSTRAINT apps_storageVolume UNIQUE (storageVolumeId, storageVolumePrefix)');
const apps = await db.runSql('SELECT * FROM apps WHERE dataDir IS NOT NULL');
const allVolumes = await db.runSql('SELECT * FROM volumes');
for (const app of apps) {
const allVolumes = await db.runSql('SELECT * FROM volumes');
console.log(`data-dir (${app.id}): migrating data dir ${app.dataDir}`);
const mountPoint = getMountPoint(app.dataDir);
@@ -38,7 +36,7 @@ exports.up = async function(db) {
}
const id = uuid.v4().replace(/-/g, ''); // to make systemd mount file names more readable
const name = `appdata-${id}`;
const name = `app-${app.id}`;
const type = app.dataDir === mountPoint ? 'filesystem' : 'mountpoint';
console.log(`data-dir (${app.id}): creating new volume ${id}`);
@@ -1,9 +0,0 @@
'use strict';
exports.up = async function (db) {
await db.runSql('ALTER TABLE apps ADD COLUMN upstreamUri VARCHAR(256) DEFAULT ""');
};
exports.down = async function (db) {
await db.runSql('ALTER TABLE apps DROP COLUMN upstreamUri');
};
@@ -1,15 +0,0 @@
'use strict';
exports.up = async function(db) {
const result = await db.runSql('SELECT * FROM settings WHERE name=?', [ 'backup_config' ]);
if (!result.length) return;
const backupConfig = JSON.parse(result[0].value);
if (backupConfig.encryption && backupConfig.format === 'rsync') backupConfig.encryptedFilenames = true;
await db.runSql('UPDATE settings SET value=? WHERE name=?', [ JSON.stringify(backupConfig), 'backup_config', ]);
};
exports.down = async function(/* db */) {
};
@@ -1,22 +0,0 @@
'use strict';
exports.up = async function (db) {
var cmd = 'CREATE TABLE applinks(' +
'id VARCHAR(128) NOT NULL UNIQUE,' +
'accessRestrictionJson TEXT,' +
'creationTime TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,' +
'updateTime TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,' +
'ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,' +
'label VARCHAR(128),' +
'tagsJson VARCHAR(2048),' +
'icon MEDIUMBLOB,' +
'upstreamUri VARCHAR(256) DEFAULT "",' +
'PRIMARY KEY (id)) CHARACTER SET utf8 COLLATE utf8_bin';
await db.runSql(cmd);
};
exports.down = async function (db) {
await db.runSql('DROP TABLE applinks');
};
@@ -1,17 +0,0 @@
'use strict';
const async = require('async');
exports.up = function(db, callback) {
async.series([
db.runSql.bind(db, 'ALTER TABLE mailboxes ADD COLUMN storageQuota BIGINT DEFAULT 0'),
db.runSql.bind(db, 'ALTER TABLE mailboxes ADD COLUMN messagesQuota BIGINT DEFAULT 0'),
], callback);
};
exports.down = function(db, callback) {
async.series([
db.runSql.bind(db, 'ALTER TABLE mailboxes DROP COLUMN storageQuota'),
db.runSql.bind(db, 'ALTER TABLE mailboxes DROP COLUMN messagesQuota')
], callback);
};
@@ -1,18 +0,0 @@
'use strict';
const safe = require('safetydance');
exports.up = async function (db) {
const mailDomains = await db.runSql('SELECT * FROM mail', []);
for (const mailDomain of mailDomains) {
let catchAll = safe.JSON.parse(mailDomain.catchAllJson) || [];
if (catchAll.length === 0) continue;
catchAll = catchAll.map(a => `${a}@${mailDomain.domain}`);
await db.runSql('UPDATE mail SET catchAllJson = ? WHERE domain = ?', [ JSON.stringify(catchAll), mailDomain.domain ]);
}
};
exports.down = async function( /* db */) {
};
@@ -1,13 +0,0 @@
'use strict';
exports.up = async function (db) {
await db.runSql('ALTER TABLE tokens DROP COLUMN scope');
await db.runSql('ALTER TABLE tokens ADD COLUMN scopeJson TEXT');
await db.runSql('UPDATE tokens SET scopeJson = ?', [ JSON.stringify({'*':'rw'})]);
};
exports.down = async function (db) {
await db.runSql('ALTER TABLE tokens ADD COLUMN scope VARCHAR(512) NOT NULL DEFAULT ""');
await db.runSql('ALTER TABLE tokens DROP COLUMN scopeJson');
};
+1 -17
View File
@@ -58,7 +58,7 @@ CREATE TABLE IF NOT EXISTS tokens(
accessToken VARCHAR(128) NOT NULL UNIQUE,
identifier VARCHAR(128) NOT NULL, // resourceId: app id or user id
clientId VARCHAR(128),
scopeJson TEXT,
scope VARCHAR(512) NOT NULL,
expires BIGINT NOT NULL, // FIXME: make this a timestamp
lastUsedTime TIMESTAMP NULL,
PRIMARY KEY(accessToken));
@@ -102,7 +102,6 @@ CREATE TABLE IF NOT EXISTS apps(
appStoreIcon MEDIUMBLOB,
icon MEDIUMBLOB,
crontab TEXT,
upstreamUri VARCHAR(256) DEFAULT "",
FOREIGN KEY(mailboxDomain) REFERENCES domains(domain),
FOREIGN KEY(taskId) REFERENCES tasks(id),
@@ -217,8 +216,6 @@ CREATE TABLE IF NOT EXISTS mailboxes(
domain VARCHAR(128),
active BOOLEAN DEFAULT 1,
enablePop3 BOOLEAN DEFAULT 0,
storageQuota BIGINT DEFAULT 0,
messagesQuota BIGINT DEFAULT 0,
FOREIGN KEY(domain) REFERENCES mail(domain),
FOREIGN KEY(aliasDomain) REFERENCES mail(domain),
@@ -300,17 +297,4 @@ CREATE TABLE IF NOT EXISTS blobs(
value MEDIUMBLOB,
PRIMARY KEY(id));
CREATE TABLE IF NOT EXISTS appLinks(
id VARCHAR(128) NOT NULL UNIQUE,
accessRestrictionJson TEXT, // { users: [ ], groups: [ ] }
creationTime TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, // when the app was installed
updateTime TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, // when the last app update was done
ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, // when this db record was updated (useful for UI caching)
label VARCHAR(128), // display name
tagsJson VARCHAR(2048), // array of tags
icon MEDIUMBLOB,
upstreamUri VARCHAR(256) DEFAULT "",
PRIMARY KEY(id));
CHARACTER SET utf8 COLLATE utf8_bin;
+147 -840
View File
File diff suppressed because it is too large Load Diff
+2 -2
View File
@@ -18,7 +18,7 @@
"aws-sdk": "^2.1115.0",
"basic-auth": "^2.0.1",
"body-parser": "^1.20.0",
"cloudron-manifestformat": "^5.18.0",
"cloudron-manifestformat": "^5.16.0",
"connect": "^3.7.0",
"connect-lastmile": "^2.1.1",
"connect-timeout": "^1.9.0",
@@ -34,7 +34,6 @@
"express": "^4.17.3",
"ipaddr.js": "^2.0.1",
"js-yaml": "^4.1.0",
"jsdom": "^20.0.0",
"json": "^11.0.0",
"jsonwebtoken": "^8.5.1",
"ldapjs": "^2.3.2",
@@ -45,6 +44,7 @@
"multiparty": "^4.2.3",
"mysql": "^2.18.1",
"nodemailer": "^6.7.3",
"nodemailer-smtp-transport": "^2.7.4",
"progress-stream": "^2.0.0",
"qrcode": "^1.5.0",
"readdirp": "^3.6.0",
+10 -21
View File
@@ -11,7 +11,7 @@ trap exitHandler EXIT
# change this to a hash when we make a upgrade release
readonly LOG_FILE="/var/log/cloudron-setup.log"
readonly MINIMUM_DISK_SIZE_GB="18" # this is the size of "/" and required to fit in docker images 18 is a safe bet for different reporting on 20GB min
readonly MINIMUM_MEMORY="960" # this is mostly reported for 1GB main memory (DO 992, EC2 967, Linode 989, Serverdiscounter.com 974)
readonly MINIMUM_MEMORY="974" # this is mostly reported for 1GB main memory (DO 992, EC2 990, Linode 989, Serverdiscounter.com 974)
readonly curl="curl --fail --connect-timeout 20 --retry 10 --retry-delay 2 --max-time 2400"
@@ -236,31 +236,20 @@ while true; do
sleep 10
done
ip4=$(curl -s --fail --connect-timeout 2 --max-time 2 https://ipv4.api.cloudron.io/api/v1/helper/public_ip | sed -n -e 's/.*"ip": "\(.*\)"/\1/p' || true)
ip6=$(curl -s --fail --connect-timeout 2 --max-time 2 https://ipv6.api.cloudron.io/api/v1/helper/public_ip | sed -n -e 's/.*"ip": "\(.*\)"/\1/p' || true)
url4=""
url6=""
fallbackUrl=""
if [[ -z "${setupToken}" ]]; then
[[ -n "${ip4}" ]] && url4="https://${ip4}"
[[ -n "${ip6}" ]] && url6="https://[${ip6}]"
[[ -z "${ip4}" && -z "${ip6}" ]] && fallbackUrl="https://<IP>"
else
[[ -n "${ip4}" ]] && url4="https://${ip4}/?setupToken=${setupToken}"
[[ -n "${ip6}" ]] && url6="https://[${ip6}]/?setupToken=${setupToken}"
[[ -z "${ip4}" && -z "${ip6}" ]] && fallbackUrl="https://<IP>?setupToken=${setupToken}"
if ! ip=$(curl -s --fail --connect-timeout 2 --max-time 2 https://ipv4.api.cloudron.io/api/v1/helper/public_ip | sed -n -e 's/.*"ip": "\(.*\)"/\1/p'); then
ip='<IP>'
fi
echo -e "\n\n${GREEN}After reboot, visit one of the following URLs and accept the self-signed certificate to finish setup.${DONE}\n"
[[ -n "${url4}" ]] && echo -e " * ${GREEN}${url4}${DONE}"
[[ -n "${url6}" ]] && echo -e " * ${GREEN}${url6}${DONE}"
[[ -n "${fallbackUrl}" ]] && echo -e " * ${GREEN}${fallbackUrl}${DONE}"
if [[ -z "${setupToken}" ]]; then
url="https://${ip}"
else
url="https://${ip}/?setupToken=${setupToken}"
fi
echo -e "\n\n${GREEN}After reboot, visit ${url} and accept the self-signed certificate to finish setup.${DONE}\n"
if [[ "${rebootServer}" == "true" ]]; then
systemctl stop box mysql # sometimes mysql ends up having corrupt privilege tables
# https://www.gnu.org/savannah-checkouts/gnu/bash/manual/bash.html#ANSI_002dC-Quoting
read -p $'\n'"The server has to be rebooted to apply all the settings. Reboot now ? [Y/n] " yn
read -p "The server has to be rebooted to apply all the settings. Reboot now ? [Y/n] " yn
yn=${yn:-y}
case $yn in
[Yy]* ) exitHandler; systemctl reboot;;
+2 -3
View File
@@ -1,7 +1,6 @@
#!/bin/bash
# This script is run on the base ubuntu. Put things here which are managed by ubuntu
# This script is also run after ubuntu upgrade
set -euv -o pipefail
@@ -121,13 +120,13 @@ else
if ! apt-get install -y --no-install-recommends collectd collectd-utils; then
# FQDNLookup is true in default debian config. The box code has a custom collectd.conf that fixes this
echo "Failed to install collectd, continuing anyway. Presumably because of http://mailman.verplant.org/pipermail/collectd/2015-March/006491.html"
sed -e 's/^FQDNLookup true/FQDNLookup false/' -i /etc/collectd/collectd.conf
fi
if [[ "${ubuntu_version}" == "20.04" ]]; then
echo -e "\nLD_PRELOAD=/usr/lib/python3.8/config-3.8-x86_64-linux-gnu/libpython3.8.so" >> /etc/default/collectd
fi
fi
sed -e 's/^FQDNLookup true/FQDNLookup false/' -i /etc/collectd/collectd.conf
# some hosts like atlantic install ntp which conflicts with timedatectl. https://serverfault.com/questions/1024770/ubuntu-20-04-time-sync-problems-and-possibly-incorrect-status-information
echo "==> Configuring host"
@@ -180,7 +179,7 @@ systemctl disable systemd-resolved || true
ufw disable || true
# we need unbound to work as this is required for installer.sh to do any DNS requests
echo -e "server:\n\tinterface: 127.0.0.1\n" > /etc/unbound/unbound.conf.d/cloudron-network.conf
echo -e "server:\n\tinterface: 127.0.0.1\n\tdo-ip6: no" > /etc/unbound/unbound.conf.d/cloudron-network.conf
systemctl restart unbound
# Ubuntu 22 has private home directories by default (https://discourse.ubuntu.com/t/private-home-directories-for-ubuntu-21-04-onwards/)
+4 -11
View File
@@ -69,7 +69,7 @@ readonly ubuntu_codename=$(lsb_release -cs)
readonly is_update=$(systemctl is-active -q box && echo "yes" || echo "no")
log "Updating from $(cat $box_src_dir/VERSION 2>/dev/null) to $(cat $box_src_tmp_dir/VERSION 2>/dev/null)"
log "Updating from $(cat $box_src_dir/VERSION) to $(cat $box_src_tmp_dir/VERSION)"
# https://docs.docker.com/engine/installation/linux/ubuntulinux/
readonly docker_version=20.10.14
@@ -145,18 +145,12 @@ log "downloading new addon images"
images=$(node -e "let i = require('${box_src_tmp_dir}/src/infra_version.js'); console.log(i.baseImages.map(function (x) { return x.tag; }).join(' '), Object.keys(i.images).map(function (x) { return i.images[x].tag; }).join(' '));")
log "\tPulling docker images: ${images}"
if ! curl -s --fail --connect-timeout 2 --max-time 2 https://ipv4.api.cloudron.io/api/v1/helper/public_ip; then
docker_registry=registry.ipv6.docker.com
else
docker_registry=registry-1.docker.io
fi
for image in ${images}; do
while ! docker pull "${docker_registry}/${image}"; do # this pulls the image using the sha256
while ! docker pull "${image}"; do # this pulls the image using the sha256
log "Could not pull ${image}"
sleep 5
done
while ! docker pull "${docker_registry}/${image%@sha256:*}"; do # this will tag the image for readability
while ! docker pull "${image%@sha256:*}"; do # this will tag the image for readability
log "Could not pull ${image%@sha256:*}"
sleep 5
done
@@ -169,8 +163,7 @@ CLOUDRON_SYSLOG_VERSION="1.1.0"
while [[ ! -f "${CLOUDRON_SYSLOG}" || "$(${CLOUDRON_SYSLOG} --version)" != ${CLOUDRON_SYSLOG_VERSION} ]]; do
rm -rf "${CLOUDRON_SYSLOG_DIR}"
mkdir -p "${CLOUDRON_SYSLOG_DIR}"
# verbatim is not needed in node 18 since that is the default there. in node 16, ipv4 is preferred and this breaks on ipv6 only servers
if NODE_OPTIONS="--dns-result-order=verbatim" npm install --unsafe-perm -g --prefix "${CLOUDRON_SYSLOG_DIR}" cloudron-syslog@${CLOUDRON_SYSLOG_VERSION}; then break; fi
if npm install --unsafe-perm -g --prefix "${CLOUDRON_SYSLOG_DIR}" cloudron-syslog@${CLOUDRON_SYSLOG_VERSION}; then break; fi
log "Failed to install cloudron-syslog, trying again"
sleep 5
done
-32
View File
@@ -1,32 +0,0 @@
#!/bin/bash
set -eu -o pipefail
readonly logfile="/home/yellowtent/platformdata/logs/box.log"
if [[ ${EUID} -ne 0 ]]; then
echo "This script should be run as root." > /dev/stderr
exit 1
fi
echo "This will re-create all the containers. Services will go down for a bit."
read -p "Do you want to proceed? (y/N) " -n 1 -r choice
echo
if [[ ! $choice =~ ^[Yy]$ ]]; then
exit 1
fi
echo -n "Re-creating addon containers (this takes a while) ."
line_count=$(cat /home/yellowtent/platformdata/logs/box.log | wc -l)
sed -e 's/"version": ".*",/"version":"48.0.0",/' -i /home/yellowtent/platformdata/INFRA_VERSION
systemctl restart box
while ! tail -n "+${line_count}" "${logfile}" | grep -q "platform is ready"; do
echo -n "."
sleep 2
done
echo -e "\nDone.\nThe Cloudron dashboard will say 'Configuring (Queued)' for each app. The apps will come up in a short while."
+11 -32
View File
@@ -4,51 +4,30 @@
printf "**********************************************************************\n\n"
readonly cache_file4="/var/cache/cloudron-motd-cache4"
readonly cache_file6="/var/cache/cloudron-motd-cache6"
cache_file="/var/cache/cloudron-motd-cache"
url4=""
url6=""
fallbackUrl=""
function detectIp() {
if [[ ! -f "${cache_file4}" ]]; then
ip4=$(curl -s --fail --connect-timeout 2 --max-time 2 https://ipv4.api.cloudron.io/api/v1/helper/public_ip | sed -n -e 's/.*"ip": "\(.*\)"/\1/p' || true)
[[ -n "${ip4}" ]] && echo "${ip4}" > "${cache_file4}"
else
ip4=$(cat "${cache_file4}")
if [[ -z "$(ls -A /home/yellowtent/platformdata/addons/mail/dkim)" ]]; then
if [[ ! -f "${cache_file}" ]]; then
curl --fail --connect-timeout 2 --max-time 2 -q https://ipv4.api.cloudron.io/api/v1/helper/public_ip --output "${cache_file}" || true
fi
if [[ ! -f "${cache_file6}" ]]; then
ip6=$(curl -s --fail --connect-timeout 2 --max-time 2 https://ipv6.api.cloudron.io/api/v1/helper/public_ip | sed -n -e 's/.*"ip": "\(.*\)"/\1/p' || true)
[[ -n "${ip6}" ]] && echo "${ip6}" > "${cache_file6}"
if [[ -f "${cache_file}" ]]; then
ip=$(sed -n -e 's/.*"ip": "\(.*\)"/\1/p' /var/cache/cloudron-motd-cache)
else
ip6=$(cat "${cache_file6}")
ip='<IP>'
fi
if [[ ! -f /etc/cloudron/SETUP_TOKEN ]]; then
[[ -n "${ip4}" ]] && url4="https://${ip4}"
[[ -n "${ip6}" ]] && url6="https://[${ip6}]"
[[ -z "${ip4}" && -z "${ip6}" ]] && fallbackUrl="https://<IP>"
url="https://${ip}"
else
setupToken="$(cat /etc/cloudron/SETUP_TOKEN)"
[[ -n "${ip4}" ]] && url4="https://${ip4}/?setupToken=${setupToken}"
[[ -n "${ip6}" ]] && url6="https://[${ip6}]/?setupToken=${setupToken}"
[[ -z "${ip4}" && -z "${ip6}" ]] && fallbackUrl="https://<IP>?setupToken=${setupToken}"
url="https://${ip}/?setupToken=${setupToken}"
fi
}
if [[ -z "$(ls -A /home/yellowtent/platformdata/addons/mail/dkim)" ]]; then
detectIp
printf "\t\t\tWELCOME TO CLOUDRON\n"
printf "\t\t\t-------------------\n"
printf '\n\e[1;32m%-6s\e[m\n' "Visit one of the following URLs on your browser and accept the self-signed certificate to finish setup."
[[ -n "${url4}" ]] && printf '\e[1;32m%-6s\e[m\n' " * ${url4}"
[[ -n "${url6}" ]] && printf '\e[1;32m%-6s\e[m\n' " * ${url6}"
[[ -n "${fallbackUrl}" ]] && printf '\e[1;32m%-6s\e[m\n' " * ${fallbackUrl}"
printf "\nCloudron overview - https://docs.cloudron.io/ \n"
printf '\n\e[1;32m%-6s\e[m\n\n' "Visit ${url} on your browser and accept the self-signed certificate to finish setup."
printf "Cloudron overview - https://docs.cloudron.io/ \n"
printf "Cloudron setup - https://docs.cloudron.io/installation/#setup \n"
else
printf "\t\t\tNOTE TO CLOUDRON ADMINS\n"
+1 -1
View File
@@ -6,7 +6,7 @@ server:
interface: 127.0.0.1
interface: 172.18.0.1
ip-freebind: yes
do-ip6: yes
do-ip6: no
access-control: 127.0.0.1 allow
access-control: 172.18.0.1/16 allow
cache-max-negative-ttl: 30
+26
View File
@@ -0,0 +1,26 @@
'use strict';
exports = module.exports = {
verifyToken
};
const assert = require('assert'),
BoxError = require('./boxerror.js'),
safe = require('safetydance'),
tokens = require('./tokens.js'),
users = require('./users.js');
async function verifyToken(accessToken) {
assert.strictEqual(typeof accessToken, 'string');
const token = await tokens.getByAccessToken(accessToken);
if (!token) throw new BoxError(BoxError.INVALID_CREDENTIALS, 'No such token');
const user = await users.get(token.identifier);
if (!user) throw new BoxError(BoxError.INVALID_CREDENTIALS, 'User not found');
if (!user.active) throw new BoxError(BoxError.INVALID_CREDENTIALS, 'User not active');
await safe(tokens.update(token.id, { lastUsedTime: new Date() })); // ignore any error
return user;
}
+11 -11
View File
@@ -522,32 +522,32 @@ Acme2.prototype.loadDirectory = async function () {
});
};
Acme2.prototype.getCertificate = async function (fqdn, domain, paths) {
assert.strictEqual(typeof fqdn, 'string');
Acme2.prototype.getCertificate = async function (vhost, domain, paths) {
assert.strictEqual(typeof vhost, 'string');
assert.strictEqual(typeof domain, 'string');
assert.strictEqual(typeof paths, 'object');
debug(`getCertificate: start acme flow for ${fqdn} from ${this.caDirectory}`);
debug(`getCertificate: start acme flow for ${vhost} from ${this.caDirectory}`);
if (fqdn !== domain && this.wildcard) { // bare domain is not part of wildcard SAN
fqdn = dns.makeWildcard(fqdn);
debug(`getCertificate: will get wildcard cert for ${fqdn}`);
if (vhost !== domain && this.wildcard) { // bare domain is not part of wildcard SAN
vhost = dns.makeWildcard(vhost);
debug(`getCertificate: will get wildcard cert for ${vhost}`);
}
await this.loadDirectory();
await this.acmeFlow(fqdn, domain, paths);
await this.acmeFlow(vhost, domain, paths);
};
async function getCertificate(fqdn, domain, paths, options) {
assert.strictEqual(typeof fqdn, 'string'); // this can also be a wildcard domain (for alias domains)
async function getCertificate(vhost, domain, paths, options) {
assert.strictEqual(typeof vhost, 'string'); // this can also be a wildcard domain (for alias domains)
assert.strictEqual(typeof domain, 'string');
assert.strictEqual(typeof paths, 'object');
assert.strictEqual(typeof options, 'object');
await promiseRetry({ times: 3, interval: 0, debug }, async function () {
debug(`getCertificate: for fqdn ${fqdn} and domain ${domain}`);
debug(`getCertificate: for vhost ${vhost} and domain ${domain}`);
const acme = new Acme2(options || { });
return await acme.getCertificate(fqdn, domain, paths);
return await acme.getCertificate(vhost, domain, paths);
});
}
+10 -18
View File
@@ -70,35 +70,25 @@ async function checkAppHealth(app, options) {
const manifest = app.manifest;
let healthCheckUrl, host;
if (app.manifest.id === constants.PROXY_APP_APPSTORE_ID) {
healthCheckUrl = app.upstreamUri;
host = '';
} else {
const [error, data] = await safe(docker.inspect(app.containerId));
if (error || !data || !data.State) return await setHealth(app, apps.HEALTH_ERROR);
if (data.State.Running !== true) return await setHealth(app, apps.HEALTH_DEAD);
const [error, data] = await safe(docker.inspect(app.containerId));
if (error || !data || !data.State) return await setHealth(app, apps.HEALTH_ERROR);
if (data.State.Running !== true) return await setHealth(app, apps.HEALTH_DEAD);
// non-appstore apps may not have healthCheckPath
if (!manifest.healthCheckPath) return await setHealth(app, apps.HEALTH_HEALTHY);
healthCheckUrl = `http://${app.containerIp}:${manifest.httpPort}${manifest.healthCheckPath}`;
host = app.fqdn;
}
// non-appstore apps may not have healthCheckPath
if (!manifest.healthCheckPath) return await setHealth(app, apps.HEALTH_HEALTHY);
const healthCheckUrl = `http://${app.containerIp}:${manifest.httpPort}${manifest.healthCheckPath}`;
const [healthCheckError, response] = await safe(superagent
.get(healthCheckUrl)
.set('Host', host) // required for some apache configs with rewrite rules
.set('Host', app.fqdn) // required for some apache configs with rewrite rules
.set('User-Agent', 'Mozilla (CloudronHealth)') // required for some apps (e.g. minio)
.redirects(0)
.ok(() => true)
.timeout(options.timeout * 1000));
if (healthCheckError) {
await apps.appendLogLine(app, `=> Healtheck error: ${healthCheckError}`);
await setHealth(app, apps.HEALTH_UNHEALTHY);
} else if (response.status > 403) { // 2xx and 3xx are ok. even 401 and 403 are ok for now (for WP sites)
await apps.appendLogLine(app, `=> Healtheck error got response status ${response.status}`);
await setHealth(app, apps.HEALTH_UNHEALTHY);
} else {
await setHealth(app, apps.HEALTH_HEALTHY);
@@ -139,7 +129,9 @@ async function processDockerEvents(options) {
const [error, info] = await safe(getContainerInfo(containerId));
const program = error ? containerId : (info.addonName || info.app.fqdn);
const now = Date.now();
const notifyUser = !info?.app?.debugMode && ((now - gLastOomMailTime) > OOM_EVENT_LIMIT);
// do not send mails for dev apps
const notifyUser = !(info.app && info.app.debugMode) && ((now - gLastOomMailTime) > OOM_EVENT_LIMIT);
debug(`OOM ${program} notifyUser: ${notifyUser}. lastOomTime: ${gLastOomMailTime} (now: ${now})`);
-178
View File
@@ -1,178 +0,0 @@
'use strict';
exports = module.exports = {
list,
listByUser,
add,
get,
update,
remove,
getIcon
};
const assert = require('assert'),
apps = require('./apps.js'),
database = require('./database.js'),
BoxError = require('./boxerror.js'),
uuid = require('uuid'),
safe = require('safetydance'),
superagent = require('superagent'),
validator = require('validator'),
jsdom = require('jsdom'),
debug = require('debug')('box:applinks');
const APPLINKS_FIELDS= [ 'id', 'accessRestrictionJson', 'creationTime', 'updateTime', 'ts', 'label', 'tagsJson', 'icon', 'upstreamUri' ].join(',');
function postProcess(result) {
assert.strictEqual(typeof result, 'object');
assert(result.tagsJson === null || typeof result.tagsJson === 'string');
result.tags = safe.JSON.parse(result.tagsJson) || [];
delete result.tagsJson;
assert(result.accessRestrictionJson === null || typeof result.accessRestrictionJson === 'string');
result.accessRestriction = safe.JSON.parse(result.accessRestrictionJson);
if (result.accessRestriction && !result.accessRestriction.users) result.accessRestriction.users = [];
delete result.accessRestrictionJson;
result.ts = new Date(result.ts).getTime();
result.icon = result.icon ? result.icon : null;
}
async function list() {
const results = await database.query(`SELECT ${APPLINKS_FIELDS} FROM applinks ORDER BY upstreamUri`);
results.forEach(postProcess);
return results;
}
async function listByUser(user) {
assert.strictEqual(typeof user, 'object');
const result = await list();
return result.filter((app) => apps.canAccess(app, user));
}
async function detectMetaInfo(applink) {
assert.strictEqual(typeof applink, 'object');
const [error, response] = await safe(superagent.get(applink.upstreamUri));
if (error || !response.text) throw new BoxError(BoxError.BAD_FIELD, 'cannot fetch upstream uri for favicon and label');
// fixup upstreamUri to match the redirect
if (response.redirects && response.redirects.length) {
debug(`detectMetaInfo: found redirect from ${applink.upstreamUri} to ${response.redirects[0]}`);
applink.upstreamUri = response.redirects[0];
}
if (applink.favicon && applink.label) return;
const dom = new jsdom.JSDOM(response.text);
if (!applink.icon) {
let favicon = '';
if (dom.window.document.querySelector('link[rel="apple-touch-icon"]')) favicon = dom.window.document.querySelector('link[rel="apple-touch-icon"]').href ;
if (!favicon.endsWith('.png') && dom.window.document.querySelector('meta[name="msapplication-TileImage"]')) favicon = dom.window.document.querySelector('meta[name="msapplication-TileImage"]').content ;
if (!favicon.endsWith('.png') && dom.window.document.querySelector('link[rel="shortcut icon"]')) favicon = dom.window.document.querySelector('link[rel="shortcut icon"]').href ;
if (!favicon.endsWith('.png') && dom.window.document.querySelector('link[rel="icon"]')) favicon = dom.window.document.querySelector('link[rel="icon"]').href ;
if (!favicon.endsWith('.png') && dom.window.document.querySelector('meta[itemprop="image"]')) favicon = dom.window.document.querySelector('meta[itemprop="image"]').content;
if (favicon) {
if (favicon.startsWith('/')) favicon = applink.upstreamUri + favicon;
const [error, response] = await safe(superagent.get(favicon));
if (error) console.error(`Failed to fetch icon ${favicon}: `, error);
else if (response.ok && response.headers['content-type'] === 'image/png') applink.icon = response.body;
else console.error(`Failed to fetch icon ${favicon}: statusCode=${response.status}`);
} else {
console.error(`Unable to find a suitable icon for ${applink.upstreamUri}`);
}
}
if (!applink.label) {
if (dom.window.document.querySelector('meta[property="og:title"]')) applink.label = dom.window.document.querySelector('meta[property="og:title"]').content;
else if (dom.window.document.querySelector('meta[property="og:site_name"]')) applink.label = dom.window.document.querySelector('meta[property="og:site_name"]').content;
else if (dom.window.document.title) applink.label = dom.window.document.title;
}
}
async function add(applink) {
assert.strictEqual(typeof applink, 'object');
assert.strictEqual(typeof applink.upstreamUri, 'string');
debug(`add: ${applink.upstreamUri}`, applink);
if (applink.icon) {
if (!validator.isBase64(applink.icon)) throw new BoxError(BoxError.BAD_FIELD, 'icon is not base64');
applink.icon = Buffer.from(applink.icon, 'base64');
}
await detectMetaInfo(applink);
const data = {
id: uuid.v4(),
accessRestrictionJson: applink.accessRestriction ? JSON.stringify(applink.accessRestriction) : null,
label: applink.label || '',
tagsJson: applink.tags ? JSON.stringify(applink.tags) : null,
icon: applink.icon || null,
upstreamUri: applink.upstreamUri
};
const query = 'INSERT INTO applinks (id, accessRestrictionJson, label, tagsJson, icon, upstreamUri) VALUES (?, ?, ?, ?, ?, ?)';
const args = [ data.id, data.accessRestrictionJson, data.label, data.tagsJson, data.icon, data.upstreamUri ];
const [error] = await safe(database.query(query, args));
if (error) throw error;
return data.id;
}
async function get(applinkId) {
assert.strictEqual(typeof applinkId, 'string');
const result = await database.query(`SELECT ${APPLINKS_FIELDS} FROM applinks WHERE id = ?`, [ applinkId ]);
if (result.length === 0) throw new BoxError(BoxError.NOT_FOUND, 'Applink not found');
postProcess(result[0]);
return result[0];
}
async function update(applinkId, applink) {
assert.strictEqual(typeof applinkId, 'string');
assert.strictEqual(typeof applink, 'object');
assert.strictEqual(typeof applink.upstreamUri, 'string');
debug(`update: ${applinkId} ${applink.upstreamUri}`, applink);
if (applink.icon) {
if (!validator.isBase64(applink.icon)) throw new BoxError(BoxError.BAD_FIELD, 'icon is not base64');
applink.icon = Buffer.from(applink.icon, 'base64');
}
await detectMetaInfo(applink);
const query = 'UPDATE applinks SET label=?, icon=?, upstreamUri=?, tagsJson=?, accessRestrictionJson=? WHERE id = ?';
const args = [ applink.label, applink.icon || null, applink.upstreamUri, applink.tags ? JSON.stringify(applink.tags) : null, applink.accessRestriction ? JSON.stringify(applink.accessRestriction) : null, applinkId ];
const result = await database.query(query, args);
if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Applink not found');
}
async function remove(applinkId) {
assert.strictEqual(typeof applinkId, 'string');
debug(`remove: ${applinkId}`);
const result = await database.query(`DELETE FROM applinks WHERE id = ?`, [ applinkId ]);
if (result.affectedRows !== 1) throw new BoxError(BoxError.NOT_FOUND, 'Applink not found');
}
async function getIcon(applinkId) {
assert.strictEqual(typeof applinkId, 'string');
const applink = await get(applinkId);
return applink.icon;
}
+35 -114
View File
@@ -27,7 +27,6 @@ exports = module.exports = {
setAccessRestriction,
setOperators,
setCrontab,
setUpstreamUri,
setLabel,
setIcon,
setTags,
@@ -61,8 +60,6 @@ exports = module.exports = {
getLogPaths,
getLogs,
appendLogLine,
getCertificate,
start,
@@ -162,7 +159,6 @@ const appstore = require('./appstore.js'),
mail = require('./mail.js'),
manifestFormat = require('cloudron-manifestformat'),
mounts = require('./mounts.js'),
notifications = require('./notifications.js'),
once = require('./once.js'),
os = require('os'),
path = require('path'),
@@ -190,7 +186,7 @@ const APPS_FIELDS_PREFIXED = [ 'apps.id', 'apps.appStoreId', 'apps.installationS
'apps.health', 'apps.containerId', 'apps.manifestJson', 'apps.accessRestrictionJson', 'apps.memoryLimit', 'apps.cpuShares',
'apps.label', 'apps.tagsJson', 'apps.taskId', 'apps.reverseProxyConfigJson', 'apps.servicesConfigJson', 'apps.operatorsJson',
'apps.sso', 'apps.debugModeJson', 'apps.enableBackup', 'apps.proxyAuth', 'apps.containerIp', 'apps.crontab',
'apps.creationTime', 'apps.updateTime', 'apps.enableAutomaticUpdate', 'apps.upstreamUri',
'apps.creationTime', 'apps.updateTime', 'apps.enableAutomaticUpdate',
'apps.enableMailbox', 'apps.mailboxDisplayName', 'apps.mailboxName', 'apps.mailboxDomain', 'apps.enableInbox', 'apps.inboxName', 'apps.inboxDomain',
'apps.storageVolumeId', 'apps.storageVolumePrefix', 'apps.ts', 'apps.healthTime', '(apps.icon IS NOT NULL) AS hasIcon', '(apps.appStoreIcon IS NOT NULL) AS hasAppStoreIcon' ].join(',');
@@ -243,10 +239,7 @@ function validatePortBindings(portBindings, manifest) {
if (!portBindings) return null;
const tcpPorts = manifest.tcpPorts || { };
const udpPorts = manifest.udpPorts || { };
for (const portName in portBindings) {
for (let portName in portBindings) {
if (!/^[a-zA-Z0-9_]+$/.test(portName)) return new BoxError(BoxError.BAD_FIELD, `${portName} is not a valid environment variable in portBindings`);
const hostPort = portBindings[portName];
@@ -254,11 +247,14 @@ function validatePortBindings(portBindings, manifest) {
if (RESERVED_PORTS.indexOf(hostPort) !== -1) return new BoxError(BoxError.BAD_FIELD, `Port ${hostPort} for ${portName} is reserved in portBindings`);
if (RESERVED_PORT_RANGES.find(range => (hostPort >= range[0] && hostPort <= range[1]))) return new BoxError(BoxError.BAD_FIELD, `Port ${hostPort} for ${portName} is reserved in portBindings`);
if (ALLOWED_PORTS.indexOf(hostPort) === -1 && (hostPort <= 1023 || hostPort > 65535)) return new BoxError(BoxError.BAD_FIELD, `${hostPort} for ${portName} is not in permitted range in portBindings`);
}
// it is OK if there is no 1-1 mapping between values in manifest.tcpPorts and portBindings. missing values implies the service is disabled
const portSpec = tcpPorts[portName] || udpPorts[portName];
if (!portSpec) return new BoxError(BoxError.BAD_FIELD, `Invalid portBinding ${portName}`);
if (portSpec.readOnly && portSpec.defaultValue !== hostPort) return new BoxError(BoxError.BAD_FIELD, `portBinding ${portName} is readOnly and cannot have a different value that the default`);
// it is OK if there is no 1-1 mapping between values in manifest.tcpPorts and portBindings. missing values implies
// that the user wants the service disabled
const tcpPorts = manifest.tcpPorts || { };
const udpPorts = manifest.udpPorts || { };
for (let portName in portBindings) {
if (!(portName in tcpPorts) && !(portName in udpPorts)) return new BoxError(BoxError.BAD_FIELD, `Invalid portBindings ${portName}`);
}
return null;
@@ -458,23 +454,6 @@ function validateBackupFormat(format) {
return new BoxError(BoxError.BAD_FIELD, 'Invalid backup format');
}
function validateUpstreamUri(upstreamUri) {
assert.strictEqual(typeof upstreamUri, 'string');
if (!upstreamUri) return null;
const uri = safe(() => new URL(upstreamUri));
if (!uri) return new BoxError(BoxError.BAD_FIELD, `upstreamUri is invalid: ${safe.error.message}`);
if (uri.protocol !== 'http:' && uri.protocol !== 'https:') return new BoxError(BoxError.BAD_FIELD, 'upstreamUri has an unsupported scheme');
if (uri.search || uri.hash) return new BoxError(BoxError.BAD_FIELD, 'upstreamUri cannot have search or hash');
if (uri.pathname !== '/') return new BoxError(BoxError.BAD_FIELD, 'upstreamUri cannot have a path');
// we use the uri in a named location @wellknown-upstream. nginx does not support having paths in it
if (upstreamUri.endsWith('/')) return new BoxError(BoxError.BAD_FIELD, 'upstreamUri cannot have a path');
return null;
}
function validateLabel(label) {
if (label === null) return null;
@@ -572,36 +551,21 @@ async function getStorageDir(app) {
return path.join(volume.hostPath, app.storageVolumePrefix);
}
function removeCertificateKeys(app) {
if (app.certificate) delete app.certificate.key;
app.secondaryDomains.forEach(sd => { if (sd.certificate) delete sd.certificate.key; });
app.aliasDomains.forEach(ad => { if (ad.certificate) delete ad.certificate.key; });
app.redirectDomains.forEach(rd => { if (rd.certificate) delete rd.certificate.key; });
}
function removeInternalFields(app) {
const result = _.pick(app,
return _.pick(app,
'id', 'appStoreId', 'installationState', 'error', 'runState', 'health', 'taskId',
'subdomain', 'domain', 'fqdn', 'certificate', 'crontab', 'upstreamUri',
'subdomain', 'domain', 'fqdn', 'crontab',
'accessRestriction', 'manifest', 'portBindings', 'iconUrl', 'memoryLimit', 'cpuShares', 'operators',
'sso', 'debugMode', 'reverseProxyConfig', 'enableBackup', 'creationTime', 'updateTime', 'ts', 'tags',
'label', 'secondaryDomains', 'redirectDomains', 'aliasDomains', 'env', 'enableAutomaticUpdate',
'storageVolumeId', 'storageVolumePrefix', 'mounts',
'label', 'secondaryDomains', 'redirectDomains', 'aliasDomains', 'env', 'enableAutomaticUpdate', 'storageVolumeId', 'storageVolumePrefix', 'mounts',
'enableMailbox', 'mailboxDisplayName', 'mailboxName', 'mailboxDomain', 'enableInbox', 'inboxName', 'inboxDomain');
removeCertificateKeys(result);
return result;
}
// non-admins can only see these
function removeRestrictedFields(app) {
const result = _.pick(app,
'id', 'appStoreId', 'installationState', 'error', 'runState', 'health', 'taskId', 'accessRestriction',
'secondaryDomains', 'redirectDomains', 'aliasDomains', 'sso', 'subdomain', 'domain', 'fqdn', 'certificate',
'manifest', 'portBindings', 'iconUrl', 'creationTime', 'ts', 'tags', 'label', 'enableBackup', 'upstreamUri');
removeCertificateKeys(result);
return result;
return _.pick(app,
'id', 'appStoreId', 'installationState', 'error', 'runState', 'health', 'taskId', 'accessRestriction', 'secondaryDomains', 'redirectDomains', 'aliasDomains', 'sso',
'subdomain', 'domain', 'fqdn', 'manifest', 'portBindings', 'iconUrl', 'creationTime', 'ts', 'tags', 'label', 'enableBackup');
}
async function getIcon(app, options) {
@@ -699,35 +663,30 @@ function postProcess(result) {
const subdomains = JSON.parse(result.subdomains),
domains = JSON.parse(result.domains),
subdomainTypes = JSON.parse(result.subdomainTypes),
subdomainEnvironmentVariables = JSON.parse(result.subdomainEnvironmentVariables),
subdomainCertificateJsons = JSON.parse(result.subdomainCertificateJsons);
subdomainEnvironmentVariables = JSON.parse(result.subdomainEnvironmentVariables);
delete result.subdomains;
delete result.domains;
delete result.subdomainTypes;
delete result.subdomainEnvironmentVariables;
delete result.subdomainCertificateJsons;
result.secondaryDomains = [];
result.redirectDomains = [];
result.aliasDomains = [];
for (let i = 0; i < subdomainTypes.length; i++) {
const subdomain = subdomains[i], domain = domains[i], certificate = safe.JSON.parse(subdomainCertificateJsons[i]);
if (subdomainTypes[i] === exports.LOCATION_TYPE_PRIMARY) {
result.subdomain = subdomain;
result.domain = domain;
result.certificate = certificate;
result.subdomain = subdomains[i];
result.domain = domains[i];
} else if (subdomainTypes[i] === exports.LOCATION_TYPE_SECONDARY) {
result.secondaryDomains.push({ domain, subdomain, certificate, environmentVariable: subdomainEnvironmentVariables[i] });
result.secondaryDomains.push({ domain: domains[i], subdomain: subdomains[i], environmentVariable: subdomainEnvironmentVariables[i] });
} else if (subdomainTypes[i] === exports.LOCATION_TYPE_REDIRECT) {
result.redirectDomains.push({ domain, subdomain, certificate });
result.redirectDomains.push({ domain: domains[i], subdomain: subdomains[i] });
} else if (subdomainTypes[i] === exports.LOCATION_TYPE_ALIAS) {
result.aliasDomains.push({ domain, subdomain, certificate });
result.aliasDomains.push({ domain: domains[i], subdomain: subdomains[i] });
}
}
const envNames = JSON.parse(result.envNames), envValues = JSON.parse(result.envValues);
let envNames = JSON.parse(result.envNames), envValues = JSON.parse(result.envValues);
delete result.envNames;
delete result.envValues;
result.env = {};
@@ -735,7 +694,7 @@ function postProcess(result) {
if (envNames[i]) result.env[envNames[i]] = envValues[i];
}
const volumeIds = JSON.parse(result.volumeIds);
let volumeIds = JSON.parse(result.volumeIds);
delete result.volumeIds;
let volumeReadOnlys = JSON.parse(result.volumeReadOnlys);
delete result.volumeReadOnlys;
@@ -832,7 +791,6 @@ async function add(id, appStoreId, manifest, subdomain, domain, portBindings, da
reverseProxyConfigJson = data.reverseProxyConfig ? JSON.stringify(data.reverseProxyConfig) : null,
servicesConfigJson = data.servicesConfig ? JSON.stringify(data.servicesConfig) : null,
enableMailbox = data.enableMailbox || false,
upstreamUri = data.upstreamUri || '',
icon = data.icon || null;
const queries = [];
@@ -840,11 +798,11 @@ async function add(id, appStoreId, manifest, subdomain, domain, portBindings, da
queries.push({
query: 'INSERT INTO apps (id, appStoreId, manifestJson, installationState, runState, accessRestrictionJson, memoryLimit, cpuShares, '
+ 'sso, debugModeJson, mailboxName, mailboxDomain, label, tagsJson, reverseProxyConfigJson, servicesConfigJson, icon, '
+ 'enableMailbox, mailboxDisplayName, upstreamUri) '
+ ' VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
+ 'enableMailbox, mailboxDisplayName) '
+ ' VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
args: [ id, appStoreId, manifestJson, installationState, runState, accessRestrictionJson, memoryLimit, cpuShares,
sso, debugModeJson, mailboxName, mailboxDomain, label, tagsJson, reverseProxyConfigJson, servicesConfigJson, icon,
enableMailbox, mailboxDisplayName, upstreamUri ]
enableMailbox, mailboxDisplayName ]
});
queries.push({
@@ -1067,9 +1025,9 @@ async function getDomainObjectMap() {
// each query simply join apps table with another table by id. we then join the full result together
const PB_QUERY = 'SELECT id, GROUP_CONCAT(CAST(appPortBindings.hostPort AS CHAR(6))) AS hostPorts, GROUP_CONCAT(appPortBindings.environmentVariable) AS environmentVariables, GROUP_CONCAT(appPortBindings.type) AS portTypes FROM apps LEFT JOIN appPortBindings ON apps.id = appPortBindings.appId GROUP BY apps.id';
const ENV_QUERY = 'SELECT id, JSON_ARRAYAGG(appEnvVars.name) AS envNames, JSON_ARRAYAGG(appEnvVars.value) AS envValues FROM apps LEFT JOIN appEnvVars ON apps.id = appEnvVars.appId GROUP BY apps.id';
const SUBDOMAIN_QUERY = 'SELECT id, JSON_ARRAYAGG(locations.subdomain) AS subdomains, JSON_ARRAYAGG(locations.domain) AS domains, JSON_ARRAYAGG(locations.type) AS subdomainTypes, JSON_ARRAYAGG(locations.environmentVariable) AS subdomainEnvironmentVariables, JSON_ARRAYAGG(locations.certificateJson) AS subdomainCertificateJsons FROM apps LEFT JOIN locations ON apps.id = locations.appId GROUP BY apps.id';
const SUBDOMAIN_QUERY = 'SELECT id, JSON_ARRAYAGG(locations.subdomain) AS subdomains, JSON_ARRAYAGG(locations.domain) AS domains, JSON_ARRAYAGG(locations.type) AS subdomainTypes, JSON_ARRAYAGG(locations.environmentVariable) AS subdomainEnvironmentVariables FROM apps LEFT JOIN locations ON apps.id = locations.appId GROUP BY apps.id';
const MOUNTS_QUERY = 'SELECT id, JSON_ARRAYAGG(appMounts.volumeId) AS volumeIds, JSON_ARRAYAGG(appMounts.readOnly) AS volumeReadOnlys FROM apps LEFT JOIN appMounts ON apps.id = appMounts.appId GROUP BY apps.id';
const APPS_QUERY = `SELECT ${APPS_FIELDS_PREFIXED}, hostPorts, environmentVariables, portTypes, envNames, envValues, subdomains, domains, subdomainTypes, subdomainEnvironmentVariables, subdomainCertificateJsons, volumeIds, volumeReadOnlys FROM apps`
const APPS_QUERY = `SELECT ${APPS_FIELDS_PREFIXED}, hostPorts, environmentVariables, portTypes, envNames, envValues, subdomains, domains, subdomainTypes, subdomainEnvironmentVariables, volumeIds, volumeReadOnlys FROM apps`
+ ` LEFT JOIN (${PB_QUERY}) AS q1 on q1.id = apps.id`
+ ` LEFT JOIN (${ENV_QUERY}) AS q2 on q2.id = apps.id`
+ ` LEFT JOIN (${SUBDOMAIN_QUERY}) AS q3 on q3.id = apps.id`
@@ -1325,7 +1283,6 @@ async function install(data, auditSource) {
overwriteDns = 'overwriteDns' in data ? data.overwriteDns : false,
skipDnsSetup = 'skipDnsSetup' in data ? data.skipDnsSetup : false,
appStoreId = data.appStoreId,
upstreamUri = data.upstreamUri || '',
manifest = data.manifest;
let error = manifestFormat.parse(manifest);
@@ -1349,9 +1306,6 @@ async function install(data, auditSource) {
error = validateLabel(label);
if (error) throw error;
error = validateUpstreamUri(upstreamUri);
if (error) throw error;
error = validateTags(tags);
if (error) throw error;
@@ -1409,7 +1363,6 @@ async function install(data, auditSource) {
tags,
icon,
enableMailbox,
upstreamUri,
runState: exports.RSTATE_RUNNING,
installationState: exports.ISTATE_PENDING_INSTALL
};
@@ -1477,22 +1430,6 @@ async function setCrontab(app, crontab, auditSource) {
await eventlog.add(eventlog.ACTION_APP_CONFIGURE, auditSource, { appId, app, crontab });
}
async function setUpstreamUri(app, upstreamUri, auditSource) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof upstreamUri, 'string');
assert.strictEqual(typeof auditSource, 'object');
const appId = app.id;
const error = validateUpstreamUri(upstreamUri);
if (error) throw error;
await reverseProxy.writeAppConfigs(_.extend({}, app, { upstreamUri }));
await update(appId, { upstreamUri });
await eventlog.add(eventlog.ACTION_APP_CONFIGURE, auditSource, { appId, app, upstreamUri });
}
async function setLabel(app, label, auditSource) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof label, 'string');
@@ -1766,7 +1703,7 @@ async function setReverseProxyConfig(app, reverseProxyConfig, auditSource) {
error = validateRobotsTxt(reverseProxyConfig.robotsTxt);
if (error) throw error;
await reverseProxy.writeAppConfigs(_.extend({}, app, { reverseProxyConfig }));
await reverseProxy.writeAppConfig(_.extend({}, app, { reverseProxyConfig }));
await update(appId, { reverseProxyConfig });
@@ -1778,23 +1715,18 @@ async function setCertificate(app, data, auditSource) {
assert(data && typeof data === 'object');
assert.strictEqual(typeof auditSource, 'object');
const { subdomain, domain, cert, key } = data;
const appId = app.id;
const { location, domain, cert, key } = data;
const domainObject = await domains.get(domain);
if (domainObject === null) throw new BoxError(BoxError.NOT_FOUND, 'Domain not found');
if (cert && key) {
const error = reverseProxy.validateCertificate(subdomain, domainObject, { cert, key });
const error = reverseProxy.validateCertificate(location, domainObject, { cert, key });
if (error) throw error;
}
const certificate = cert && key ? { cert, key } : null;
const result = await database.query('UPDATE locations SET certificateJson=? WHERE location=? AND domain=?', [ certificate ? JSON.stringify(certificate) : null, subdomain, domain ]);
if (result.affectedRows === 0) throw new BoxError(BoxError.NOT_FOUND, 'Location not found');
app = await get(app.id); // refresh app object
await reverseProxy.setUserCertificate(app, dns.fqdn(subdomain, domainObject), certificate);
await eventlog.add(eventlog.ACTION_APP_CONFIGURE, auditSource, { appId: app.id, app, subdomain, domain, cert });
await reverseProxy.setAppCertificate(location, domainObject, { cert, key });
await eventlog.add(eventlog.ACTION_APP_CONFIGURE, auditSource, { appId, app, cert, key });
}
async function setLocation(app, data, auditSource) {
@@ -2050,23 +1982,13 @@ async function getLogs(app, options) {
return transformStream;
}
// never fails just prints error
async function appendLogLine(app, line) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof line, 'string');
const logFilePath = path.join(paths.LOG_DIR, app.id, 'app.log');
if (!safe.fs.appendFileSync(logFilePath, line)) console.error(`Could not append log line for app ${app.id} at ${logFilePath}: ${safe.error.message}`);
}
async function getCertificate(subdomain, domain) {
assert.strictEqual(typeof subdomain, 'string');
assert.strictEqual(typeof domain, 'string');
const result = await database.query('SELECT certificateJson FROM locations WHERE subdomain=? AND domain=?', [ subdomain, domain ]);
if (result.length === 0) return null;
return safe.JSON.parse(result[0].certificateJson);
return JSON.parse(result[0].certificateJson);
}
// does a re-configure when called from most states. for install/clone errors, it re-installs with an optional manifest
@@ -2568,7 +2490,6 @@ async function autoupdateApps(updateInfo, auditSource) { // updateInfo is { appI
if (!canAutoupdateApp(app, updateInfo[appId])) {
debug(`app ${app.fqdn} requires manual update`);
notifications.alert(notifications.ALERT_MANUAL_APP_UPDATE, `${app.manifest.title} at ${app.fqdn} requires manual update to version ${updateInfo[appId].manifest.version}`, `Changelog:\n${updateInfo[appId].manifest.changelog}\n`);
continue;
}
+5 -29
View File
@@ -50,7 +50,7 @@ const CGROUP_VERSION = fs.existsSync('/sys/fs/cgroup/cgroup.controllers') ? '2'
const COLLECTD_CONFIG_EJS = fs.readFileSync(`${__dirname}/collectd/app_cgroup_v${CGROUP_VERSION}.ejs`, { encoding: 'utf8' });
function makeTaskError(error, app) {
assert(error instanceof BoxError);
assert.strictEqual(typeof error, 'object');
assert.strictEqual(typeof app, 'object');
// track a few variables which helps 'repair' restart the task (see also scheduleTask in apps.js)
@@ -74,8 +74,6 @@ async function updateApp(app, values) {
async function allocateContainerIp(app) {
assert.strictEqual(typeof app, 'object');
if (app.manifest.id === constants.PROXY_APP_APPSTORE_ID) return;
await promiseRetry({ times: 10, interval: 0, debug }, async function () {
const iprange = iputils.intFromIp('172.18.20.255') - iputils.intFromIp('172.18.16.1');
let rnd = Math.floor(Math.random() * iprange);
@@ -88,8 +86,6 @@ async function createContainer(app) {
assert.strictEqual(typeof app, 'object');
assert(!app.containerId); // otherwise, it will trigger volumeFrom
if (app.manifest.id === constants.PROXY_APP_APPSTORE_ID) return;
debug('createContainer: creating container');
const container = await docker.createContainer(app);
@@ -294,9 +290,6 @@ async function moveDataDir(app, targetVolumeId, targetVolumePrefix) {
async function downloadImage(manifest) {
assert.strictEqual(typeof manifest, 'object');
// skip for relay app
if (manifest.id === constants.PROXY_APP_APPSTORE_ID) return;
const info = await docker.info();
const [dfError, diskUsage] = await safe(df.file(info.DockerRootDir));
if (dfError) throw new BoxError(BoxError.FS_ERROR, `Error getting file system info: ${dfError.message}`);
@@ -311,9 +304,6 @@ async function startApp(app) {
if (app.runState === apps.RSTATE_STOPPED) return;
// skip for relay app
if (app.manifest.id === constants.PROXY_APP_APPSTORE_ID) return;
await docker.startContainer(app.id);
}
@@ -685,10 +675,8 @@ async function start(app, args, progressCallback) {
await progressCallback({ percent: 10, message: 'Starting app services' });
await services.startAppServices(app);
if (app.manifest.id !== constants.PROXY_APP_APPSTORE_ID) {
await progressCallback({ percent: 35, message: 'Starting container' });
await docker.startContainer(app.id);
}
await progressCallback({ percent: 35, message: 'Starting container' });
await docker.startContainer(app.id);
await progressCallback({ percent: 60, message: 'Adding collectd profile' });
await addCollectdProfile(app);
@@ -725,20 +713,8 @@ async function restart(app, args, progressCallback) {
assert.strictEqual(typeof args, 'object');
assert.strictEqual(typeof progressCallback, 'function');
if (app.manifest.id !== constants.PROXY_APP_APPSTORE_ID) {
await progressCallback({ percent: 10, message: 'Starting app services' });
await services.startAppServices(app);
await progressCallback({ percent: 20, message: 'Restarting container' });
await docker.restartContainer(app.id);
}
await progressCallback({ percent: 60, message: 'Adding collectd profile' });
await addCollectdProfile(app);
// stopped apps do not renew certs. currently, we don't do DNS to not overwrite existing user settings
await progressCallback({ percent: 80, message: 'Configuring reverse proxy' });
await reverseProxy.configureApp(app, AuditSource.APPTASK);
await progressCallback({ percent: 20, message: 'Restarting container' });
await docker.restartContainer(app.id);
await progressCallback({ percent: 100, message: 'Done' });
await updateApp(app, { installationState: apps.ISTATE_INSTALLED, error: null, health: null });
+1 -1
View File
@@ -293,7 +293,7 @@ async function run(progressCallback) {
await progressCallback({ percent: 40, message: 'Cleaning app backups' });
const removedAppBackupPaths = await cleanupAppBackups(backupConfig, referencedBackupIds, progressCallback);
await progressCallback({ percent: 70, message: 'Checking storage backend and removing stale entries in database' });
await progressCallback({ percent: 70, message: 'Cleaning missing backups' });
const missingBackupPaths = await cleanupMissingBackups(backupConfig, progressCallback);
await progressCallback({ percent: 90, message: 'Cleaning snapshots' });
+2 -2
View File
@@ -46,7 +46,7 @@ function sync(backupConfig, remotePath, dataLayout, progressCallback, callback)
syncer.sync(dataLayout, function processTask(task, iteratorCallback) {
debug('sync: processing task: %j', task);
// the empty task.path is special to signify the directory
const destPath = task.path && backupConfig.encryptedFilenames ? hush.encryptFilePath(task.path, backupConfig.encryption) : task.path;
const destPath = task.path && backupConfig.encryption ? hush.encryptFilePath(task.path, backupConfig.encryption) : task.path;
const backupFilePath = path.join(getBackupFilePath(backupConfig, remotePath), destPath);
if (task.operation === 'removedir') {
@@ -164,7 +164,7 @@ function downloadDir(backupConfig, backupFilePath, dataLayout, progressCallback,
function downloadFile(entry, done) {
let relativePath = path.relative(backupFilePath, entry.fullPath);
if (backupConfig.encryptedFilenames) {
if (backupConfig.encryption) {
const { error, result } = hush.decryptFilePath(relativePath, backupConfig.encryption);
if (error) return done(new BoxError(BoxError.CRYPTO_ERROR, 'Unable to decrypt file'));
relativePath = result;
+3 -4
View File
@@ -127,8 +127,7 @@ async function add(data) {
const creationTime = data.creationTime || new Date(); // allow tests to set the time
const manifestJson = JSON.stringify(data.manifest);
const prefixId = data.type === exports.BACKUP_TYPE_APP ? `${data.type}_${data.identifier}` : data.type; // type and identifier are same for other types
const id = `${prefixId}_v${data.packageVersion}_${hat(256)}`; // id is used by the UI to derive dependent packages. making this a UUID will require a lot of db querying
const id = `${data.type}_${data.identifier}_v${data.packageVersion}_${hat(256)}`; // id is used by the UI to derive dependent packages. making this a UUID will require a lot of db querying
const [error] = await safe(database.query('INSERT INTO backups (id, remotePath, identifier, encryptionVersion, packageVersion, type, creationTime, state, dependsOnJson, manifestJson, format, preserveSecs) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
[ id, data.remotePath, data.identifier, data.encryptionVersion, data.packageVersion, data.type, creationTime, data.state, JSON.stringify(data.dependsOn), manifestJson, data.format, data.preserveSecs ]));
@@ -177,7 +176,7 @@ function validateLabel(label) {
assert.strictEqual(typeof label, 'string');
if (label.length >= 200) return new BoxError(BoxError.BAD_FIELD, 'label too long');
if (/[^a-zA-Z0-9._() -]/.test(label)) return new BoxError(BoxError.BAD_FIELD, 'label can only contain alphanumerals, space, dot, hyphen, brackets or underscore');
if (/[^a-zA-Z0-9._()-]/.test(label)) return new BoxError(BoxError.BAD_FIELD, 'label can only contain alphanumerals, dot, hyphen, brackets or underscore');
return null;
}
@@ -242,7 +241,7 @@ async function startBackupTask(auditSource) {
const errorMessage = error ? error.message : '';
const timedOut = error ? error.code === tasks.ETIMEOUT : false;
const backup = backupId ? await get(backupId) : null;
const backup = await get(backupId);
await safe(eventlog.add(eventlog.ACTION_BACKUP_FINISH, auditSource, { taskId, errorMessage, timedOut, backupId, remotePath: backup?.remotePath }), { debug });
});
+1 -1
View File
@@ -58,7 +58,7 @@ async function upload(remotePath, format, dataLayoutString, progressCallback) {
const dataLayout = DataLayout.fromString(dataLayoutString);
const backupConfig = await settings.getBackupConfig();
await storage.api(backupConfig.provider).checkPreconditions(backupConfig, dataLayout);
await safe(storage.api(backupConfig.provider).checkPreconditions(backupConfig, dataLayout));
await backupFormat.api(format).upload(backupConfig, remotePath, dataLayout, progressCallback);
}
+4 -4
View File
@@ -140,7 +140,7 @@ async function runStartupTasks() {
// we used to run tasks in parallel but simultaneous nginx reloads was causing issues
for (let i = 0; i < tasks.length; i++) {
const [error] = await safe(tasks[i]());
if (error) debug(`Startup task at index ${i} failed: ${error.message} ${error.stack}`);
if (error) debug(`Startup task at index ${i} failed: ${error.message}`);
}
}
@@ -267,12 +267,12 @@ async function prepareDashboardDomain(domain, auditSource) {
const domainObject = await domains.get(domain);
if (!domain) throw new BoxError(BoxError.NOT_FOUND, 'No such domain');
const fqdn = dns.fqdn(constants.DASHBOARD_SUBDOMAIN, domainObject);
const fqdn = dns.fqdn(constants.DASHBOARD_LOCATION, domainObject);
const result = await apps.list();
if (result.some(app => app.fqdn === fqdn)) throw new BoxError(BoxError.BAD_STATE, 'Dashboard location conflicts with an existing app');
const taskId = await tasks.add(tasks.TASK_SETUP_DNS_AND_CERT, [ constants.DASHBOARD_SUBDOMAIN, domain, auditSource ]);
const taskId = await tasks.add(tasks.TASK_SETUP_DNS_AND_CERT, [ constants.DASHBOARD_LOCATION, domain, auditSource ]);
tasks.startTask(taskId, {});
@@ -290,7 +290,7 @@ async function setDashboardDomain(domain, auditSource) {
if (!domain) throw new BoxError(BoxError.NOT_FOUND, 'No such domain');
await reverseProxy.writeDashboardConfig(domainObject);
const fqdn = dns.fqdn(constants.DASHBOARD_SUBDOMAIN, domainObject);
const fqdn = dns.fqdn(constants.DASHBOARD_LOCATION, domainObject);
await settings.setDashboardLocation(domain, fqdn);
+22 -2
View File
@@ -1,14 +1,34 @@
LoadPlugin "table"
<Plugin table>
<Table "/sys/fs/cgroup/memory/docker/<%= containerId %>/memory.memsw.usage_in_bytes">
<Table "/sys/fs/cgroup/memory/docker/<%= containerId %>/memory.stat">
Instance "<%= appId %>-memory"
Separator " \\n"
<Result>
Type gauge
InstancesFrom 0
ValuesFrom 1
</Result>
</Table>
<Table "/sys/fs/cgroup/memory/docker/<%= containerId %>/memory.max_usage_in_bytes">
Instance "<%= appId %>-memory"
Separator "\\n"
<Result>
Type gauge
InstancePrefix "memsw_usage_in_bytes"
InstancePrefix "max_usage_in_bytes"
ValuesFrom 0
</Result>
</Table>
<Table "/sys/fs/cgroup/cpuacct/docker/<%= containerId %>/cpuacct.stat">
Instance "<%= appId %>-cpu"
Separator " \\n"
<Result>
Type gauge
InstancesFrom 0
ValuesFrom 1
</Result>
</Table>
</Plugin>
<Plugin python>
+15 -5
View File
@@ -1,24 +1,34 @@
LoadPlugin "table"
<Plugin table>
<Table "/sys/fs/cgroup/docker/<%= containerId %>/memory.current">
<Table "/sys/fs/cgroup/docker/<%= containerId %>/memory.stat">
Instance "<%= appId %>-memory"
Separator " \\n"
<Result>
Type gauge
InstancePrefix "memory_current"
ValuesFrom 0
InstancesFrom 0
ValuesFrom 1
</Result>
</Table>
<Table "/sys/fs/cgroup/docker/<%= containerId %>/memory.swap.current">
<Table "/sys/fs/cgroup/docker/<%= containerId %>/memory.max">
Instance "<%= appId %>-memory"
Separator "\\n"
<Result>
Type gauge
InstancePrefix "memory_swap_current"
InstancePrefix "max_usage_in_bytes"
ValuesFrom 0
</Result>
</Table>
<Table "/sys/fs/cgroup/docker/<%= containerId %>/cpu.stat">
Instance "<%= appId %>-cpu"
Separator " \\n"
<Result>
Type gauge
InstancesFrom 0
ValuesFrom 1
</Result>
</Table>
</Plugin>
<Plugin python>
+3 -5
View File
@@ -7,8 +7,8 @@ const CLOUDRON = process.env.BOX_ENV === 'cloudron',
TEST = process.env.BOX_ENV === 'test';
exports = module.exports = {
SMTP_SUBDOMAIN: 'smtp',
IMAP_SUBDOMAIN: 'imap',
SMTP_LOCATION: 'smtp',
IMAP_LOCATION: 'imap',
// These are combined into one array because users and groups become mailboxes
RESERVED_NAMES: [
@@ -22,7 +22,7 @@ exports = module.exports = {
'admins', 'users' // ldap code uses 'users' pseudo group
],
DASHBOARD_SUBDOMAIN: 'my',
DASHBOARD_LOCATION: 'my',
PORT: CLOUDRON ? 3000 : 5454,
INTERNAL_SMTP_PORT: 2525, // this value comes from the mail container
@@ -49,8 +49,6 @@ exports = module.exports = {
],
DEMO_APP_LIMIT: 20,
PROXY_APP_APPSTORE_ID: 'io.cloudron.builtin.appproxy',
AUTOUPDATE_PATTERN_NEVER: 'never',
// the db field is a blob so we make this explicit
+10 -31
View File
@@ -28,13 +28,13 @@ const appHealthMonitor = require('./apphealthmonitor.js'),
dyndns = require('./dyndns.js'),
eventlog = require('./eventlog.js'),
janitor = require('./janitor.js'),
paths = require('./paths.js'),
safe = require('safetydance'),
scheduler = require('./scheduler.js'),
settings = require('./settings.js'),
system = require('./system.js'),
updater = require('./updater.js'),
updateChecker = require('./updatechecker.js'),
userdirectory = require('./userdirectory.js'),
_ = require('underscore');
const gJobs = {
@@ -61,36 +61,12 @@ const gJobs = {
// Months: 0-11
// Day of Week: 0-6
function getCronSeed() {
let hour = null;
let minute = null;
const seedData = safe.fs.readFileSync(paths.CRON_SEED_FILE, 'utf8') || '';
const parts = seedData.split(':');
if (parts.length === 2) {
hour = parseInt(parts[0]) || null;
minute = parseInt(parts[1]) || null;
}
if ((hour == null || hour < 0 || hour > 23) || (minute == null || minute < 0 || minute > 60)) {
hour = Math.floor(24 * Math.random());
minute = Math.floor(60 * Math.random());
debug(`getCronSeed: writing new cron seed file with ${hour}:${minute} to ${paths.CRON_SEED_FILE}`);
safe.fs.writeFileSync(paths.CRON_SEED_FILE, `${hour}:${minute}`);
}
return { hour, minute };
}
async function startJobs() {
const { hour, minute } = getCronSeed();
debug(`startJobs: starting cron jobs with hour ${hour} and minute ${minute}`);
debug('startJobs: starting cron jobs');
const randomTick = Math.floor(60*Math.random());
gJobs.systemChecks = new CronJob({
cronTime: `00 ${minute} 2 * * *`, // once a day. if you change this interval, change the notification messages with correct duration
cronTime: `${randomTick} ${randomTick} 2 * * *`, // once a day. if you change this interval, change the notification messages with correct duration
onTick: async () => await safe(cloudron.runSystemChecks(), { debug }),
start: true
});
@@ -103,7 +79,7 @@ async function startJobs() {
// this is run separately from the update itself so that the user can disable automatic updates but can still get a notification
gJobs.updateCheckerJob = new CronJob({
cronTime: `00 ${minute} 1,5,9,13,17,21,23 * * *`,
cronTime: `${randomTick} ${randomTick} 1,5,9,13,17,21,23 * * *`,
onTick: async () => await safe(updateChecker.checkForUpdates({ automatic: true }), { debug }),
start: true
});
@@ -138,9 +114,8 @@ async function startJobs() {
start: true
});
// randomized per Cloudron based on hourlySeed
gJobs.certificateRenew = new CronJob({
cronTime: `00 10 ${hour} * * *`,
cronTime: '00 00 */12 * * *', // every 12 hours
onTick: async () => await safe(cloudron.renewCerts({}, AuditSource.CRON), { debug }),
start: true
});
@@ -173,6 +148,10 @@ async function handleSettingsChanged(key, value) {
await stopJobs();
await startJobs();
break;
case settings.USER_DIRECTORY_KEY:
if (value.enabled) await userdirectory.start();
else await userdirectory.stop();
break;
default:
break;
}
+1 -2
View File
@@ -61,9 +61,8 @@ async function initialize() {
// note the pool also has an 'acquire' event but that is called whenever we do a getConnection()
connection.on('error', (error) => debug(`Connection ${connection.threadId} error: ${error.message} ${error.code}`));
connection.query(`USE ${gDatabase.name}`);
connection.query('USE ' + gDatabase.name);
connection.query('SET SESSION sql_mode = \'strict_all_tables\'');
connection.query('SET SESSION group_concat_max_len = 65536'); // GROUP_CONCAT has only 1024 default
});
}
+9 -9
View File
@@ -76,11 +76,11 @@ function validateHostname(subdomain, domainObject) {
const hostname = fqdn(subdomain, domainObject);
const RESERVED_SUBDOMAINS = [
constants.SMTP_SUBDOMAIN,
constants.IMAP_SUBDOMAIN
const RESERVED_LOCATIONS = [
constants.SMTP_LOCATION,
constants.IMAP_LOCATION
];
if (RESERVED_SUBDOMAINS.indexOf(subdomain) !== -1) return new BoxError(BoxError.BAD_FIELD, `subdomain '${subdomain}' is reserved`);
if (RESERVED_LOCATIONS.indexOf(subdomain) !== -1) return new BoxError(BoxError.BAD_FIELD, `subdomain '${subdomain}' is reserved`);
if (hostname === settings.dashboardFqdn()) return new BoxError(BoxError.BAD_FIELD, `subdomain '${subdomain}' is reserved`);
@@ -184,11 +184,11 @@ async function waitForDnsRecord(subdomain, domain, type, value, options) {
await api(domainObject.provider).wait(domainObject, subdomain, type, value, options);
}
function makeWildcard(fqdn) {
assert.strictEqual(typeof fqdn, 'string');
function makeWildcard(vhost) {
assert.strictEqual(typeof vhost, 'string');
// if the fqdn is like *.example.com, this function will do nothing
const parts = fqdn.split('.');
// if the vhost is like *.example.com, this function will do nothing
let parts = vhost.split('.');
parts[0] = '*';
return parts.join('.');
}
@@ -295,7 +295,7 @@ async function syncDnsRecords(options, progressCallback) {
progress += Math.round(100/(1+allDomains.length));
let locations = [];
if (domain.domain === settings.dashboardDomain()) locations.push({ subdomain: constants.DASHBOARD_SUBDOMAIN, domain: settings.dashboardDomain() });
if (domain.domain === settings.dashboardDomain()) locations.push({ subdomain: constants.DASHBOARD_LOCATION, domain: settings.dashboardDomain() });
if (domain.domain === settings.mailDomain() && settings.mailFqdn() !== settings.dashboardFqdn()) locations.push({ subdomain: mailSubdomain, domain: settings.mailDomain() });
for (const app of allApps) {
+1 -1
View File
@@ -178,7 +178,7 @@ async function verifyDomainConfig(domainObject) {
if (error && error.code === 'ENOTFOUND') throw new BoxError(BoxError.BAD_FIELD, 'Unable to resolve nameservers for this domain');
if (error || !nameservers) throw new BoxError(BoxError.BAD_FIELD, error ? error.message : 'Unable to get nameservers');
if (!nameservers.every(function (n) { return n.toLowerCase().indexOf('.domaincontrol.com') !== -1 || n.toLowerCase().indexOf('.secureserver.net') !== -1; })) {
if (!nameservers.every(function (n) { return n.toLowerCase().indexOf('.domaincontrol.com') !== -1; })) {
debug('verifyDomainConfig: %j does not contain GoDaddy NS', nameservers);
throw new BoxError(BoxError.BAD_FIELD, 'Domain nameservers are not set to GoDaddy');
}
+1 -1
View File
@@ -13,7 +13,7 @@ exports = module.exports = {
const assert = require('assert'),
BoxError = require('../boxerror.js'),
constants = require('../constants.js'),
debug = require('debug')('box:dns/hetzner'),
debug = require('debug')('box:dns/digitalocean'),
dig = require('../dig.js'),
dns = require('../dns.js'),
safe = require('safetydance'),
+3 -3
View File
@@ -204,18 +204,18 @@ async function getAddonMounts(app) {
break;
}
case 'tls': {
const certificatePath = await reverseProxy.getCertificatePath(app.fqdn, app.domain);
const bundle = await reverseProxy.getCertificatePath(app.fqdn, app.domain);
mounts.push({
Target: '/etc/certs/tls_cert.pem',
Source: certificatePath.certFilePath,
Source: bundle.certFilePath,
Type: 'bind',
ReadOnly: true
});
mounts.push({
Target: '/etc/certs/tls_key.pem',
Source: certificatePath.keyFilePath,
Source: bundle.keyFilePath,
Type: 'bind',
ReadOnly: true
});
+25 -20
View File
@@ -77,13 +77,13 @@ async function verifyDomainConfig(domainConfig, domain, zoneName, provider) {
if (!backend) throw new BoxError(BoxError.BAD_FIELD, 'Invalid provider');
const domainObject = { config: domainConfig, domain: domain, zoneName: zoneName };
const [error, sanitizedConfig] = await safe(api(provider).verifyDomainConfig(domainObject));
if (error && error.reason === BoxError.ACCESS_DENIED) throw new BoxError(BoxError.BAD_FIELD, `Access denied: ${error.message}`);
if (error && error.reason === BoxError.NOT_FOUND) throw new BoxError(BoxError.BAD_FIELD, `Zone not found: ${error.message}`);
if (error && error.reason === BoxError.EXTERNAL_ERROR) throw new BoxError(BoxError.BAD_FIELD, `Configuration error: ${error.message}`);
if (error) throw error;
const [error, result] = await safe(api(provider).verifyDomainConfig(domainObject));
if (error && error.reason === BoxError.ACCESS_DENIED) return { error: new BoxError(BoxError.BAD_FIELD, `Access denied: ${error.message}`) };
if (error && error.reason === BoxError.NOT_FOUND) return { error: new BoxError(BoxError.BAD_FIELD, `Zone not found: ${error.message}`) };
if (error && error.reason === BoxError.EXTERNAL_ERROR) return { error: new BoxError(BoxError.BAD_FIELD, `Configuration error: ${error.message}`) };
if (error) return { error };
return sanitizedConfig;
return { error: null, sanitizedConfig: result };
}
function validateTlsConfig(tlsConfig, dnsProvider) {
@@ -151,11 +151,12 @@ async function add(domain, data, auditSource) {
dkimSelector = `cloudron-${suffix}`;
}
const sanitizedConfig = await verifyDomainConfig(config, domain, zoneName, provider);
const result = await verifyDomainConfig(config, domain, zoneName, provider);
if (result.error) throw result.error;
const queries = [
let queries = [
{ query: 'INSERT INTO domains (domain, zoneName, provider, configJson, tlsConfigJson, fallbackCertificateJson) VALUES (?, ?, ?, ?, ?, ?)',
args: [ domain, zoneName, provider, JSON.stringify(sanitizedConfig), JSON.stringify(tlsConfig), JSON.stringify(fallbackCertificate) ] },
args: [ domain, zoneName, provider, JSON.stringify(result.sanitizedConfig), JSON.stringify(tlsConfig), JSON.stringify(fallbackCertificate) ] },
{ query: 'INSERT INTO mail (domain, dkimKeyJson, dkimSelector) VALUES (?, ?, ?)', args: [ domain, JSON.stringify(dkimKey), dkimSelector || 'cloudron' ] },
];
@@ -194,6 +195,7 @@ async function setConfig(domain, data, auditSource) {
assert.strictEqual(typeof auditSource, 'object');
let { zoneName, provider, config, fallbackCertificate, tlsConfig } = data;
let error;
if (settings.isDemo() && (domain === settings.dashboardDomain())) throw new BoxError(BoxError.CONFLICT, 'Not allowed in demo mode');
@@ -209,15 +211,16 @@ async function setConfig(domain, data, auditSource) {
if (error) throw error;
}
const tlsConfigError = validateTlsConfig(tlsConfig, provider);
if (tlsConfigError) throw tlsConfigError;
error = validateTlsConfig(tlsConfig, provider);
if (error) throw error;
if (provider === domainObject.provider) api(provider).injectPrivateFields(config, domainObject.config);
const sanitizedConfig = await verifyDomainConfig(config, domain, zoneName, provider);
const result = await verifyDomainConfig(config, domain, zoneName, provider);
if (result.error) throw result.error;
const newData = {
config: sanitizedConfig,
config: result.sanitizedConfig,
zoneName,
provider,
tlsConfig,
@@ -225,7 +228,7 @@ async function setConfig(domain, data, auditSource) {
if (fallbackCertificate) newData.fallbackCertificate = fallbackCertificate;
const args = [], fields = [];
let args = [ ], fields = [ ];
for (const k in newData) {
if (k === 'config' || k === 'tlsConfig' || k === 'fallbackCertificate') { // json fields
fields.push(`${k}Json = ?`);
@@ -237,8 +240,9 @@ async function setConfig(domain, data, auditSource) {
}
args.push(domain);
const result = await database.query('UPDATE domains SET ' + fields.join(', ') + ' WHERE domain=?', args);
if (result.affectedRows === 0) throw new BoxError(BoxError.NOT_FOUND, 'Domain not found');
[error] = await safe(database.query('UPDATE domains SET ' + fields.join(', ') + ' WHERE domain=?', args));
if (error && error.reason === BoxError.NOT_FOUND) throw new BoxError(BoxError.NOT_FOUND, 'Domain not found');
if (error) throw new BoxError(BoxError.DATABASE_ERROR, error);
if (!fallbackCertificate) return;
@@ -252,11 +256,12 @@ async function setWellKnown(domain, wellKnown, auditSource) {
assert.strictEqual(typeof wellKnown, 'object');
assert.strictEqual(typeof auditSource, 'object');
const wellKnownError = validateWellKnown(wellKnown);
if (wellKnownError) throw wellKnownError;
let error = validateWellKnown(wellKnown);
if (error) throw error;
const result = await database.query('UPDATE domains SET wellKnownJson = ? WHERE domain=?', [ JSON.stringify(wellKnown), domain ]);
if (result.affectedRows === 0) throw new BoxError(BoxError.NOT_FOUND, 'Domain not found');
[error] = await safe(database.query('UPDATE domains SET wellKnownJson = ? WHERE domain=?', [ JSON.stringify(wellKnown), domain ]));
if (error && error.reason === BoxError.NOT_FOUND) throw new BoxError(BoxError.NOT_FOUND, 'Domain not found');
if (error) throw new BoxError(BoxError.DATABASE_ERROR, error);
await eventlog.add(eventlog.ACTION_DOMAIN_UPDATE, auditSource, { domain, wellKnown });
}
+2 -2
View File
@@ -36,8 +36,8 @@ async function sync(auditSource) {
}
debug(`refreshDNS: updating IP from ${info.ipv4} to ipv4: ${ipv4} (changed: ${ipv4Changed}) ipv6: ${ipv6} (changed: ${ipv6Changed})`);
if (ipv4Changed) await dns.upsertDnsRecords(constants.DASHBOARD_SUBDOMAIN, settings.dashboardDomain(), 'A', [ ipv4 ]);
if (ipv6Changed) await dns.upsertDnsRecords(constants.DASHBOARD_SUBDOMAIN, settings.dashboardDomain(), 'AAAA', [ ipv6 ]);
if (ipv4Changed) await dns.upsertDnsRecords(constants.DASHBOARD_LOCATION, settings.dashboardDomain(), 'A', [ ipv4 ]);
if (ipv6Changed) await dns.upsertDnsRecords(constants.DASHBOARD_LOCATION, settings.dashboardDomain(), 'AAAA', [ ipv6 ]);
const result = await apps.list();
for (const app of result) {
-1
View File
@@ -69,7 +69,6 @@ exports = module.exports = {
ACTION_USER_ADD: 'user.add',
ACTION_USER_LOGIN: 'user.login',
ACTION_USER_LOGIN_GHOST: 'user.login.ghost',
ACTION_USER_LOGOUT: 'user.logout',
ACTION_USER_REMOVE: 'user.remove',
ACTION_USER_UPDATE: 'user.update',
+2 -33
View File
@@ -2,7 +2,6 @@
exports = module.exports = {
verifyPassword,
verifyPasswordAndTotpToken,
maybeCreateUser,
testConfig,
@@ -45,7 +44,6 @@ function translateUser(ldapConfig, ldapUser) {
return {
username: ldapUser[ldapConfig.usernameField].toLowerCase(),
email: ldapUser.mail || ldapUser.mailPrimaryAddress,
twoFactorAuthenticationEnabled: !!ldapUser.twoFactorAuthenticationEnabled,
displayName: ldapUser.displayName || ldapUser.cn // user.giveName + ' ' + user.sn
};
}
@@ -256,11 +254,8 @@ async function maybeCreateUser(identifier) {
throw error;
}
// fetch the full record and amend potential twoFA settings
const newUser = await users.get(userId);
if (user.twoFactorAuthenticationEnabled) newUser.twoFactorAuthenticationEnabled = true;
return newUser;
// fetch the full record
return await users.get(userId);
}
async function verifyPassword(user, password) {
@@ -284,32 +279,6 @@ async function verifyPassword(user, password) {
return translateUser(externalLdapConfig, ldapUsers[0]);
}
async function verifyPasswordAndTotpToken(user, password, totpToken) {
assert.strictEqual(typeof user, 'object');
assert.strictEqual(typeof password, 'string');
assert.strictEqual(typeof totpToken, 'string');
const externalLdapConfig = await settings.getExternalLdapConfig();
if (externalLdapConfig.provider === 'noop') throw new BoxError(BoxError.BAD_STATE, 'not enabled');
const ldapUsers = await ldapUserSearch(externalLdapConfig, { filter: `${externalLdapConfig.usernameField}=${user.username}` });
if (ldapUsers.length === 0) throw new BoxError(BoxError.NOT_FOUND);
if (ldapUsers.length > 1) throw new BoxError(BoxError.CONFLICT);
const client = await getClient(externalLdapConfig, { bind: false });
// inject totptoken into first attribute
const rdns = ldapUsers[0].dn.split(',');
const totpTokenDn = `${rdns[0]}+totptoken=${totpToken},` + rdns.slice(1).join(',');
const [error] = await safe(util.promisify(client.bind.bind(client))(totpTokenDn, password));
client.unbind();
if (error instanceof ldap.InvalidCredentialsError) throw new BoxError(BoxError.INVALID_CREDENTIALS);
if (error) throw new BoxError(BoxError.EXTERNAL_ERROR, error);
return translateUser(externalLdapConfig, ldapUsers[0]);
}
async function startSyncer() {
const externalLdapConfig = await settings.getExternalLdapConfig();
if (externalLdapConfig.provider === 'noop') throw new BoxError(BoxError.BAD_STATE, 'not enabled');
-184
View File
@@ -1,184 +0,0 @@
'use strict';
exports = module.exports = {
getSystem,
getByApp
};
const apps = require('./apps.js'),
assert = require('assert'),
BoxError = require('./boxerror.js'),
fs = require('fs'),
safe = require('safetydance'),
superagent = require('superagent'),
system = require('./system.js');
// for testing locally: curl 'http://127.0.0.1:8417/graphite-web/render?format=json&from=-1min&target=absolute(collectd.localhost.du-docker.capacity-usage)'
// the datapoint is (value, timestamp) https://buildmedia.readthedocs.org/media/pdf/graphite/0.9.16/graphite.pdf
const GRAPHITE_RENDER_URL = 'http://127.0.0.1:8417/graphite-web/render';
// https://rootlesscontaine.rs/getting-started/common/cgroup2/#checking-whether-cgroup-v2-is-already-enabled
const CGROUP_VERSION = fs.existsSync('/sys/fs/cgroup/cgroup.controllers') ? '2' : '1';
async function getByApp(app, fromMinutes, noNullPoints) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof fromMinutes, 'number');
assert.strictEqual(typeof noNullPoints, 'boolean');
const timeBucketSize = fromMinutes > (24 * 60) ? (6*60) : 5;
const memoryQuery = {
target: null, // filled below
format: 'json',
from: `-${fromMinutes}min`,
until: 'now'
};
if (CGROUP_VERSION === '1') {
memoryQuery.target = `summarize(collectd.localhost.table-${app.id}-memory.gauge-memsw_usage_in_bytes, "${timeBucketSize}min", "avg")`;
} else {
memoryQuery.target = `summarize(sum(collectd.localhost.table-${app.id}-memory.gauge-memory_current, collectd.localhost.table-${app.id}-memory.gauge-memory_swap_current), "${timeBucketSize}min", "avg")`;
}
if (noNullPoints) memoryQuery.noNullPoints = true;
const [memoryError, memoryResponse] = await safe(superagent.get(GRAPHITE_RENDER_URL)
.query(memoryQuery)
.timeout(30 * 1000)
.ok(() => true));
if (memoryError) throw new BoxError(BoxError.NETWORK_ERROR, memoryError.message);
if (memoryResponse.status !== 200) throw new BoxError(BoxError.EXTERNAL_ERROR, `Unknown error: ${memoryResponse.status} ${memoryResponse.text}`);
let diskDataPoints;
if (app.manifest.addons.localstorage) {
const diskQuery = {
target: `summarize(collectd.localhost.du-${app.id}.capacity-usage, "${timeBucketSize}min", "avg")`,
format: 'json',
from: `-${fromMinutes}min`,
until: 'now'
};
if (noNullPoints) diskQuery.noNullPoints = true;
const [diskError, diskResponse] = await safe(superagent.get(GRAPHITE_RENDER_URL)
.query(diskQuery)
.timeout(30 * 1000)
.ok(() => true));
if (diskError) throw new BoxError(BoxError.NETWORK_ERROR, diskError.message);
if (diskResponse.status !== 200) throw new BoxError(BoxError.EXTERNAL_ERROR, `Unknown error: ${diskResponse.status} ${diskResponse.text}`);
// we may not have any datapoints
if (diskResponse.body.length === 0) diskDataPoints = [];
else diskDataPoints = diskResponse.body[0].datapoints;
} else {
diskDataPoints = [];
}
// app proxy instances have no container and thus no datapoints
return { memory: memoryResponse.body[0] || { datapoints: [] }, disk: { datapoints: diskDataPoints } };
}
async function getSystem(fromMinutes, noNullPoints) {
assert.strictEqual(typeof fromMinutes, 'number');
assert.strictEqual(typeof noNullPoints, 'boolean');
const timeBucketSize = fromMinutes > (24 * 60) ? (6*60) : 5;
const cpuQuery = `summarize(sum(collectd.localhost.aggregation-cpu-average.cpu-system, collectd.localhost.aggregation-cpu-average.cpu-user), "${timeBucketSize}min", "avg")`;
const memoryQuery = `summarize(sum(collectd.localhost.memory.memory-used, collectd.localhost.swap.swap-used), "${timeBucketSize}min", "avg")`;
const query = {
target: [ cpuQuery, memoryQuery ],
format: 'json',
from: `-${fromMinutes}min`,
until: 'now'
};
const [memCpuError, memCpuResponse] = await safe(superagent.get(GRAPHITE_RENDER_URL)
.query(query)
.timeout(30 * 1000)
.ok(() => true));
if (memCpuError) throw new BoxError(BoxError.NETWORK_ERROR, memCpuError.message);
if (memCpuResponse.status !== 200) throw new BoxError(BoxError.EXTERNAL_ERROR, `Unknown error: ${memCpuResponse.status} ${memCpuResponse.text}`);
const allApps = await apps.list();
const appResponses = {};
for (const app of allApps) {
appResponses[app.id] = await getByApp(app, fromMinutes, noNullPoints);
}
const diskInfo = await system.getDisks();
// segregate locations into the correct disks based on 'filesystem'
diskInfo.disks.forEach(function (disk, index) {
disk.id = index;
disk.contains = [];
if (disk.filesystem === diskInfo.platformDataDisk) disk.contains.push({ type: 'standard', label: 'Platform data', id: 'platformdata', usage: 0 });
if (disk.filesystem === diskInfo.boxDataDisk) disk.contains.push({ type: 'standard', label: 'Box data', id: 'boxdata', usage: 0 });
if (disk.filesystem === diskInfo.dockerDataDisk) disk.contains.push({ type: 'standard', label: 'Docker images', id: 'docker', usage: 0 });
if (disk.filesystem === diskInfo.mailDataDisk) disk.contains.push({ type: 'standard', label: 'Email data', id: 'maildata', usage: 0 });
if (disk.filesystem === diskInfo.backupsDisk) disk.contains.push({ type: 'standard', label: 'Backup data', id: 'cloudron-backup', usage: 0 });
// attach appIds which reside on this disk
const apps = Object.keys(diskInfo.apps).filter(function (appId) { return diskInfo.apps[appId] === disk.filesystem; });
apps.forEach(function (appId) {
disk.contains.push({ type: 'app', id: appId, label: '', usage: 0 });
});
// attach volumeIds which reside on this disk
const volumes = Object.keys(diskInfo.volumes).filter(function (volumeId) { return diskInfo.volumes[volumeId] === disk.filesystem; });
volumes.forEach(function (volumeId) {
disk.contains.push({ type: 'volume', id: volumeId, label: '', usage: 0 });
});
});
for (const disk of diskInfo.disks) {
// /dev/sda1 -> sda1
// /dev/mapper/foo.com -> mapper_foo_com (see #348)
let diskName = disk.filesystem.slice(disk.filesystem.indexOf('/', 1) + 1);
diskName = diskName.replace(/\/|\./g, '_');
const target = [
`absolute(collectd.localhost.df-${diskName}.df_complex-free)`,
`absolute(collectd.localhost.df-${diskName}.df_complex-reserved)`, // reserved for root (default: 5%) tune2fs -l/m
`absolute(collectd.localhost.df-${diskName}.df_complex-used)`
];
const diskQuery = {
target: target,
format: 'json',
from: '-1day',
until: 'now'
};
const [diskError, diskResponse] = await safe(superagent.get(GRAPHITE_RENDER_URL).query(diskQuery).timeout(30 * 1000).ok(() => true));
if (diskError) throw new BoxError(BoxError.NETWORK_ERROR, diskError.message);
if (diskResponse.status !== 200) throw new BoxError(BoxError.EXTERNAL_ERROR, `Unknown error: ${diskResponse.status} ${diskResponse.text}`);
disk.size = diskResponse.body[2].datapoints[0][0] + diskResponse.body[1].datapoints[0][0] + diskResponse.body[0].datapoints[0][0];
disk.free = diskResponse.body[0].datapoints[0][0];
disk.occupied = diskResponse.body[2].datapoints[0][0];
for (const content of disk.contains) {
const query = {
target: `absolute(collectd.localhost.du-${content.id}.capacity-usage)`,
format: 'json',
from: '-1day',
until: 'now'
};
const [error, response] = await safe(superagent.get(GRAPHITE_RENDER_URL).query(query).timeout(30 * 1000).ok(() => true));
if (error) throw new BoxError(BoxError.NETWORK_ERROR, error.message);
if (response.status !== 200) throw new BoxError(BoxError.EXTERNAL_ERROR, `Unknown error: ${response.status} ${response.text}`);
// we may not have any datapoints
if (response.body.length === 0) content.usage = null;
else content.usage = response.body[0].datapoints[0][0];
console.log(content)
}
}
return { cpu: memCpuResponse.body[0], memory: memCpuResponse.body[1], apps: appResponses, disks: diskInfo.disks };
}
+2 -2
View File
@@ -6,7 +6,7 @@
exports = module.exports = {
// a version change recreates all containers with latest docker config
'version': '49.1.0',
'version': '49.0.0',
'baseImages': [
{ repo: 'cloudron/base', tag: 'cloudron/base:3.2.0@sha256:ba1d566164a67c266782545ea9809dc611c4152e27686fd14060332dd88263ea' }
@@ -20,7 +20,7 @@ exports = module.exports = {
'postgresql': { repo: 'cloudron/postgresql', tag: 'cloudron/postgresql:4.3.1@sha256:b0c564d097b765d4a639330843e2e813d2c87fc8ed34b7df7550bf2c6df0012c' },
'mongodb': { repo: 'cloudron/mongodb', tag: 'cloudron/mongodb:4.2.1@sha256:f7f689beea07b1c6a9503a48f6fb38ef66e5b22f59fc585a92842a6578b33d46' },
'redis': { repo: 'cloudron/redis', tag: 'cloudron/redis:3.3.0@sha256:89c4e8083631b6d16b5d630d9b27f8ecf301c62f81219d77bd5948a1f4a4375c' },
'mail': { repo: 'cloudron/mail', tag: 'cloudron/mail:3.7.0@sha256:a41a52ba45bea0b2f14be82f8480d5f4583d806dc1f9c99c3bce858d2c9f27d7' },
'mail': { repo: 'cloudron/mail', tag: 'cloudron/mail:3.6.1@sha256:b8b93f007105080d4812a05648e6bc5e15c95c63f511c829cbc14a163d9ea029' },
'graphite': { repo: 'cloudron/graphite', tag: 'cloudron/graphite:3.1.0@sha256:30ec3a01964a1e01396acf265183997c3e17fb07eac1a82b979292cc7719ff4b' },
'sftp': { repo: 'cloudron/sftp', tag: 'cloudron/sftp:3.6.1@sha256:ba4b9a1fe274c0ef0a900e5d0deeb8f3da08e118798d1d90fbf995cc0cf6e3a3' }
}
+26 -32
View File
@@ -25,6 +25,9 @@ let gServer = null;
const NOOP = function () {};
const GROUP_USERS_DN = 'cn=users,ou=groups,dc=cloudron';
const GROUP_ADMINS_DN = 'cn=admins,ou=groups,dc=cloudron';
// Will attach req.app if successful
async function authenticateApp(req, res, next) {
const sourceIp = req.connection.ldap.id.split(':')[0];
@@ -147,9 +150,6 @@ async function userSearch(req, res, next) {
const [error, result] = await safe(getUsersWithAccessToApp(req));
if (error) return next(new ldap.OperationsError(error.toString()));
const [groupsError, allGroups] = await safe(groups.listWithMembers());
if (groupsError) return next(new ldap.OperationsError(error.toString()));
let results = [];
// send user objects
@@ -159,6 +159,9 @@ async function userSearch(req, res, next) {
const dn = ldap.parseDN('cn=' + user.id + ',ou=users,dc=cloudron');
const memberof = [ GROUP_USERS_DN ];
if (users.compareRoles(user.role, users.ROLE_ADMIN) >= 0) memberof.push(GROUP_ADMINS_DN);
const displayName = user.displayName || user.username || ''; // displayName can be empty and username can be null
const nameParts = displayName.split(' ');
const firstName = nameParts[0];
@@ -178,7 +181,7 @@ async function userSearch(req, res, next) {
givenName: firstName,
username: user.username,
samaccountname: user.username, // to support ActiveDirectory clients
memberof: allGroups.filter(function (g) { return g.userIds.indexOf(user.id) !== -1; }).map(function (g) { return g.name; })
memberof: memberof
}
};
@@ -201,6 +204,9 @@ async function userSearch(req, res, next) {
async function groupSearch(req, res, next) {
debug('group search: dn %s, scope %s, filter %s (from %s)', req.dn.toString(), req.scope, req.filter.toString(), req.connection.ldap.id);
const [error, usersWithAccess] = await safe(getUsersWithAccessToApp(req));
if (error) return next(new ldap.OperationsError(error.toString()));
const results = [];
let [errorGroups, resultGroups] = await safe(groups.listWithMembers());
@@ -211,15 +217,15 @@ async function groupSearch(req, res, next) {
}
resultGroups.forEach(function (group) {
const dn = ldap.parseDN(`cn=${group.name},ou=groups,dc=cloudron`);
const dn = ldap.parseDN('cn=' + group.name + ',ou=groups,dc=cloudron');
const members = group.userIds.filter(function (uid) { return usersWithAccess.map(function (u) { return u.id; }).indexOf(uid) !== -1; });
const obj = {
dn: dn.toString(),
attributes: {
objectclass: ['group'],
cn: group.name,
gidnumber: group.id,
memberuid: group.userIds
memberuid: members
}
};
@@ -268,35 +274,25 @@ async function groupAdminsCompare(req, res, next) {
async function mailboxSearch(req, res, next) {
debug('mailbox search: dn %s, scope %s, filter %s (from %s)', req.dn.toString(), req.scope, req.filter.toString(), req.connection.ldap.id);
// if cn is set OR filter is mail= we only search for one mailbox specifically
let email, dn;
// if cn is set we only search for one mailbox specifically
if (req.dn.rdns[0].attrs.cn) {
email = req.dn.rdns[0].attrs.cn.value.toLowerCase();
dn = req.dn.toString();
} else if (req.filter instanceof ldap.EqualityFilter && req.filter.attribute === 'mail') {
email = req.filter.value.toLowerCase();
dn = `cn=${email},${req.dn.toString()}`;
}
if (email) {
const email = req.dn.rdns[0].attrs.cn.value.toLowerCase();
const parts = email.split('@');
if (parts.length !== 2) return next(new ldap.NoSuchObjectError(dn.toString()));
if (parts.length !== 2) return next(new ldap.NoSuchObjectError(req.dn.toString()));
const [error, mailbox] = await safe(mail.getMailbox(parts[0], parts[1]));
if (error) return next(new ldap.OperationsError(error.toString()));
if (!mailbox) return next(new ldap.NoSuchObjectError(dn.toString()));
if (!mailbox.active) return next(new ldap.NoSuchObjectError(dn.toString()));
if (!mailbox) return next(new ldap.NoSuchObjectError(req.dn.toString()));
if (!mailbox.active) return next(new ldap.NoSuchObjectError(req.dn.toString()));
const obj = {
dn: dn.toString(),
dn: req.dn.toString(),
attributes: {
objectclass: ['mailbox'],
objectcategory: 'mailbox',
cn: `${mailbox.name}@${mailbox.domain}`,
uid: `${mailbox.name}@${mailbox.domain}`,
mail: `${mailbox.name}@${mailbox.domain}`,
storagequota: mailbox.storageQuota,
messagesquota: mailbox.messagesQuota,
mail: `${mailbox.name}@${mailbox.domain}`
}
};
@@ -309,7 +305,7 @@ async function mailboxSearch(req, res, next) {
} else {
res.end();
}
} else { // new sogo and dovecot listing (doveadm -A)
} else { // new sogo
// TODO figure out how proper pagination here could work
let [error, mailboxes] = await safe(mail.listAllMailboxes(1, 100000));
if (error) return next(new ldap.OperationsError(error.toString()));
@@ -333,9 +329,7 @@ async function mailboxSearch(req, res, next) {
displayname: mailbox.ownerType === mail.OWNERTYPE_USER ? ownerObject.displayName : ownerObject.name,
cn: `${mailbox.name}@${mailbox.domain}`,
uid: `${mailbox.name}@${mailbox.domain}`,
mail: `${mailbox.name}@${mailbox.domain}`,
storagequota: mailbox.storageQuota,
messagesquota: mailbox.messagesQuota,
mail: `${mailbox.name}@${mailbox.domain}`
}
};
@@ -365,7 +359,7 @@ async function mailAliasSearch(req, res, next) {
const parts = email.split('@');
if (parts.length !== 2) return next(new ldap.NoSuchObjectError(req.dn.toString()));
const [error, alias] = await safe(mail.searchAlias(parts[0], parts[1]));
const [error, alias] = await safe(mail.getAlias(parts[0], parts[1]));
if (error) return next(new ldap.OperationsError(error.toString()));
if (!alias) return next(new ldap.NoSuchObjectError(req.dn.toString()));
@@ -379,7 +373,7 @@ async function mailAliasSearch(req, res, next) {
attributes: {
objectclass: ['nisMailAlias'],
objectcategory: 'nisMailAlias',
cn: `${parts[0]}@${alias.domain}`, // alias.name can contain wildcard character
cn: `${alias.name}@${alias.domain}`,
rfc822MailMember: `${alias.aliasName}@${alias.aliasDomain}`
}
};
@@ -455,7 +449,7 @@ async function authorizeUserForApp(req, res, next) {
// we return no such object, to avoid leakage of a users existence
if (!canAccess) return next(new ldap.NoSuchObjectError(req.dn.toString()));
await eventlog.upsertLoginEvent(req.user.ghost ? eventlog.ACTION_USER_LOGIN_GHOST : eventlog.ACTION_USER_LOGIN, { authType: 'ldap', appId: req.app.id }, { userId: req.user.id, user: users.removePrivateFields(req.user) });
await eventlog.upsertLoginEvent(eventlog.ACTION_USER_LOGIN, { authType: 'ldap', appId: req.app.id }, { userId: req.user.id, user: users.removePrivateFields(req.user) });
res.end();
}
@@ -600,7 +594,7 @@ async function authenticateService(serviceId, dn, req, res, next) {
if (verifyError && verifyError.reason === BoxError.INVALID_CREDENTIALS) return next(new ldap.InvalidCredentialsError(dn.toString()));
if (verifyError) return next(new ldap.OperationsError(verifyError.message));
eventlog.upsertLoginEvent(result.ghost ? eventlog.ACTION_USER_LOGIN_GHOST : eventlog.ACTION_USER_LOGIN, { authType: 'ldap', mailboxId: email }, { userId: result.id, user: users.removePrivateFields(result) });
eventlog.upsertLoginEvent(eventlog.ACTION_USER_LOGIN, { authType: 'ldap', mailboxId: email }, { userId: result.id, user: users.removePrivateFields(result) });
res.end();
}
+25 -63
View File
@@ -48,7 +48,6 @@ exports = module.exports = {
getAlias,
getAliases,
setAliases,
searchAlias,
getLists,
getList,
@@ -97,11 +96,13 @@ const assert = require('assert'),
services = require('./services.js'),
settings = require('./settings.js'),
shell = require('./shell.js'),
smtpTransport = require('nodemailer-smtp-transport'),
superagent = require('superagent'),
sysinfo = require('./sysinfo.js'),
system = require('./system.js'),
tasks = require('./tasks.js'),
users = require('./users.js'),
util = require('util'),
validator = require('validator'),
_ = require('underscore');
@@ -111,7 +112,7 @@ const REMOVE_MAILBOX_CMD = path.join(__dirname, 'scripts/rmmailbox.sh');
const OWNERTYPES = [ exports.OWNERTYPE_USER, exports.OWNERTYPE_GROUP, exports.OWNERTYPE_APP ];
// if you add a field here, listMailboxes has to be updated
const MAILBOX_FIELDS = [ 'name', 'type', 'ownerId', 'ownerType', 'aliasName', 'aliasDomain', 'creationTime', 'membersJson', 'membersOnly', 'domain', 'active', 'enablePop3', 'storageQuota', 'messagesQuota' ].join(',');
const MAILBOX_FIELDS = [ 'name', 'type', 'ownerId', 'ownerType', 'aliasName', 'aliasDomain', 'creationTime', 'membersJson', 'membersOnly', 'domain', 'active', 'enablePop3' ].join(',');
const MAILDB_FIELDS = [ 'domain', 'enabled', 'mailFromValidation', 'catchAllJson', 'relayJson', 'dkimKeyJson', 'dkimSelector', 'bannerJson' ].join(',');
function postProcessMailbox(data) {
@@ -168,18 +169,6 @@ function validateName(name) {
return null;
}
function validateAlias(name) {
assert.strictEqual(typeof name, 'string');
if (name.length < 1) return new BoxError(BoxError.BAD_FIELD, 'mailbox name must be atleast 1 char');
if (name.length >= 200) return new BoxError(BoxError.BAD_FIELD, 'mailbox name too long');
// also need to consider valid LDAP characters here (e.g '+' is reserved). keep hyphen at the end so it doesn't become a range.
if (/[^a-zA-Z0-9._*-]/.test(name)) return new BoxError(BoxError.BAD_FIELD, 'mailbox name can only contain alphanumerals, dot, hyphen, asterisk or underscore');
return null;
}
function validateDisplayName(name) {
assert.strictEqual(typeof name, 'string');
@@ -235,8 +224,7 @@ async function checkSmtpRelay(relay) {
connectionTimeout: 5000,
greetingTimeout: 5000,
host: relay.host,
port: relay.port,
secure: false // haraka relay only supports STARTTLS
port: relay.port
};
// only set auth if either username or password is provided, some relays auth based on IP (range)
@@ -249,9 +237,9 @@ async function checkSmtpRelay(relay) {
if (relay.acceptSelfSignedCerts) options.tls = { rejectUnauthorized: false };
const transporter = nodemailer.createTransport(options);
const transporter = nodemailer.createTransport(smtpTransport(options));
const [error] = await safe(transporter.verify());
const [error] = await safe(util.promisify(transporter.verify)());
result.status = !error;
if (error) {
result.value = result.errorMessage = error.message;
@@ -654,7 +642,7 @@ async function createMailConfig(mailFqdn) {
// create sections for per-domain configuration
for (const domain of mailDomains) {
const catchAll = domain.catchAll.join(',');
const catchAll = domain.catchAll.map(function (c) { return `${c}@${domain.domain}`; }).join(',');
const mailFromValidation = domain.mailFromValidation;
if (!safe.fs.appendFileSync(`${paths.MAIL_CONFIG_DIR}/mail.ini`,
@@ -710,15 +698,15 @@ async function configureMail(mailFqdn, mailDomain, serviceConfig) {
const memory = system.getMemoryAllocation(memoryLimit);
const cloudronToken = hat(8 * 128), relayToken = hat(8 * 128);
const certificatePath = await reverseProxy.getCertificatePath(mailFqdn, mailDomain);
const bundle = await reverseProxy.getCertificatePath(mailFqdn, mailDomain);
const dhparamsFilePath = `${paths.MAIL_CONFIG_DIR}/dhparams.pem`;
const mailCertFilePath = `${paths.MAIL_CONFIG_DIR}/tls_cert.pem`;
const mailKeyFilePath = `${paths.MAIL_CONFIG_DIR}/tls_key.pem`;
if (!safe.child_process.execSync(`cp ${paths.DHPARAMS_FILE} ${dhparamsFilePath}`)) throw new BoxError(BoxError.FS_ERROR, 'Could not copy dhparams:' + safe.error.message);
if (!safe.child_process.execSync(`cp ${certificatePath.certFilePath} ${mailCertFilePath}`)) throw new BoxError(BoxError.FS_ERROR, 'Could not create cert file:' + safe.error.message);
if (!safe.child_process.execSync(`cp ${certificatePath.keyFilePath} ${mailKeyFilePath}`)) throw new BoxError(BoxError.FS_ERROR, 'Could not create key file:' + safe.error.message);
if (!safe.child_process.execSync(`cp ${bundle.certFilePath} ${mailCertFilePath}`)) throw new BoxError(BoxError.FS_ERROR, 'Could not create cert file:' + safe.error.message);
if (!safe.child_process.execSync(`cp ${bundle.keyFilePath} ${mailKeyFilePath}`)) throw new BoxError(BoxError.FS_ERROR, 'Could not create key file:' + safe.error.message);
// if the 'yellowtent' user of OS and the 'cloudron' user of mail container don't match, the keys become inaccessible by mail code
if (!safe.fs.chmodSync(mailKeyFilePath, 0o644)) throw new BoxError(BoxError.FS_ERROR, `Could not chmod key file: ${safe.error.message}`);
@@ -1041,10 +1029,6 @@ async function setCatchAllAddress(domain, addresses) {
assert.strictEqual(typeof domain, 'string');
assert(Array.isArray(addresses));
for (const address of addresses) {
if (!validator.isEmail(address)) throw new BoxError(BoxError.BAD_FIELD, `Invalid catch all address: ${address}`);
}
await updateDomain(domain, { catchAll: addresses });
safe(restartMail(), { debug }); // have to restart mail container since haraka cannot watch symlinked config files (mail.ini)
@@ -1105,7 +1089,7 @@ async function listMailboxes(domain, search, page, perPage) {
const escapedSearch = mysql.escape('%' + search + '%'); // this also quotes the string
const searchQuery = search ? ` HAVING (name LIKE ${escapedSearch} OR aliasNames LIKE ${escapedSearch} OR aliasDomains LIKE ${escapedSearch})` : ''; // having instead of where because of aggregated columns use
const query = 'SELECT m1.name AS name, m1.domain AS domain, m1.ownerId AS ownerId, m1.ownerType as ownerType, m1.active as active, JSON_ARRAYAGG(m2.name) AS aliasNames, JSON_ARRAYAGG(m2.domain) AS aliasDomains, m1.enablePop3 AS enablePop3, m1.storageQuota AS storageQuota, m1.messagesQuota AS messagesQuota '
const query = 'SELECT m1.name AS name, m1.domain AS domain, m1.ownerId AS ownerId, m1.ownerType as ownerType, m1.active as active, JSON_ARRAYAGG(m2.name) AS aliasNames, JSON_ARRAYAGG(m2.domain) AS aliasDomains, m1.enablePop3 AS enablePop3 '
+ ` FROM (SELECT * FROM mailboxes WHERE type='${exports.TYPE_MAILBOX}') AS m1`
+ ` LEFT JOIN (SELECT * FROM mailboxes WHERE type='${exports.TYPE_ALIAS}') AS m2`
+ ' ON m1.name=m2.aliasName AND m1.domain=m2.aliasDomain AND m1.ownerId=m2.ownerId'
@@ -1126,7 +1110,7 @@ async function listAllMailboxes(page, perPage) {
assert.strictEqual(typeof page, 'number');
assert.strictEqual(typeof perPage, 'number');
const query = 'SELECT m1.name AS name, m1.domain AS domain, m1.ownerId AS ownerId, m1.ownerType as ownerType, m1.active as active, JSON_ARRAYAGG(m2.name) AS aliasNames, JSON_ARRAYAGG(m2.domain) AS aliasDomains, m1.enablePop3 AS enablePop3, m1.storageQuota AS storageQuota, m1.messagesQuota AS messagesQuota '
const query = 'SELECT m1.name AS name, m1.domain AS domain, m1.ownerId AS ownerId, m1.ownerType as ownerType, m1.active as active, JSON_ARRAYAGG(m2.name) AS aliasNames, JSON_ARRAYAGG(m2.domain) AS aliasDomains, m1.enablePop3 AS enablePop3 '
+ ` FROM (SELECT * FROM mailboxes WHERE type='${exports.TYPE_MAILBOX}') AS m1`
+ ` LEFT JOIN (SELECT * FROM mailboxes WHERE type='${exports.TYPE_ALIAS}') AS m2`
+ ' ON m1.name=m2.aliasName AND m1.domain=m2.aliasDomain AND m1.ownerId=m2.ownerId'
@@ -1180,12 +1164,10 @@ async function addMailbox(name, domain, data, auditSource) {
assert.strictEqual(typeof data, 'object');
assert.strictEqual(typeof auditSource, 'object');
const { ownerId, ownerType, active, storageQuota, messagesQuota } = data;
const { ownerId, ownerType, active } = data;
assert.strictEqual(typeof ownerId, 'string');
assert.strictEqual(typeof ownerType, 'string');
assert.strictEqual(typeof active, 'boolean');
assert(Number.isInteger(storageQuota) && storageQuota >= 0);
assert(Number.isInteger(messagesQuota) && messagesQuota >= 0);
name = name.toLowerCase();
@@ -1194,13 +1176,12 @@ async function addMailbox(name, domain, data, auditSource) {
if (!OWNERTYPES.includes(ownerType)) throw new BoxError(BoxError.BAD_FIELD, 'bad owner type');
[error] = await safe(database.query('INSERT INTO mailboxes (name, type, domain, ownerId, ownerType, active, storageQuota, messagesQuota) VALUES (?, ?, ?, ?, ?, ?, ?, ?)',
[ name, exports.TYPE_MAILBOX, domain, ownerId, ownerType, active, storageQuota, messagesQuota ]));
[error] = await safe(database.query('INSERT INTO mailboxes (name, type, domain, ownerId, ownerType, active) VALUES (?, ?, ?, ?, ?, ?)', [ name, exports.TYPE_MAILBOX, domain, ownerId, ownerType, active ]));
if (error && error.code === 'ER_DUP_ENTRY') throw new BoxError(BoxError.ALREADY_EXISTS, 'mailbox already exists');
if (error && error.code === 'ER_NO_REFERENCED_ROW_2' && error.sqlMessage.includes('mailboxes_domain_constraint')) throw new BoxError(BoxError.NOT_FOUND, `no such domain '${domain}'`);
if (error) throw error;
await eventlog.add(eventlog.ACTION_MAIL_MAILBOX_ADD, auditSource, { name, domain, ownerId, ownerType, active, storageQuota, messageQuota: messagesQuota });
await eventlog.add(eventlog.ACTION_MAIL_MAILBOX_ADD, auditSource, { name, domain, ownerId, ownerType, active });
}
async function updateMailbox(name, domain, data, auditSource) {
@@ -1209,30 +1190,23 @@ async function updateMailbox(name, domain, data, auditSource) {
assert.strictEqual(typeof data, 'object');
assert.strictEqual(typeof auditSource, 'object');
const args = [];
const fields = [];
for (const k in data) {
if (k === 'enablePop3' || k === 'active') {
fields.push(k + ' = ?');
args.push(data[k] ? 1 : 0);
continue;
}
const { ownerId, ownerType, active, enablePop3 } = data;
assert.strictEqual(typeof ownerId, 'string');
assert.strictEqual(typeof ownerType, 'string');
assert.strictEqual(typeof active, 'boolean');
assert.strictEqual(typeof enablePop3, 'boolean');
if (k === 'ownerType' && !OWNERTYPES.includes(data[k])) throw new BoxError(BoxError.BAD_FIELD, 'bad owner type');
name = name.toLowerCase();
fields.push(k + ' = ?');
args.push(data[k]);
}
args.push(name.toLowerCase());
args.push(domain);
if (!OWNERTYPES.includes(ownerType)) throw new BoxError(BoxError.BAD_FIELD, 'bad owner type');
const mailbox = await getMailbox(name, domain);
if (!mailbox) throw new BoxError(BoxError.NOT_FOUND, 'No such mailbox');
const result = await safe(database.query('UPDATE mailboxes SET ' + fields.join(', ') + ' WHERE name = ? AND domain = ?', args));
const result = await database.query('UPDATE mailboxes SET ownerId = ?, ownerType = ?, active = ?, enablePop3 = ? WHERE name = ? AND domain = ?', [ ownerId, ownerType, active, enablePop3, name, domain ]);
if (result.affectedRows === 0) throw new BoxError(BoxError.NOT_FOUND, 'Mailbox not found');
await eventlog.add(eventlog.ACTION_MAIL_MAILBOX_UPDATE, auditSource, Object.assign(data, { name, domain, oldUserId: mailbox.userId }) );
await eventlog.add(eventlog.ACTION_MAIL_MAILBOX_UPDATE, auditSource, { name, domain, oldUserId: mailbox.userId, ownerId, ownerType, active });
}
async function removeSolrIndex(mailbox) {
@@ -1284,18 +1258,6 @@ async function getAlias(name, domain) {
return results[0];
}
async function searchAlias(name, domain) {
assert.strictEqual(typeof name, 'string');
assert.strictEqual(typeof domain, 'string');
const results = await database.query(`SELECT ${MAILBOX_FIELDS} FROM mailboxes WHERE ? LIKE REPLACE(REPLACE(name, '*', '%'), '_', '\\_') AND type = ? AND domain = ?`, [ name, exports.TYPE_ALIAS, domain ]);
if (results.length === 0) return null;
results.forEach(function (result) { postProcessMailbox(result); });
return results[0];
}
async function getAliases(name, domain) {
assert.strictEqual(typeof name, 'string');
assert.strictEqual(typeof domain, 'string');
@@ -1315,7 +1277,7 @@ async function setAliases(name, domain, aliases, auditSource) {
const name = aliases[i].name.toLowerCase();
const domain = aliases[i].domain.toLowerCase();
const error = validateAlias(name);
const error = validateName(name);
if (error) throw error;
const mailDomain = await getDomain(domain);
+3 -4
View File
@@ -6,11 +6,10 @@
<p>{{ passwordResetEmail.description }}</p>
<br/>
<p>
<a href="<%= resetLink %>">{{ passwordResetEmail.resetAction }}</a>
</p>
<a style="border-radius: 2px; background-color: #2196f3; color: white; padding: 6px 12px; text-decoration: none;" href="<%= resetLink %>">{{ passwordResetEmail.resetAction }}</a>
<br/>
<br/>
{{ passwordResetEmail.expireNote }}
+3 -3
View File
@@ -5,9 +5,9 @@
<h3>{{ welcomeEmail.salutation }}</h3>
<h2>{{ welcomeEmail.welcomeTo }}</h2>
<br/>
<a style="border-radius: 2px; background-color: #2196f3; color: white; padding: 6px 12px; text-decoration: none;" href="<%= inviteLink %>">{{ welcomeEmail.inviteLinkAction }}</a>
<p>
<a href="<%= inviteLink %>">{{ welcomeEmail.inviteLinkAction }}</a>
</p>
<br/>
<br/>
+3 -2
View File
@@ -25,6 +25,7 @@ const assert = require('assert'),
safe = require('safetydance'),
settings = require('./settings.js'),
translation = require('./translation.js'),
smtpTransport = require('nodemailer-smtp-transport'),
util = require('util');
const MAIL_TEMPLATES_DIR = path.join(__dirname, 'mail_templates');
@@ -51,14 +52,14 @@ async function sendMail(mailOptions) {
const data = await mail.getMailAuth();
const transport = nodemailer.createTransport({
const transport = nodemailer.createTransport(smtpTransport({
host: data.ip,
port: data.port,
auth: {
user: mailOptions.authUser || `no-reply@${settings.dashboardDomain()}`,
pass: data.relayToken
}
});
}));
const transportSendMail = util.promisify(transport.sendMail.bind(transport));
const [error] = await safe(transportSendMail(mailOptions));
+1 -15
View File
@@ -45,7 +45,7 @@ server {
location / {
<% if ( endpoint === 'dashboard' || endpoint === 'setup' ) { %>
return 301 https://$host$request_uri;
<% } else if ( endpoint === 'app' || endpoint === 'external' ) { %>
<% } else if ( endpoint === 'app' ) { %>
return 301 https://$host$request_uri;
<% } else if ( endpoint === 'redirect' ) { %>
return 301 https://<%= redirectTo %>$request_uri;
@@ -175,12 +175,6 @@ server {
proxy_pass http://127.0.0.1:3000;
<% } else if ( endpoint === 'app' ) { %>
proxy_pass http://<%= ip %>:<%= port %>;
<% } else if ( endpoint === 'external' ) { %>
# without a variable, nginx will not start if upstream is down or
resolver 127.0.0.1 valid=30s;
set $upstream <%= upstreamUri %>;
proxy_ssl_verify off;
proxy_pass $upstream;
<% } else if ( endpoint === 'redirect' ) { %>
return 302 https://<%= redirectTo %>$request_uri;
<% } %>
@@ -332,14 +326,6 @@ server {
# to clear a permanent redirect on the browser
return 302 https://<%= redirectTo %>$request_uri;
}
<% } else if ( endpoint === 'external' ) { %>
location / {
# without a variable, nginx will not start if upstream is down or unavailable
resolver 127.0.0.1 valid=30s;
set $upstream <%= upstreamUri %>;
proxy_ssl_verify off;
proxy_pass $upstream;
}
<% } else if ( endpoint === 'ip' ) { %>
location /notfound.html {
root <%= sourceDir %>/dashboard/dist;
+4 -5
View File
@@ -14,7 +14,6 @@ exports = module.exports = {
ALERT_REBOOT: 'reboot',
ALERT_BOX_UPDATE: 'boxUpdate',
ALERT_UPDATE_UBUNTU: 'ubuntuUpdate',
ALERT_MANUAL_APP_UPDATE: 'manualAppUpdate',
alert,
@@ -190,16 +189,16 @@ async function boxUpdateError(eventId, errorMessage) {
await add(eventId, 'Cloudron update failed', `Failed to update Cloudron: ${errorMessage}.`);
}
async function certificateRenewalError(eventId, fqdn, errorMessage) {
async function certificateRenewalError(eventId, vhost, errorMessage) {
assert.strictEqual(typeof eventId, 'string');
assert.strictEqual(typeof fqdn, 'string');
assert.strictEqual(typeof vhost, 'string');
assert.strictEqual(typeof errorMessage, 'string');
await add(eventId, `Certificate renewal of ${fqdn} failed`, `Failed to renew certs of ${fqdn}: ${errorMessage}. Renewal will be retried in 12 hours.`);
await add(eventId, `Certificate renewal of ${vhost} failed`, `Failed to renew certs of ${vhost}: ${errorMessage}. Renewal will be retried in 12 hours.`);
const admins = await users.getAdmins();
for (const admin of admins) {
await mailer.certificateRenewalError(admin.email, fqdn, errorMessage);
await mailer.certificateRenewalError(admin.email, vhost, errorMessage);
}
}
-1
View File
@@ -16,7 +16,6 @@ exports = module.exports = {
CLOUDRON_DEFAULT_AVATAR_FILE: path.join(__dirname + '/../assets/avatar.png'),
INFRA_VERSION_FILE: path.join(baseDir(), 'platformdata/INFRA_VERSION'),
CRON_SEED_FILE: path.join(baseDir(), 'platformdata/CRON_SEED'),
DASHBOARD_DIR: constants.TEST ? path.join(__dirname, '../../dashboard/src') : path.join(baseDir(), 'box/dashboard/dist'),
PROVIDER_FILE: '/etc/cloudron/PROVIDER',
+3 -3
View File
@@ -70,7 +70,7 @@ async function setupTask(domain, auditSource) {
assert.strictEqual(typeof auditSource, 'object');
try {
await cloudron.setupDnsAndCert(constants.DASHBOARD_SUBDOMAIN, domain, auditSource, (progress) => setProgress('setup', progress.message));
await cloudron.setupDnsAndCert(constants.DASHBOARD_LOCATION, domain, auditSource, (progress) => setProgress('setup', progress.message));
await ensureDhparams();
await cloudron.setDashboardDomain(domain, auditSource);
setProgress('setup', 'Done'),
@@ -111,7 +111,7 @@ async function setup(domainConfig, sysinfoConfig, auditSource) {
dkimSelector: 'cloudron'
};
await settings.setMailLocation(domain, `${constants.DASHBOARD_SUBDOMAIN}.${domain}`); // default mail location. do this before we add the domain for upserting mail DNS
await settings.setMailLocation(domain, `${constants.DASHBOARD_LOCATION}.${domain}`); // default mail location. do this before we add the domain for upserting mail DNS
await domains.add(domain, data, auditSource);
await settings.setSysinfoConfig(sysinfoConfig);
@@ -174,7 +174,7 @@ async function restoreTask(backupConfig, remotePath, sysinfoConfig, options, aud
await reverseProxy.restoreFallbackCertificates();
const dashboardDomain = settings.dashboardDomain(); // load this fresh from after the backup.restore
if (!options.skipDnsSetup) await cloudron.setupDnsAndCert(constants.DASHBOARD_SUBDOMAIN, dashboardDomain, auditSource, (progress) => setProgress('restore', progress.message));
if (!options.skipDnsSetup) await cloudron.setupDnsAndCert(constants.DASHBOARD_LOCATION, dashboardDomain, auditSource, (progress) => setProgress('restore', progress.message));
await cloudron.setDashboardDomain(dashboardDomain, auditSource);
await settings.setBackupCredentials(backupConfig); // update just the credentials and not the policy and flags
await eventlog.add(eventlog.ACTION_RESTORE, auditSource, { remotePath });
+6 -14
View File
@@ -50,23 +50,15 @@ function jwtVerify(req, res, next) {
});
}
async function authorizationHeader(req, res, next) {
async function basicAuthVerify(req, res, next) {
const appId = req.headers['x-app-id'] || '';
if (!appId) return next();
if (!req.headers.authorization) return next();
const credentials = basicAuth(req);
if (!appId || !credentials) return next();
const [error, app] = await safe(apps.get(appId));
if (error) return next(new HttpError(503, error.message));
if (!app) return next(new HttpError(503, 'Error getting app'));
// only if the app supports bearer auth, pass it through to the app. without this flag, anyone can access the app with Bearer auth!
if (req.headers.authorization.startsWith('Bearer ') && app.manifest.addons.proxyAuth.supportsBearerAuth) return next(new HttpSuccess(200, {}));
const credentials = basicAuth(req);
if (!credentials) return next();
if (!app.manifest.addons.proxyAuth.basicAuth) return next(); // this is a flag because this allows auth to bypass 2FA
if (!app.manifest.addons.proxyAuth.basicAuth) return next();
const verifyFunc = credentials.name.indexOf('@') !== -1 ? users.verifyWithEmail : users.verifyWithUsername;
const [verifyError, user] = await safe(verifyFunc(credentials.name, credentials.pass, appId));
@@ -147,7 +139,7 @@ function auth(req, res, next) {
res.set('x-remote-email', req.user.email);
res.set('x-remote-name', req.user.displayName);
next(new HttpSuccess(200, {}));
return next(new HttpSuccess(200, {}));
}
// endpoint called by login page, username and password posted as JSON body
@@ -251,7 +243,7 @@ function initializeAuthwallExpressSync() {
.use(middleware.lastMile());
router.get ('/login', loginPage);
router.get ('/auth', jwtVerify, authorizationHeader, auth); // called by nginx before accessing protected page
router.get ('/auth', jwtVerify, basicAuthVerify, auth); // called by nginx before accessing protected page
router.post('/login', json, passwordAuth, authorize);
router.get ('/logout', logoutPage);
router.post('/logout', json, logoutPage);
+129 -132
View File
@@ -1,26 +1,26 @@
'use strict';
exports = module.exports = {
setUserCertificate, // per location certificate
setFallbackCertificate, // per domain certificate
setAppCertificate,
setFallbackCertificate,
generateFallbackCertificate,
validateCertificate,
getCertificatePath, // resolved cert path
getCertificatePath,
ensureCertificate,
checkCerts,
// the 'configure' functions ensure a certificate and generate nginx config
// the 'configure' ensure a certificate and generate nginx config
configureApp,
unconfigureApp,
// these only generate nginx config
writeDefaultConfig,
writeDashboardConfig,
writeAppConfigs,
writeAppConfig,
removeAppConfigs,
restoreFallbackCertificates,
@@ -59,14 +59,17 @@ const RESTART_SERVICE_CMD = path.join(__dirname, 'scripts/restartservice.sh');
function nginxLocation(s) {
if (!s.startsWith('!')) return s;
const re = s.replace(/[\^$\\.*+?()[\]{}|]/g, '\\$&'); // https://github.com/es-shims/regexp.escape/blob/master/implementation.js
let re = s.replace(/[\^$\\.*+?()[\]{}|]/g, '\\$&'); // https://github.com/es-shims/regexp.escape/blob/master/implementation.js
return `~ ^(?!(${re.slice(1)}))`; // negative regex assertion - https://stackoverflow.com/questions/16302897/nginx-location-not-equal-to-regex
}
async function getAcmeApi(domainObject) {
assert.strictEqual(typeof domainObject, 'object');
const apiOptions = { prod: false, performHttpAuthorization: false, wildcard: false, email: '' };
const acmeApi = acme2;
let apiOptions = { prod: false, performHttpAuthorization: false, wildcard: false, email: '' };
apiOptions.prod = domainObject.tlsConfig.provider.match(/.*-prod/) !== null; // matches 'le-prod' or 'letsencrypt-prod'
apiOptions.performHttpAuthorization = domainObject.provider.match(/noop|manual|wildcard/) !== null;
apiOptions.wildcard = !!domainObject.tlsConfig.wildcard;
@@ -78,7 +81,7 @@ async function getAcmeApi(domainObject) {
const [error, owner] = await safe(users.getOwner());
apiOptions.email = (error || !owner) ? 'webmaster@cloudron.io' : owner.email; // can error if not activated yet
return { acme2, apiOptions };
return { acmeApi, apiOptions };
}
function getExpiryDate(certFilePath) {
@@ -141,19 +144,19 @@ function providerMatchesSync(domainObject, certFilePath, apiOptions) {
// note: https://tools.ietf.org/html/rfc4346#section-7.4.2 (certificate_list) requires that the
// servers certificate appears first (and not the intermediate cert)
function validateCertificate(subdomain, domainObject, certificate) {
assert.strictEqual(typeof subdomain, 'string');
function validateCertificate(location, domainObject, certificate) {
assert.strictEqual(typeof location, 'string');
assert.strictEqual(typeof domainObject, 'object');
assert(certificate && typeof certificate, 'object');
const { cert, key } = certificate;
const cert = certificate.cert, key = certificate.key;
// check for empty cert and key strings
if (!cert && key) return new BoxError(BoxError.BAD_FIELD, 'missing cert');
if (cert && !key) return new BoxError(BoxError.BAD_FIELD, 'missing key');
// -checkhost checks for SAN or CN exclusively. SAN takes precedence and if present, ignores the CN.
const fqdn = dns.fqdn(subdomain, domainObject);
const fqdn = dns.fqdn(location, domainObject);
let result = safe.child_process.execSync(`openssl x509 -noout -checkhost "${fqdn}"`, { encoding: 'utf8', input: cert });
if (result === null) return new BoxError(BoxError.BAD_FIELD, 'Unable to get certificate subject:' + safe.error.message);
@@ -195,7 +198,7 @@ async function generateFallbackCertificate(domain) {
let opensslConfWithSan;
const cn = domain;
debug(`generateFallbackCertificate: domain=${domain} cn=${cn}`);
debug(`generateFallbackCertificateSync: domain=${domain} cn=${cn}`);
opensslConfWithSan = `${opensslConf}\n[SAN]\nsubjectAltName=DNS:${domain},DNS:*.${cn}\n`;
const configFile = path.join(os.tmpdir(), 'openssl-' + crypto.randomBytes(4).readUInt32LE(0) + '.conf');
@@ -216,13 +219,14 @@ async function generateFallbackCertificate(domain) {
return { cert, key };
}
async function setFallbackCertificate(domain, certificate) {
async function setFallbackCertificate(domain, fallback) {
assert.strictEqual(typeof domain, 'string');
assert(certificate && typeof certificate === 'object');
assert(fallback && typeof fallback === 'object');
assert.strictEqual(typeof fallback, 'object');
debug(`setFallbackCertificate: setting certs for domain ${domain}`);
if (!safe.fs.writeFileSync(path.join(paths.NGINX_CERT_DIR, `${domain}.host.cert`), certificate.cert)) throw new BoxError(BoxError.FS_ERROR, safe.error.message);
if (!safe.fs.writeFileSync(path.join(paths.NGINX_CERT_DIR, `${domain}.host.key`), certificate.key)) throw new BoxError(BoxError.FS_ERROR, safe.error.message);
if (!safe.fs.writeFileSync(path.join(paths.NGINX_CERT_DIR, `${domain}.host.cert`), fallback.cert)) throw new BoxError(BoxError.FS_ERROR, safe.error.message);
if (!safe.fs.writeFileSync(path.join(paths.NGINX_CERT_DIR, `${domain}.host.key`), fallback.key)) throw new BoxError(BoxError.FS_ERROR, safe.error.message);
// TODO: maybe the cert is being used by the mail container
await reload();
@@ -246,36 +250,55 @@ function getFallbackCertificatePathSync(domain) {
return { certFilePath, keyFilePath };
}
function getUserCertificatePathSync(fqdn) {
assert.strictEqual(typeof fqdn, 'string');
function getAppCertificatePathSync(vhost) {
assert.strictEqual(typeof vhost, 'string');
const certFilePath = path.join(paths.NGINX_CERT_DIR, `${fqdn}.user.cert`);
const keyFilePath = path.join(paths.NGINX_CERT_DIR, `${fqdn}.user.key`);
const certFilePath = path.join(paths.NGINX_CERT_DIR, `${vhost}.user.cert`);
const keyFilePath = path.join(paths.NGINX_CERT_DIR, `${vhost}.user.key`);
return { certFilePath, keyFilePath };
}
function getAcmeCertificatePathSync(fqdn, domainObject) {
assert.strictEqual(typeof fqdn, 'string'); // this can contain wildcard domain (for alias domains)
function getAcmeCertificatePathSync(vhost, domainObject) {
assert.strictEqual(typeof vhost, 'string'); // this can contain wildcard domain (for alias domains)
assert.strictEqual(typeof domainObject, 'object');
let certName, certFilePath, keyFilePath, csrFilePath, acmeChallengesDir = paths.ACME_CHALLENGES_DIR;
if (fqdn !== domainObject.domain && domainObject.tlsConfig.wildcard) { // bare domain is not part of wildcard SAN
certName = dns.makeWildcard(fqdn).replace('*.', '_.');
if (vhost !== domainObject.domain && domainObject.tlsConfig.wildcard) { // bare domain is not part of wildcard SAN
certName = dns.makeWildcard(vhost).replace('*.', '_.');
certFilePath = path.join(paths.NGINX_CERT_DIR, `${certName}.cert`);
keyFilePath = path.join(paths.NGINX_CERT_DIR, `${certName}.key`);
csrFilePath = path.join(paths.NGINX_CERT_DIR, `${certName}.csr`);
} else {
certName = fqdn;
certFilePath = path.join(paths.NGINX_CERT_DIR, `${fqdn}.cert`);
keyFilePath = path.join(paths.NGINX_CERT_DIR, `${fqdn}.key`);
csrFilePath = path.join(paths.NGINX_CERT_DIR, `${fqdn}.csr`);
certName = vhost;
certFilePath = path.join(paths.NGINX_CERT_DIR, `${vhost}.cert`);
keyFilePath = path.join(paths.NGINX_CERT_DIR, `${vhost}.key`);
csrFilePath = path.join(paths.NGINX_CERT_DIR, `${vhost}.csr`);
}
return { certName, certFilePath, keyFilePath, csrFilePath, acmeChallengesDir };
}
async function setAppCertificate(location, domainObject, certificate) {
assert.strictEqual(typeof location, 'string');
assert.strictEqual(typeof domainObject, 'object');
assert.strictEqual(typeof certificate, 'object');
const fqdn = dns.fqdn(location, domainObject);
const { certFilePath, keyFilePath } = getAppCertificatePathSync(fqdn);
if (certificate.cert && certificate.key) {
if (!safe.fs.writeFileSync(certFilePath, certificate.cert)) throw safe.error;
if (!safe.fs.writeFileSync(keyFilePath, certificate.key)) throw safe.error;
} else { // remove existing cert/key
if (!safe.fs.unlinkSync(certFilePath)) debug(`Error removing cert: ${safe.error.message}`);
if (!safe.fs.unlinkSync(keyFilePath)) debug(`Error removing key: ${safe.error.message}`);
}
await reload();
}
async function getCertificatePath(fqdn, domain) {
assert.strictEqual(typeof fqdn, 'string');
assert.strictEqual(typeof domain, 'string');
@@ -286,38 +309,38 @@ async function getCertificatePath(fqdn, domain) {
const domainObject = await domains.get(domain);
const userPath = getUserCertificatePathSync(fqdn); // user cert always wins
if (fs.existsSync(userPath.certFilePath) && fs.existsSync(userPath.keyFilePath)) return userPath;
const appCertPath = getAppCertificatePathSync(fqdn); // user cert always wins
if (fs.existsSync(appCertPath.certFilePath) && fs.existsSync(appCertPath.keyFilePath)) return appCertPath;
if (domainObject.tlsConfig.provider === 'fallback') return getFallbackCertificatePathSync(domain);
const acmePath = getAcmeCertificatePathSync(fqdn, domainObject);
if (fs.existsSync(acmePath.certFilePath) && fs.existsSync(acmePath.keyFilePath)) return acmePath;
const acmeCertPath = getAcmeCertificatePathSync(fqdn, domainObject);
if (fs.existsSync(acmeCertPath.certFilePath) && fs.existsSync(acmeCertPath.keyFilePath)) return acmeCertPath;
return getFallbackCertificatePathSync(domain);
}
async function syncUserCertificate(fqdn, domainObject) {
assert.strictEqual(typeof fqdn, 'string'); // this can contain wildcard domain (for alias domains)
async function checkAppCertificate(vhost, domainObject) {
assert.strictEqual(typeof vhost, 'string'); // this can contain wildcard domain (for alias domains)
assert.strictEqual(typeof domainObject, 'object');
const subdomain = fqdn.substr(0, fqdn.length - domainObject.domain.length - 1);
const userCertificate = await apps.getCertificate(subdomain, domainObject.domain);
if (!userCertificate) return null;
const subdomain = vhost.substr(0, vhost.length - domainObject.domain.length - 1);
const certificate = await apps.getCertificate(subdomain, domainObject.domain);
if (!certificate) return null;
const { certFilePath, keyFilePath } = getUserCertificatePathSync(fqdn);
const { certFilePath, keyFilePath } = getAppCertificatePathSync(vhost);
if (!safe.fs.writeFileSync(certFilePath, userCertificate.cert)) throw new BoxError(BoxError.FS_ERROR, `Failed to write certificate: ${safe.error.message}`);
if (!safe.fs.writeFileSync(keyFilePath, userCertificate.key)) throw new BoxError(BoxError.FS_ERROR, `Failed to write key: ${safe.error.message}`);
if (!safe.fs.writeFileSync(certFilePath, certificate.cert)) throw new BoxError(BoxError.FS_ERROR, `Failed to write certificate: ${safe.error.message}`);
if (!safe.fs.writeFileSync(keyFilePath, certificate.key)) throw new BoxError(BoxError.FS_ERROR, `Failed to write key: ${safe.error.message}`);
return { certFilePath, keyFilePath };
}
async function syncAcmeCertificate(fqdn, domainObject) {
assert.strictEqual(typeof fqdn, 'string'); // this can contain wildcard domain (for alias domains)
async function checkAcmeCertificate(vhost, domainObject) {
assert.strictEqual(typeof vhost, 'string'); // this can contain wildcard domain (for alias domains)
assert.strictEqual(typeof domainObject, 'object');
const { certName, certFilePath, keyFilePath, csrFilePath } = getAcmeCertificatePathSync(fqdn, domainObject);
const { certName, certFilePath, keyFilePath, csrFilePath } = getAcmeCertificatePathSync(vhost, domainObject);
const privateKey = await blobs.get(`${blobs.CERT_PREFIX}-${certName}.key`);
const cert = await blobs.get(`${blobs.CERT_PREFIX}-${certName}.cert`);
@@ -333,11 +356,11 @@ async function syncAcmeCertificate(fqdn, domainObject) {
return { certFilePath, keyFilePath };
}
async function updateCertBlobs(fqdn, domainObject) {
assert.strictEqual(typeof fqdn, 'string'); // this can contain wildcard domain (for alias domains)
async function updateCertBlobs(vhost, domainObject) {
assert.strictEqual(typeof vhost, 'string'); // this can contain wildcard domain (for alias domains)
assert.strictEqual(typeof domainObject, 'object');
const { certName, certFilePath, keyFilePath, csrFilePath } = getAcmeCertificatePathSync(fqdn, domainObject);
const { certName, certFilePath, keyFilePath, csrFilePath } = getAcmeCertificatePathSync(vhost, domainObject);
const privateKey = safe.fs.readFileSync(keyFilePath);
if (!privateKey) throw new BoxError(BoxError.FS_ERROR, `Failed to read private key: ${safe.error.message}`);
@@ -353,76 +376,76 @@ async function updateCertBlobs(fqdn, domainObject) {
await blobs.set(`${blobs.CERT_PREFIX}-${certName}.csr`, csr);
}
async function ensureCertificate(subdomain, domain, auditSource) {
assert.strictEqual(typeof subdomain, 'string');
async function ensureCertificate(vhost, domain, auditSource) {
assert.strictEqual(typeof vhost, 'string');
assert.strictEqual(typeof domain, 'string');
assert.strictEqual(typeof auditSource, 'object');
const domainObject = await domains.get(domain);
const userCertificatePath = await syncUserCertificate(subdomain, domainObject);
if (userCertificatePath) return { certificatePath: userCertificatePath, renewed: false };
let bundle = await checkAppCertificate(vhost, domainObject);
if (bundle) return { bundle, renewed: false };
if (domainObject.tlsConfig.provider === 'fallback') {
debug(`ensureCertificate: ${subdomain} will use fallback certs`);
debug(`ensureCertificate: ${vhost} will use fallback certs`);
return { certificatePath: getFallbackCertificatePathSync(domain), renewed: false };
return { bundle: getFallbackCertificatePathSync(domain), renewed: false };
}
const { acme2, apiOptions } = await getAcmeApi(domainObject);
const { acmeApi, apiOptions } = await getAcmeApi(domainObject);
let notAfter = null;
const [, acmeCertificatePath] = await safe(syncAcmeCertificate(subdomain, domainObject));
if (acmeCertificatePath) {
debug(`ensureCertificate: ${subdomain} certificate already exists at ${acmeCertificatePath.keyFilePath}`);
notAfter = getExpiryDate(acmeCertificatePath.certFilePath);
const [, currentBundle] = await safe(checkAcmeCertificate(vhost, domainObject));
if (currentBundle) {
debug(`ensureCertificate: ${vhost} certificate already exists at ${currentBundle.keyFilePath}`);
notAfter = getExpiryDate(currentBundle.certFilePath);
const isExpiring = (notAfter - new Date()) <= (30 * 24 * 60 * 60 * 1000); // expiring in a month
if (!isExpiring && providerMatchesSync(domainObject, acmeCertificatePath.certFilePath, apiOptions)) return { certificatePath: acmeCertificatePath, renewed: false };
debug(`ensureCertificate: ${subdomain} cert requires renewal`);
if (!isExpiring && providerMatchesSync(domainObject, currentBundle.certFilePath, apiOptions)) return { bundle: currentBundle, renewed: false };
debug(`ensureCertificate: ${vhost} cert requires renewal`);
} else {
debug(`ensureCertificate: ${subdomain} cert does not exist`);
debug(`ensureCertificate: ${vhost} cert does not exist`);
}
debug(`ensureCertificate: getting certificate for ${subdomain} with options ${JSON.stringify(apiOptions)}`);
debug('ensureCertificate: getting certificate for %s with options %j', vhost, apiOptions);
const acmePaths = getAcmeCertificatePathSync(subdomain, domainObject);
const [error] = await safe(acme2.getCertificate(subdomain, domain, acmePaths, apiOptions));
const acmePaths = getAcmeCertificatePathSync(vhost, domainObject);
let [error] = await safe(acmeApi.getCertificate(vhost, domain, acmePaths, apiOptions));
debug(`ensureCertificate: error: ${error ? error.message : 'null'} cert: ${acmePaths.certFilePath || 'null'}`);
await safe(eventlog.add(acmeCertificatePath ? eventlog.ACTION_CERTIFICATE_RENEWAL : eventlog.ACTION_CERTIFICATE_NEW, auditSource, { domain: subdomain, errorMessage: error ? error.message : '', notAfter }));
await safe(eventlog.add(currentBundle ? eventlog.ACTION_CERTIFICATE_RENEWAL : eventlog.ACTION_CERTIFICATE_NEW, auditSource, { domain: vhost, errorMessage: error ? error.message : '', notAfter }));
if (error && acmeCertificatePath && (notAfter - new Date() > 0)) { // still some life left in this certificate
debug('ensureCertificate: continue using existing certificate since renewal failed');
return { certificatePath: acmeCertificatePath, renewed: false };
if (error && currentBundle && (notAfter - new Date() > 0)) { // still some life left in this certificate
debug('ensureCertificate: continue using existing bundle since renewal failed');
return { bundle: currentBundle, renewed: false };
}
if (!error) {
const [updateCertError] = await safe(updateCertBlobs(subdomain, domainObject));
if (!updateCertError) return { certificatePath: { certFilePath: acmePaths.certFilePath, keyFilePath: acmePaths.keyFilePath }, renewed: true };
[error] = await safe(updateCertBlobs(vhost, domainObject));
if (!error) return { bundle: { certFilePath: acmePaths.certFilePath, keyFilePath: acmePaths.keyFilePath }, renewed: true };
}
debug(`ensureCertificate: renewal of ${subdomain} failed. using fallback certificates for ${domain}`);
debug(`ensureCertificate: renewal of ${vhost} failed. using fallback certificates for ${domain}`);
return { certificatePath: getFallbackCertificatePathSync(domain), renewed: false };
return { bundle: getFallbackCertificatePathSync(domain), renewed: false };
}
async function writeDashboardNginxConfig(fqdn, certificatePath) {
assert.strictEqual(typeof fqdn, 'string');
assert.strictEqual(typeof certificatePath, 'object');
async function writeDashboardNginxConfig(vhost, bundle) {
assert.strictEqual(typeof vhost, 'string');
assert.strictEqual(typeof bundle, 'object');
const data = {
sourceDir: path.resolve(__dirname, '..'),
vhost: fqdn,
vhost: vhost,
hasIPv6: sysinfo.hasIPv6(),
endpoint: 'dashboard',
certFilePath: certificatePath.certFilePath,
keyFilePath: certificatePath.keyFilePath,
certFilePath: bundle.certFilePath,
keyFilePath: bundle.keyFilePath,
robotsTxtQuoted: JSON.stringify('User-agent: *\nDisallow: /\n'),
proxyAuth: { enabled: false, id: null, location: nginxLocation('/') },
ocsp: await isOcspEnabled(certificatePath.certFilePath)
ocsp: await isOcspEnabled(bundle.certFilePath)
};
const nginxConf = ejs.render(NGINX_APPCONFIG_EJS, data);
const nginxConfigFilename = path.join(paths.NGINX_APPCONFIG_DIR, `${fqdn}.conf`);
const nginxConfigFilename = path.join(paths.NGINX_APPCONFIG_DIR, `${vhost}.conf`);
if (!safe.fs.writeFileSync(nginxConfigFilename, nginxConf)) throw new BoxError(BoxError.FS_ERROR, safe.error);
@@ -434,10 +457,10 @@ async function writeDashboardConfig(domainObject) {
debug(`writeDashboardConfig: writing admin config for ${domainObject.domain}`);
const dashboardFqdn = dns.fqdn(constants.DASHBOARD_SUBDOMAIN, domainObject);
const certificatePath = await getCertificatePath(dashboardFqdn, domainObject.domain);
const dashboardFqdn = dns.fqdn(constants.DASHBOARD_LOCATION, domainObject);
const bundle = await getCertificatePath(dashboardFqdn, domainObject.domain);
await writeDashboardNginxConfig(dashboardFqdn, certificatePath);
await writeDashboardNginxConfig(dashboardFqdn, bundle);
}
function getNginxConfigFilename(app, fqdn, type) {
@@ -458,11 +481,11 @@ function getNginxConfigFilename(app, fqdn, type) {
return path.join(paths.NGINX_APPCONFIG_DIR, `${app.id}${nginxConfigFilenameSuffix}.conf`);
}
async function writeAppNginxConfig(app, fqdn, type, certificatePath) {
async function writeAppNginxConfig(app, fqdn, type, bundle) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof fqdn, 'string');
assert.strictEqual(typeof type, 'string');
assert.strictEqual(typeof certificatePath, 'object');
assert.strictEqual(typeof bundle, 'object');
const data = {
sourceDir: path.resolve(__dirname, '..'),
@@ -472,28 +495,17 @@ async function writeAppNginxConfig(app, fqdn, type, certificatePath) {
port: null,
endpoint: null,
redirectTo: null,
certFilePath: certificatePath.certFilePath,
keyFilePath: certificatePath.keyFilePath,
certFilePath: bundle.certFilePath,
keyFilePath: bundle.keyFilePath,
robotsTxtQuoted: null,
cspQuoted: null,
hideHeaders: [],
proxyAuth: { enabled: false },
upstreamUri: '', // only for endpoint === external
ocsp: await isOcspEnabled(certificatePath.certFilePath)
ocsp: await isOcspEnabled(bundle.certFilePath)
};
if (type === apps.LOCATION_TYPE_PRIMARY || type === apps.LOCATION_TYPE_ALIAS || type === apps.LOCATION_TYPE_SECONDARY) {
data.endpoint = 'app';
if (app.manifest.id === constants.PROXY_APP_APPSTORE_ID) {
data.endpoint = 'external';
// prevent generating invalid nginx configs
if (!app.upstreamUri) throw new BoxError(BoxError.BAD_FIELD, 'upstreamUri cannot be empty');
data.upstreamUri = app.upstreamUri;
}
// maybe these should become per domain at some point
const reverseProxyConfig = app.reverseProxyConfig || {}; // some of our code uses fake app objects
if (reverseProxyConfig.robotsTxt) data.robotsTxtQuoted = JSON.stringify(app.reverseProxyConfig.robotsTxt);
@@ -532,34 +544,20 @@ async function writeAppNginxConfig(app, fqdn, type, certificatePath) {
await reload();
}
async function writeAppConfigs(app) {
async function writeAppConfig(app) {
assert.strictEqual(typeof app, 'object');
const appDomains = [{ domain: app.domain, fqdn: app.fqdn, certificate: app.certificate, type: apps.LOCATION_TYPE_PRIMARY }]
.concat(app.secondaryDomains.map(sd => { return { domain: sd.domain, certificate: sd.certificate, fqdn: sd.fqdn, type: apps.LOCATION_TYPE_SECONDARY }; }))
.concat(app.redirectDomains.map(rd => { return { domain: rd.domain, certificate: rd.certificate, fqdn: rd.fqdn, type: apps.LOCATION_TYPE_REDIRECT }; }))
.concat(app.aliasDomains.map(ad => { return { domain: ad.domain, certificate: ad.certificate, fqdn: ad.fqdn, type: apps.LOCATION_TYPE_ALIAS }; }));
const appDomains = [{ domain: app.domain, fqdn: app.fqdn, type: apps.LOCATION_TYPE_PRIMARY }]
.concat(app.secondaryDomains.map(sd => { return { domain: sd.domain, fqdn: sd.fqdn, type: apps.LOCATION_TYPE_SECONDARY }; }))
.concat(app.redirectDomains.map(rd => { return { domain: rd.domain, fqdn: rd.fqdn, type: apps.LOCATION_TYPE_REDIRECT }; }))
.concat(app.aliasDomains.map(ad => { return { domain: ad.domain, fqdn: ad.fqdn, type: apps.LOCATION_TYPE_ALIAS }; }));
for (const appDomain of appDomains) {
const certificatePath = await getCertificatePath(appDomain.fqdn, appDomain.domain);
await writeAppNginxConfig(app, appDomain.fqdn, appDomain.type, certificatePath);
const bundle = await getCertificatePath(appDomain.fqdn, appDomain.domain);
await writeAppNginxConfig(app, appDomain.fqdn, appDomain.type, bundle);
}
}
async function setUserCertificate(app, fqdn, certificate) {
const { certFilePath, keyFilePath } = getUserCertificatePathSync(fqdn);
if (certificate !== null) {
if (!safe.fs.writeFileSync(certFilePath, certificate.cert)) throw safe.error;
if (!safe.fs.writeFileSync(keyFilePath, certificate.key)) throw safe.error;
} else { // remove existing cert/key
if (!safe.fs.unlinkSync(certFilePath)) debug(`Error removing cert: ${safe.error.message}`);
if (!safe.fs.unlinkSync(keyFilePath)) debug(`Error removing key: ${safe.error.message}`);
}
await writeAppConfigs(app);
}
async function configureApp(app, auditSource) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof auditSource, 'object');
@@ -573,7 +571,7 @@ async function configureApp(app, auditSource) {
await ensureCertificate(appDomain.fqdn, appDomain.domain, auditSource);
}
await writeAppConfigs(app);
await writeAppConfig(app);
}
async function unconfigureApp(app) {
@@ -625,7 +623,7 @@ async function renewCerts(options, auditSource, progressCallback) {
progressCallback({ percent: progress, message: `Ensuring certs of ${appDomain.fqdn}` });
progress += Math.round(100/appDomains.length);
const { certificatePath, renewed } = await ensureCertificate(appDomain.fqdn, appDomain.domain, auditSource);
const { bundle, renewed } = await ensureCertificate(appDomain.fqdn, appDomain.domain, auditSource);
if (renewed) renewedCerts.push(appDomain.fqdn);
@@ -633,15 +631,15 @@ async function renewCerts(options, auditSource, progressCallback) {
// hack to check if the app's cert changed or not. this doesn't handle prod/staging le change since they use same file name
let currentNginxConfig = safe.fs.readFileSync(appDomain.nginxConfigFilename, 'utf8') || '';
if (currentNginxConfig.includes(certificatePath.certFilePath)) continue;
if (currentNginxConfig.includes(bundle.certFilePath)) continue;
debug(`renewCerts: creating new nginx config since ${appDomain.nginxConfigFilename} does not have ${certificatePath.certFilePath}`);
debug(`renewCerts: creating new nginx config since ${appDomain.nginxConfigFilename} does not have ${bundle.certFilePath}`);
// reconfigure since the cert changed
if (appDomain.type === 'webadmin' || appDomain.type === 'webadmin+mail') {
await writeDashboardNginxConfig(settings.dashboardFqdn(), certificatePath);
await writeDashboardNginxConfig(settings.dashboardFqdn(), bundle);
} else {
await writeAppNginxConfig(appDomain.app, appDomain.fqdn, appDomain.type, certificatePath);
await writeAppNginxConfig(appDomain.app, appDomain.fqdn, appDomain.type, bundle);
}
}
@@ -659,15 +657,14 @@ async function renewCerts(options, auditSource, progressCallback) {
}
}
async function cleanupCerts(auditSource, progressCallback) {
async function cleanupCerts(auditSource) {
assert.strictEqual(typeof auditSource, 'object');
assert.strictEqual(typeof progressCallback, 'function');
const filenames = await fs.promises.readdir(paths.NGINX_CERT_DIR);
const certFilenames = filenames.filter(f => f.endsWith('.cert'));
const now = new Date();
progressCallback({ message: 'Checking expired certs for removal' });
debug('cleanupCerts: start');
const fqdns = [];
@@ -678,7 +675,7 @@ async function cleanupCerts(auditSource, progressCallback) {
if (now - notAfter >= (60 * 60 * 24 * 30 * 6 * 1000)) { // expired 6 months ago
const fqdn = certFilename.replace(/\.cert$/, '');
progressCallback({ message: `deleting certs of ${fqdn}` });
debug(`cleanupCerts: deleting certs of ${fqdn}`);
// it is safe to delete the certs of stopped apps because their nginx configs are removed
safe.fs.unlinkSync(certFilePath);
@@ -704,7 +701,7 @@ async function checkCerts(options, auditSource, progressCallback) {
assert.strictEqual(typeof progressCallback, 'function');
await renewCerts(options, auditSource, progressCallback);
await cleanupCerts(auditSource, progressCallback);
await cleanupCerts(auditSource);
}
function removeAppConfigs() {
@@ -713,7 +710,7 @@ function removeAppConfigs() {
debug('removeAppConfigs: removing nginx configs of apps');
// remove all configs which are not the default or current dashboard
for (const appConfigFile of fs.readdirSync(paths.NGINX_APPCONFIG_DIR)) {
for (let appConfigFile of fs.readdirSync(paths.NGINX_APPCONFIG_DIR)) {
if (appConfigFile !== constants.NGINX_DEFAULT_CONFIG_FILE_NAME && appConfigFile !== dashboardConfigFilename) {
fs.unlinkSync(path.join(paths.NGINX_APPCONFIG_DIR, appConfigFile));
}
+13 -27
View File
@@ -8,8 +8,8 @@ exports = module.exports = {
authorizeOperator,
};
const apps = require('../apps.js'),
tokens = require('../tokens.js'),
const accesscontrol = require('../accesscontrol.js'),
apps = require('../apps.js'),
assert = require('assert'),
BoxError = require('../boxerror.js'),
externalLdap = require('../externalldap.js'),
@@ -43,13 +43,8 @@ async function passwordAuth(req, res, next) {
if (!user.ghost && !user.appPassword && user.twoFactorAuthenticationEnabled) {
if (!totpToken) return next(new HttpError(401, 'A totpToken must be provided'));
if (user.source === 'ldap') {
const [error] = await safe(externalLdap.verifyPasswordAndTotpToken(user, password, totpToken));
if (error) return next(new HttpError(401, 'Invalid totpToken'));
} else {
const verified = speakeasy.totp.verify({ secret: user.twoFactorAuthenticationSecret, encoding: 'base32', token: totpToken, window: 2 });
if (!verified) return next(new HttpError(401, 'Invalid totpToken'));
}
const verified = speakeasy.totp.verify({ secret: user.twoFactorAuthenticationSecret, encoding: 'base32', token: totpToken, window: 2 });
if (!verified) return next(new HttpError(401, 'Invalid totpToken'));
}
req.user = user;
@@ -58,32 +53,27 @@ async function passwordAuth(req, res, next) {
}
async function tokenAuth(req, res, next) {
let accessToken;
let token;
// this determines the priority
if (req.body && req.body.access_token) accessToken = req.body.access_token;
if (req.query && req.query.access_token) accessToken = req.query.access_token;
if (req.body && req.body.access_token) token = req.body.access_token;
if (req.query && req.query.access_token) token = req.query.access_token;
if (req.headers && req.headers.authorization) {
const parts = req.headers.authorization.split(' ');
if (parts.length == 2) {
const [scheme, credentials] = parts;
if (/^Bearer$/i.test(scheme)) accessToken = credentials;
if (/^Bearer$/i.test(scheme)) token = credentials;
}
}
if (!accessToken) return next(new HttpError(401, 'Token required'));
if (!token) return next(new HttpError(401, 'Token required'));
const token = await tokens.getByAccessToken(accessToken);
if (!token) return next(new HttpError(401, 'No such token'));
const [error, user] = await safe(accesscontrol.verifyToken(token));
if (error && error.reason === BoxError.INVALID_CREDENTIALS) return next(new HttpError(401, error.message));
if (error) return next(new HttpError(500, error.message));
const user = await users.get(token.identifier);
if (!user) return next(new HttpError(401,'User not found'));
if (!user.active) return next(new HttpError(401,'User not active'));
await safe(tokens.update(token.id, { lastUsedTime: new Date() })); // ignore any error
req.token = token;
req.access_token = token; // used in logout route
req.user = user;
next();
@@ -94,10 +84,8 @@ function authorize(requiredRole) {
return function (req, res, next) {
assert.strictEqual(typeof req.user, 'object');
assert.strictEqual(typeof req.token, 'object');
if (users.compareRoles(req.user.role, requiredRole) < 0) return next(new HttpError(403, `role '${requiredRole}' is required but user has only '${req.user.role}'`));
if (!tokens.hasScope(req.token, req.method, req.path)) return next(new HttpError(403, 'access token does not have this scope'));
next();
};
@@ -107,9 +95,7 @@ async function authorizeOperator(req, res, next) {
assert.strictEqual(typeof req.params.id, 'string');
assert.strictEqual(typeof req.user, 'object');
assert.strictEqual(typeof req.app, 'object');
assert.strictEqual(typeof req.token, 'object');
if (!tokens.hasScope(req.token, req.method, req.path)) return next(new HttpError(403, 'access token does not have this scope'));
if (apps.isOperator(req.app, req.user)) return next();
return next(new HttpError(403, 'user is not an operator'));
-90
View File
@@ -1,90 +0,0 @@
'use strict';
exports = module.exports = {
listByUser,
add,
get,
update,
remove,
getIcon
};
const assert = require('assert'),
applinks = require('../applinks.js'),
BoxError = require('../boxerror.js'),
safe = require('safetydance'),
HttpError = require('connect-lastmile').HttpError,
HttpSuccess = require('connect-lastmile').HttpSuccess;
// Lists all applinks visible to the requesting user. Icon payloads are
// stripped from the response; they are served by the dedicated icon route.
async function listByUser(req, res, next) {
    assert.strictEqual(typeof req.user, 'object');

    const [error, result] = await safe(applinks.listByUser(req.user));
    if (error) return next(BoxError.toHttpError(error));

    // we have a separate route for this
    for (const applink of result) delete applink.icon;

    next(new HttpSuccess(200, { applinks: result }));
}
// Creates a new applink from the request body. upstreamUri is mandatory;
// label, tags and accessRestriction are optional and type-checked here.
// Responds 201 on success.
async function add(req, res, next) {
    assert.strictEqual(typeof req.body, 'object');

    const data = req.body;

    // field validation; detailed semantic checks happen in the applinks module
    if (!data.upstreamUri || typeof data.upstreamUri !== 'string') return next(new HttpError(400, 'upstreamUri must be a non-empty string'));
    if ('label' in data && typeof data.label !== 'string') return next(new HttpError(400, 'label must be a string'));
    if ('tags' in data && !Array.isArray(data.tags)) return next(new HttpError(400, 'tags must be an array with strings'));
    if ('accessRestriction' in data && typeof data.accessRestriction !== 'object') return next(new HttpError(400, 'accessRestriction must be an object'));

    const [error] = await safe(applinks.add(data));
    if (error) return next(BoxError.toHttpError(error));

    next(new HttpSuccess(201, {}));
}
// Fetches a single applink by id. The icon blob is removed from the
// result; it is served by the dedicated icon route.
async function get(req, res, next) {
    assert.strictEqual(typeof req.params.id, 'string');

    const [error, applink] = await safe(applinks.get(req.params.id));
    if (error) return next(BoxError.toHttpError(error));

    // we have a separate route for this
    delete applink.icon;

    next(new HttpSuccess(200, applink));
}
// Updates an existing applink. upstreamUri is mandatory; label, tags,
// accessRestriction and icon are optional and type-checked here.
// Responds 202 on success.
async function update(req, res, next) {
    assert.strictEqual(typeof req.params.id, 'string');
    assert.strictEqual(typeof req.body, 'object');

    const data = req.body;

    // field validation; detailed semantic checks happen in the applinks module
    if (!data.upstreamUri || typeof data.upstreamUri !== 'string') return next(new HttpError(400, 'upstreamUri must be a non-empty string'));
    if ('label' in data && typeof data.label !== 'string') return next(new HttpError(400, 'label must be a string'));
    if ('tags' in data && !Array.isArray(data.tags)) return next(new HttpError(400, 'tags must be an array with strings'));
    if ('accessRestriction' in data && typeof data.accessRestriction !== 'object') return next(new HttpError(400, 'accessRestriction must be an object'));
    if ('icon' in data && typeof data.icon !== 'string') return next(new HttpError(400, 'icon must be a string'));

    const [error] = await safe(applinks.update(req.params.id, data));
    if (error) return next(BoxError.toHttpError(error));

    next(new HttpSuccess(202, {}));
}
// Deletes the applink with the given id. Responds 204 (no content) on success.
async function remove(req, res, next) {
    assert.strictEqual(typeof req.params.id, 'string');

    const [removeError] = await safe(applinks.remove(req.params.id));
    if (removeError) return next(BoxError.toHttpError(removeError));

    next(new HttpSuccess(204));
}
// Serves the icon of an applink. Passing ?original requests the
// unprocessed upload. 404s when the applink has no icon.
async function getIcon(req, res, next) {
    assert.strictEqual(typeof req.params.id, 'string');

    const options = { original: req.query.original };
    const [error, icon] = await safe(applinks.getIcon(req.params.id, options));
    if (error) return next(BoxError.toHttpError(error));
    if (!icon) return next(new HttpError(404, 'no such icon'));

    res.send(icon);
}
+1 -19
View File
@@ -37,7 +37,6 @@ exports = module.exports = {
setLocation,
setStorage,
setMounts,
setUpstreamUri,
stop,
start,
@@ -66,7 +65,6 @@ const apps = require('../apps.js'),
assert = require('assert'),
AuditSource = require('../auditsource.js'),
BoxError = require('../boxerror.js'),
constants = require('../constants.js'),
debug = require('debug')('box:routes/apps'),
HttpError = require('connect-lastmile').HttpError,
HttpSuccess = require('connect-lastmile').HttpSuccess,
@@ -173,13 +171,9 @@ async function install(req, res, next) {
if ('skipDnsSetup' in data && typeof data.skipDnsSetup !== 'boolean') return next(new HttpError(400, 'skipDnsSetup must be boolean'));
if ('enableMailbox' in data && typeof data.enableMailbox !== 'boolean') return next(new HttpError(400, 'enableMailbox must be boolean'));
if ('upstreamUri' in data && (typeof data.upstreamUri !== 'string' || !data.upstreamUri)) return next(new HttpError(400, 'upstreamUri must be a non empty string'));
let [error, result] = await safe(apps.downloadManifest(data.appStoreId, data.manifest));
if (error) return next(BoxError.toHttpError(error));
if (result.manifest.appStoreId === constants.PROXY_APP_APPSTORE_ID && (typeof data.upstreamUri !== 'string' || !data.upstreamUri)) return next(new HttpError(400, 'upstreamUri must be a non empty string'));
if (safe.query(result.manifest, 'addons.docker') && req.user.role !== users.ROLE_OWNER) return next(new HttpError(403, '"owner" role is required to install app with docker addon'));
data.appStoreId = result.appStoreId;
@@ -409,6 +403,7 @@ async function setLocation(req, res, next) {
assert.strictEqual(typeof req.app, 'object');
if (typeof req.body.subdomain !== 'string') return next(new HttpError(400, 'subdomain must be string')); // subdomain may be an empty string
if (!req.body.domain) return next(new HttpError(400, 'domain is required'));
if (typeof req.body.domain !== 'string') return next(new HttpError(400, 'domain must be string'));
if ('portBindings' in req.body && typeof req.body.portBindings !== 'object') return next(new HttpError(400, 'portBindings must be an object'));
@@ -507,7 +502,6 @@ async function importApp(req, res, next) {
if (req.body.backupConfig) {
if (typeof backupConfig.provider !== 'string') return next(new HttpError(400, 'provider is required'));
if ('password' in backupConfig && typeof backupConfig.password !== 'string') return next(new HttpError(400, 'password must be a string'));
if ('encryptedFilenames' in backupConfig && typeof backupConfig.encryptedFilenames !== 'boolean') return next(new HttpError(400, 'encryptedFilenames must be a boolean'));
if ('acceptSelfSignedCerts' in backupConfig && typeof backupConfig.acceptSelfSignedCerts !== 'boolean') return next(new HttpError(400, 'acceptSelfSignedCerts must be a boolean'));
// testing backup config can take sometime
@@ -904,18 +898,6 @@ async function setMounts(req, res, next) {
next(new HttpSuccess(202, { taskId: result.taskId }));
}
async function setUpstreamUri(req, res, next) {
assert.strictEqual(typeof req.body, 'object');
assert.strictEqual(typeof req.app, 'object');
if (typeof req.body.upstreamUri !== 'string') return next(new HttpError(400, 'upstreamUri must be a string'));
const [error] = await safe(apps.setUpstreamUri(req.app, req.body.upstreamUri, AuditSource.fromRequest(req)));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, {}));
}
async function listEventlog(req, res, next) {
assert.strictEqual(typeof req.app, 'object');
+3 -3
View File
@@ -63,7 +63,7 @@ async function login(req, res, next) {
[error, token] = await safe(tokens.add({ clientId: type, identifier: req.user.id, expires: Date.now() + constants.DEFAULT_TOKEN_EXPIRATION_MSECS }));
if (error) return next(new HttpError(500, error));
await eventlog.add(req.user.ghost ? eventlog.ACTION_USER_LOGIN_GHOST : eventlog.ACTION_USER_LOGIN, auditSource, { userId: req.user.id, user: users.removePrivateFields(req.user) });
await eventlog.add(eventlog.ACTION_USER_LOGIN, auditSource, { userId: req.user.id, user: users.removePrivateFields(req.user) });
if (!req.user.ghost) safe(users.notifyLoginLocation(req.user, ip, userAgent, auditSource), { debug });
@@ -71,11 +71,11 @@ async function login(req, res, next) {
}
async function logout(req, res) {
assert.strictEqual(typeof req.token, 'object');
assert.strictEqual(typeof req.access_token, 'string');
await eventlog.add(eventlog.ACTION_USER_LOGOUT, AuditSource.fromRequest(req), { userId: req.user.id, user: users.removePrivateFields(req.user) });
await safe(tokens.delByAccessToken(req.token.accessToken));
await safe(tokens.delByAccessToken(req.access_token));
res.redirect('/login.html');
}
+27 -25
View File
@@ -1,36 +1,38 @@
'use strict';
exports = module.exports = {
getSystemGraphs,
getAppGraphs
getGraphs
};
const assert = require('assert'),
graphs = require('../graphs.js'),
const middleware = require('../middleware/index.js'),
HttpError = require('connect-lastmile').HttpError,
HttpSuccess = require('connect-lastmile').HttpSuccess,
safe = require('safetydance');
url = require('url');
async function getSystemGraphs(req, res, next) {
if (!req.query.fromMinutes || !parseInt(req.query.fromMinutes)) return next(new HttpError(400, 'fromMinutes must be a number'));
// for testing locally: curl 'http://127.0.0.1:8417/graphite-web/render?format=json&from=-1min&target=absolute(collectd.localhost.du-docker.capacity-usage)'
// the datapoint is (value, timestamp) https://buildmedia.readthedocs.org/media/pdf/graphite/0.9.16/graphite.pdf
const graphiteProxy = middleware.proxy(url.parse('http://127.0.0.1:8417'));
const fromMinutes = parseInt(req.query.fromMinutes);
const noNullPoints = !!req.query.noNullPoints;
const [error, result] = await safe(graphs.getSystem(fromMinutes, noNullPoints));
if (error) return next(new HttpError(500, error));
function getGraphs(req, res, next) {
const parsedUrl = url.parse(req.url, true /* parseQueryString */);
delete parsedUrl.query['access_token'];
delete req.headers['authorization'];
delete req.headers['cookies'];
next(new HttpSuccess(200, result));
// 'graphite-web' is the URL_PREFIX in docker-graphite
req.url = url.format({ pathname: 'graphite-web/render', query: parsedUrl.query });
// graphs may take very long to respond so we run into headers already sent issues quite often
// nginx still has a request timeout which can deal with this then.
req.clearTimeout();
graphiteProxy(req, res, function (error) {
if (!error) return next();
if (error.code === 'ECONNREFUSED') return next(new HttpError(424, 'Unable to connect to graphite'));
// ECONNRESET here is most likely because of a bug in the query or the uwsgi buffer size is too small
if (error.code === 'ECONNRESET') return next(new HttpError(424, 'Unable to query graphite'));
next(new HttpError(500, error));
});
}
async function getAppGraphs(req, res, next) {
assert.strictEqual(typeof req.app, 'object');
if (!req.query.fromMinutes || !parseInt(req.query.fromMinutes)) return next(new HttpError(400, 'fromMinutes must be a number'));
const fromMinutes = parseInt(req.query.fromMinutes);
const noNullPoints = !!req.query.noNullPoints;
const [error, result] = await safe(graphs.getByApp(req.app, fromMinutes, noNullPoints));
if (error) return next(new HttpError(500, error));
next(new HttpSuccess(200, result));
}
+2 -2
View File
@@ -6,7 +6,7 @@ exports = module.exports = {
add,
update,
remove,
setMembers
updateMembers
};
const assert = require('assert'),
@@ -49,7 +49,7 @@ async function update(req, res, next) {
next(new HttpSuccess(200, { }));
}
async function setMembers(req, res, next) {
async function updateMembers(req, res, next) {
assert.strictEqual(typeof req.params.groupId, 'string');
if (!req.body.userIds) return next(new HttpError(404, 'missing or invalid userIds fields'));
-1
View File
@@ -4,7 +4,6 @@ exports = module.exports = {
accesscontrol: require('./accesscontrol.js'),
appPasswords: require('./apppasswords.js'),
apps: require('./apps.js'),
applinks: require('./applinks.js'),
appstore: require('./appstore.js'),
backups: require('./backups.js'),
branding: require('./branding.js'),
-10
View File
@@ -177,11 +177,6 @@ async function addMailbox(req, res, next) {
if (typeof req.body.ownerType !== 'string') return next(new HttpError(400, 'ownerType must be a string'));
if (typeof req.body.active !== 'boolean') return next(new HttpError(400, 'active must be a boolean'));
if (!Number.isInteger(req.body.storageQuota)) return next(new HttpError(400, 'storageQuota must be an integer'));
if (req.body.storageQuota < 0) return next(new HttpError(400, 'storageQuota must be a positive integer or zero'));
if (!Number.isInteger(req.body.messagesQuota)) return next(new HttpError(400, 'messagesQuota must be an integer'));
if (req.body.messagesQuota < 0) return next(new HttpError(400, 'messagesQuota must be a positive integer or zero'));
const [error] = await safe(mail.addMailbox(req.body.name, req.params.domain, req.body, AuditSource.fromRequest(req)));
if (error) return next(BoxError.toHttpError(error));
@@ -197,11 +192,6 @@ async function updateMailbox(req, res, next) {
if (typeof req.body.active !== 'boolean') return next(new HttpError(400, 'active must be a boolean'));
if (typeof req.body.enablePop3 !== 'boolean') return next(new HttpError(400, 'enablePop3 must be a boolean'));
if (!Number.isInteger(req.body.storageQuota)) return next(new HttpError(400, 'storageQuota must be an integer'));
if (req.body.storageQuota < 0) return next(new HttpError(400, 'storageQuota must be a positive integer or zero'));
if (!Number.isInteger(req.body.messagesQuota)) return next(new HttpError(400, 'messagesQuota must be an integer'));
if (req.body.messagesQuota < 0) return next(new HttpError(400, 'messagesQuota must be a positive integer or zero'));
const [error] = await safe(mail.updateMailbox(req.params.name, req.params.domain, req.body, AuditSource.fromRequest(req)));
if (error) return next(BoxError.toHttpError(error));
+5 -15
View File
@@ -3,7 +3,6 @@
exports = module.exports = {
proxy,
restart,
queueProxy,
setLocation,
getLocation
@@ -27,8 +26,9 @@ async function restart(req, res, next) {
next();
}
async function proxyToMailContainer(port, pathname, req, res, next) {
const parsedUrl = url.parse(req.url, true /* parseQueryString */);
async function proxy(req, res, next) {
let parsedUrl = url.parse(req.url, true /* parseQueryString */);
const pathname = req.path.split('/').pop();
// do not proxy protected values
delete parsedUrl.query['access_token'];
@@ -39,9 +39,9 @@ async function proxyToMailContainer(port, pathname, req, res, next) {
if (error) return next(BoxError.toHttpError(error));
parsedUrl.query['access_token'] = addonDetails.token;
req.url = url.format({ pathname, query: parsedUrl.query });
req.url = url.format({ pathname: pathname, query: parsedUrl.query });
const proxyOptions = url.parse(`http://${addonDetails.ip}:${port}`);
const proxyOptions = url.parse(`http://${addonDetails.ip}:3000`);
const mailserverProxy = middleware.proxy(proxyOptions);
req.clearTimeout(); // TODO: add timeout to mail server proxy logic instead of this
@@ -55,16 +55,6 @@ async function proxyToMailContainer(port, pathname, req, res, next) {
});
}
async function proxy(req, res, next) {
const pathname = req.path.split('/').pop();
proxyToMailContainer(3000, pathname, req, res, next);
}
async function queueProxy(req, res, next) {
proxyToMailContainer(6000, req.path.replace('/', '/queue/'), req, res, next);
}
async function getLocation(req, res, next) {
const [error, result] = await safe(mail.getLocation());
if (error) return next(BoxError.toHttpError(error));
+1 -1
View File
@@ -52,7 +52,7 @@ async function update(req, res, next) {
assert.strictEqual(typeof req.notification, 'object');
assert.strictEqual(typeof req.body, 'object');
if (typeof req.body.acknowledged !== 'boolean') return next(new HttpError(400, 'acknowledged must be a boolean'));
if (typeof req.body.acknowledged !== 'boolean') return next(new HttpError(400, 'acknowledged must be a booliean'));
const [error] = await safe(notifications.update(req.notification, { acknowledged: req.body.acknowledged }));
if (error) return next(BoxError.toHttpError(error));
-2
View File
@@ -103,8 +103,6 @@ async function restore(req, res, next) {
const backupConfig = req.body.backupConfig;
if (typeof backupConfig.provider !== 'string') return next(new HttpError(400, 'provider is required'));
if ('password' in backupConfig && typeof backupConfig.password !== 'string') return next(new HttpError(400, 'password must be a string'));
if ('encryptedFilenames' in req.body && typeof req.body.encryptedFilenames !== 'boolean') return next(new HttpError(400, 'encryptedFilenames must be a boolean'));
if (typeof backupConfig.format !== 'string') return next(new HttpError(400, 'format must be a string'));
if ('acceptSelfSignedCerts' in backupConfig && typeof backupConfig.acceptSelfSignedCerts !== 'boolean') return next(new HttpError(400, 'format must be a boolean'));
+6 -8
View File
@@ -74,8 +74,6 @@ async function setBackupConfig(req, res, next) {
if (typeof req.body.provider !== 'string') return next(new HttpError(400, 'provider is required'));
if (typeof req.body.schedulePattern !== 'string') return next(new HttpError(400, 'schedulePattern is required'));
if ('password' in req.body && typeof req.body.password !== 'string') return next(new HttpError(400, 'password must be a string'));
if ('encryptedFilenames' in req.body && typeof req.body.encryptedFilenames !== 'boolean') return next(new HttpError(400, 'encryptedFilenames must be a boolean'));
if ('syncConcurrency' in req.body) {
if (typeof req.body.syncConcurrency !== 'number') return next(new HttpError(400, 'syncConcurrency must be a positive integer'));
if (req.body.syncConcurrency < 1) return next(new HttpError(400, 'syncConcurrency must be a positive integer'));
@@ -139,21 +137,21 @@ async function setExternalLdapConfig(req, res, next) {
next(new HttpSuccess(200, {}));
}
async function getDirectoryServerConfig(req, res, next) {
const [error, config] = await safe(settings.getDirectoryServerConfig());
async function getUserDirectoryConfig(req, res, next) {
const [error, config] = await safe(settings.getUserDirectoryConfig());
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, config));
}
async function setDirectoryServerConfig(req, res, next) {
async function setUserDirectoryConfig(req, res, next) {
assert.strictEqual(typeof req.body, 'object');
if (typeof req.body.enabled !== 'boolean') return next(new HttpError(400, 'enabled must be a boolean'));
if (typeof req.body.secret !== 'string') return next(new HttpError(400, 'secret must be a string'));
if ('allowlist' in req.body && typeof req.body.allowlist !== 'string') return next(new HttpError(400, 'allowlist must be a string'));
const [error] = await safe(settings.setDirectoryServerConfig(req.body));
const [error] = await safe(settings.setUserDirectoryConfig(req.body));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, {}));
@@ -300,7 +298,7 @@ function get(req, res, next) {
case settings.IPV6_CONFIG_KEY: return getIPv6Config(req, res, next);
case settings.BACKUP_CONFIG_KEY: return getBackupConfig(req, res, next);
case settings.EXTERNAL_LDAP_KEY: return getExternalLdapConfig(req, res, next);
case settings.DIRECTORY_SERVER_KEY: return getDirectoryServerConfig(req, res, next);
case settings.USER_DIRECTORY_KEY: return getUserDirectoryConfig(req, res, next);
case settings.UNSTABLE_APPS_KEY: return getUnstableAppsConfig(req, res, next);
case settings.REGISTRY_CONFIG_KEY: return getRegistryConfig(req, res, next);
case settings.SYSINFO_CONFIG_KEY: return getSysinfoConfig(req, res, next);
@@ -323,7 +321,7 @@ function set(req, res, next) {
case settings.DYNAMIC_DNS_KEY: return setDynamicDnsConfig(req, res, next);
case settings.IPV6_CONFIG_KEY: return setIPv6Config(req, res, next);
case settings.EXTERNAL_LDAP_KEY: return setExternalLdapConfig(req, res, next);
case settings.DIRECTORY_SERVER_KEY: return setDirectoryServerConfig(req, res, next);
case settings.USER_DIRECTORY_KEY: return setUserDirectoryConfig(req, res, next);
case settings.UNSTABLE_APPS_KEY: return setUnstableAppsConfig(req, res, next);
case settings.REGISTRY_CONFIG_KEY: return setRegistryConfig(req, res, next);
case settings.SYSINFO_CONFIG_KEY: return setSysinfoConfig(req, res, next);
+3
View File
@@ -63,6 +63,9 @@ async function canEnableRemoteSupport(req, res, next) {
const sshdConfig = safe.fs.readFileSync(SSHD_CONFIG_FILE, 'utf8');
if (!sshdConfig) return next(new HttpError(412, `Failed to read file ${SSHD_CONFIG_FILE}`));
// only check for PermitRootLogin if we want to enable remote support
if (req.body.enable && !sshdConfig.split('\n').find(function (line) { return line.search(/^PermitRootLogin.*yes/) !== -1; })) return next(new HttpError(417, `Set "PermitRootLogin yes" in ${SSHD_CONFIG_FILE}`));
next();
}
+2 -2
View File
@@ -333,10 +333,10 @@ xdescribe('App API', function () {
it('app install fails - reserved smtp subdomain', function (done) {
superagent.post(SERVER_URL + '/api/v1/apps/install')
.query({ access_token: token })
.send({ manifest: APP_MANIFEST, subdomain: constants.SMTP_SUBDOMAIN, accessRestriction: null, domain: DOMAIN_0.domain })
.send({ manifest: APP_MANIFEST, subdomain: constants.SMTP_LOCATION, accessRestriction: null, domain: DOMAIN_0.domain })
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
expect(res.body.message).to.contain(constants.SMTP_SUBDOMAIN + ' is reserved');
expect(res.body.message).to.contain(constants.SMTP_LOCATION + ' is reserved');
done();
});
});
+20 -2
View File
@@ -29,13 +29,13 @@ describe('Appstore Apps API', function () {
it('cannot get app with bad token', async function () {
const scope1 = nock(settings.apiServerOrigin())
.get(`/api/v1/apps/org.wordpress.cloudronapp?accessToken=${appstoreToken}`)
.reply(403, {});
.reply(402, {});
const response = await superagent.get(`${serverUrl}/api/v1/appstore/apps/org.wordpress.cloudronapp`)
.query({ access_token: owner.token })
.ok(() => true);
expect(response.statusCode).to.be(412);
expect(response.statusCode).to.be(402);
expect(scope1.isDone()).to.be.ok();
});
@@ -109,6 +109,15 @@ describe('Appstore Cloudron Registration API - existing user', function () {
nock.cleanAll();
});
it('cannot re-register - already registered', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/appstore/register_cloudron`)
.send({ email: 'test@cloudron.io', password: 'secret', signup: false })
.query({ access_token: owner.token })
.ok(() => true);
expect(response.statusCode).to.equal(409);
});
it('can get subscription', async function () {
const scope1 = nock(settings.apiServerOrigin())
.get('/api/v1/subscription?accessToken=CLOUDRON_TOKEN', () => true)
@@ -156,6 +165,15 @@ describe('Appstore Cloudron Registration API - new user signup', function () {
expect(await settings.getAppstoreWebToken()).to.be('SECRET_TOKEN');
});
it('cannot re-register - already registered', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/appstore/register_cloudron`)
.send({ email: 'test@cloudron.io', password: 'secret', signup: false })
.query({ access_token: owner.token })
.ok(() => true);
expect(response.statusCode).to.equal(409);
});
it('can get subscription', async function () {
const scope1 = nock(settings.apiServerOrigin())
.get('/api/v1/subscription?accessToken=CLOUDRON_TOKEN', () => true)
+1 -4
View File
@@ -107,10 +107,7 @@ async function waitForTask(taskId) {
for (let i = 0; i < 10; i++) {
const result = await tasks.get(taskId);
expect(result).to.not.be(null);
if (!result.active) {
if (result.success) return result;
throw new Error(`Task ${taskId} failed: ${result.error.message} - ${result.error.stack}`);
}
if (!result.active) return;
await delay(2000);
console.log(`Waiting for task to ${taskId} finish`);
}
+7 -20
View File
@@ -352,19 +352,10 @@ describe('Mail API', function () {
expect(response.statusCode).to.equal(400);
});
it('cannot set with bad addresses field', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/mail/${dashboardDomain}/catch_all`)
.query({ access_token: owner.token })
.send({ addresses: [ 'user1' ] })
.ok(() => true);
expect(response.statusCode).to.equal(400);
});
it('set succeeds', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/mail/${dashboardDomain}/catch_all`)
.query({ access_token: owner.token })
.send({ addresses: [ `user1@${dashboardDomain}` ] });
.send({ addresses: [ 'user1' ] });
expect(response.statusCode).to.equal(202);
});
@@ -374,7 +365,7 @@ describe('Mail API', function () {
.query({ access_token: owner.token });
expect(response.statusCode).to.equal(200);
expect(response.body.catchAll).to.eql([ `user1@${dashboardDomain}` ]);
expect(response.body.catchAll).to.eql([ 'user1' ]);
});
});
@@ -431,7 +422,7 @@ describe('Mail API', function () {
it('add succeeds', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/mail/${dashboardDomain}/mailboxes`)
.send({ name: MAILBOX_NAME, ownerId: owner.id, ownerType: 'user', active: true, storageQuota: 10, messagesQuota: 20 })
.send({ name: MAILBOX_NAME, ownerId: owner.id, ownerType: 'user', active: true })
.query({ access_token: owner.token });
expect(response.statusCode).to.equal(201);
@@ -439,7 +430,7 @@ describe('Mail API', function () {
it('cannot add again', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/mail/${dashboardDomain}/mailboxes`)
.send({ name: MAILBOX_NAME, ownerId: owner.id, ownerType: 'user', active: true, storageQuota: 10, messagesQuota: 20 })
.send({ name: MAILBOX_NAME, ownerId: owner.id, ownerType: 'user', active: true })
.query({ access_token: owner.token })
.ok(() => true);
@@ -466,8 +457,6 @@ describe('Mail API', function () {
expect(response.body.mailbox.aliasName).to.equal(null);
expect(response.body.mailbox.aliasDomain).to.equal(null);
expect(response.body.mailbox.domain).to.equal(dashboardDomain);
expect(response.body.mailbox.storageQuota).to.equal(10);
expect(response.body.mailbox.messagesQuota).to.equal(20);
});
it('listing succeeds', async function () {
@@ -482,8 +471,6 @@ describe('Mail API', function () {
expect(response.body.mailboxes[0].ownerType).to.equal('user');
expect(response.body.mailboxes[0].aliases).to.eql([]);
expect(response.body.mailboxes[0].domain).to.equal(dashboardDomain);
expect(response.body.mailboxes[0].storageQuota).to.equal(10);
expect(response.body.mailboxes[0].messagesQuota).to.equal(20);
});
it('disable fails even if not exist', async function () {
@@ -518,7 +505,7 @@ describe('Mail API', function () {
it('add the mailbox', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/mail/${dashboardDomain}/mailboxes`)
.send({ name: MAILBOX_NAME, ownerId: owner.id, ownerType: 'user', active: true, storageQuota: 10, messagesQuota: 20 })
.send({ name: MAILBOX_NAME, ownerId: owner.id, ownerType: 'user', active: true })
.query({ access_token: owner.token });
expect(response.statusCode).to.equal(201);
@@ -552,7 +539,7 @@ describe('Mail API', function () {
it('set succeeds', async function () {
const response = await superagent.put(`${serverUrl}/api/v1/mail/${dashboardDomain}/mailboxes/${MAILBOX_NAME}/aliases`)
.send({ aliases: [{ name: 'hello*', domain: dashboardDomain}, {name: 'there', domain: dashboardDomain}] })
.send({ aliases: [{ name: 'hello', domain: dashboardDomain}, {name: 'there', domain: dashboardDomain}] })
.query({ access_token: owner.token });
expect(response.statusCode).to.equal(202);
@@ -563,7 +550,7 @@ describe('Mail API', function () {
.query({ access_token: owner.token });
expect(response.statusCode).to.equal(200);
expect(response.body.aliases).to.eql([{ name: 'hello*', domain: dashboardDomain}, {name: 'there', domain: dashboardDomain}]);
expect(response.body.aliases).to.eql([{ name: 'hello', domain: dashboardDomain}, {name: 'there', domain: dashboardDomain}]);
});
it('get fails if mailbox does not exist', async function () {
+2 -31
View File
@@ -15,7 +15,7 @@ describe('Tokens API', function () {
before(setup);
after(cleanup);
let token, readOnlyToken;
let token;
it('cannot create token with bad name', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/tokens`)
@@ -35,42 +35,13 @@ describe('Tokens API', function () {
token = response.body;
});
it('can create read-only token', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/tokens`)
.query({ access_token: owner.token })
.send({ name: 'mytoken1', scope: { '*': 'r' }});
expect(response.status).to.equal(201);
expect(response.body).to.be.a('object');
readOnlyToken = response.body;
});
it('cannot create read-only token with invalid scope', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/tokens`)
.query({ access_token: owner.token })
.send({ name: 'mytoken1', scope: { 'foobar': 'rw' }})
.ok(() => true);
expect(response.status).to.equal(400);
});
it('can list tokens', async function () {
const response = await superagent.get(`${serverUrl}/api/v1/tokens`)
.query({ access_token: owner.token });
expect(response.statusCode).to.equal(200);
expect(response.body.tokens.length).to.be(3); // one is owner token on activation
expect(response.body.tokens.length).to.be(2); // one is owner token on activation
const tokenIds = response.body.tokens.map(t => t.id);
expect(tokenIds).to.contain(token.id);
expect(tokenIds).to.contain(readOnlyToken.id);
});
it('cannot create token with read only token', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/tokens`)
.query({ access_token: readOnlyToken.accessToken })
.send({ name: 'somename' })
.ok(() => true);
expect(response.status).to.equal(403);
});
it('cannot get non-existent token', async function () {
+1 -1
View File
@@ -591,7 +591,7 @@ describe('Users API', function () {
it('add mailbox succeeds as mail manager', async function () {
const response = await superagent.post(`${serverUrl}/api/v1/mail/${dashboardDomain}/mailboxes`)
.send({ name: 'support', ownerId: owner.id, ownerType: 'user', active: true, storageQuota: 0, messagesQuota: 0 })
.send({ name: 'support', ownerId: owner.id, ownerType: 'user', active: true })
.query({ access_token: user.token });
expect(response.statusCode).to.equal(201);
+1 -3
View File
@@ -50,12 +50,10 @@ async function add(req, res, next) {
if (typeof req.body.name !== 'string') return next(new HttpError(400, 'name must be string'));
if ('expiresAt' in req.body && typeof req.body.expiresAt !== 'number') return next(new HttpError(400, 'expiresAt must be number'));
if ('scope' in req.body && typeof req.body.scope !== 'object') return next(new HttpError(400, 'scope must be an object'));
const expiresAt = req.body.expiresAt || (Date.now() + (100 * 365 * 24 * 60 * 60 * 1000)); // forever - 100 years TODO maybe we should allow 0 or -1 to make that explicit
const scope = req.body.scope || null;
const [error, result] = await safe(tokens.add({ clientId: tokens.ID_SDK, identifier: req.user.id, expires: expiresAt, name: req.body.name, scope }));
const [error, result] = await safe(tokens.add({ clientId: tokens.ID_SDK, identifier: req.user.id, expires: expiresAt, name: req.body.name }));
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(201, result));
+41 -55
View File
@@ -79,17 +79,14 @@ function initializeExpressSync() {
const multipart = middleware.multipart({ maxFieldsSize: FIELD_LIMIT, limit: FILE_SIZE_LIMIT, timeout: FILE_TIMEOUT });
// authentication
// to keep routes code short
const password = routes.accesscontrol.passwordAuth;
const token = routes.accesscontrol.tokenAuth;
// authorization
const authorizeOwner = routes.accesscontrol.authorize(users.ROLE_OWNER);
const authorizeAdmin = routes.accesscontrol.authorize(users.ROLE_ADMIN);
const authorizeOperator = routes.accesscontrol.authorizeOperator;
const authorizeUserManager = routes.accesscontrol.authorize(users.ROLE_USER_MANAGER);
const authorizeMailManager = routes.accesscontrol.authorize(users.ROLE_MAIL_MANAGER);
const authorizeUser = routes.accesscontrol.authorize(users.ROLE_USER);
// public routes
router.post('/api/v1/cloudron/setup', json, routes.provision.setupTokenAuth, routes.provision.providerTokenAuth, routes.provision.setup); // only available until no-domain
@@ -116,7 +113,7 @@ function initializeExpressSync() {
router.post('/api/v1/cloudron/check_for_updates', json, token, authorizeAdmin, routes.cloudron.checkForUpdates);
router.get ('/api/v1/cloudron/reboot', token, authorizeAdmin, routes.cloudron.isRebootRequired);
router.post('/api/v1/cloudron/reboot', json, token, authorizeAdmin, routes.cloudron.reboot);
router.get ('/api/v1/cloudron/graphs', token, authorizeAdmin, routes.graphs.getSystemGraphs);
router.get ('/api/v1/cloudron/graphs', token, authorizeAdmin, routes.graphs.getGraphs);
router.get ('/api/v1/cloudron/disks', token, authorizeAdmin, routes.cloudron.getDisks);
router.get ('/api/v1/cloudron/memory', token, authorizeAdmin, routes.cloudron.getMemory);
router.get ('/api/v1/cloudron/logs/:unit', token, authorizeAdmin, routes.cloudron.getLogs);
@@ -124,8 +121,8 @@ function initializeExpressSync() {
router.get ('/api/v1/cloudron/eventlog', token, authorizeAdmin, routes.eventlog.list);
router.get ('/api/v1/cloudron/eventlog/:eventId', token, authorizeAdmin, routes.eventlog.get);
router.post('/api/v1/cloudron/sync_external_ldap', json, token, authorizeAdmin, routes.cloudron.syncExternalLdap);
router.get ('/api/v1/cloudron/server_ipv4', token, authorizeAdmin, routes.cloudron.getServerIpv4);
router.get ('/api/v1/cloudron/server_ipv6', token, authorizeAdmin, routes.cloudron.getServerIpv6);
router.get ('/api/v1/cloudron/server_ipv4', token, authorizeAdmin, routes.cloudron.getServerIpv4);
router.get ('/api/v1/cloudron/server_ipv6', token, authorizeAdmin, routes.cloudron.getServerIpv6);
// task routes
router.get ('/api/v1/tasks', token, authorizeAdmin, routes.tasks.list);
@@ -147,31 +144,31 @@ function initializeExpressSync() {
router.post('/api/v1/backups/:backupId', json, token, authorizeAdmin, routes.backups.update);
// config route (for dashboard). can return some private configuration unlike status
router.get ('/api/v1/config', token, authorizeUser, routes.cloudron.getConfig);
router.get ('/api/v1/config', token, routes.cloudron.getConfig);
// working off the user behind the provided token
router.get ('/api/v1/profile', token, authorizeUser, routes.profile.get);
router.post('/api/v1/profile', json, token, authorizeUser, routes.profile.authorize, routes.profile.update);
router.get ('/api/v1/profile/avatar/:identifier', routes.profile.getAvatar); // this is not scoped so it can used directly in img tag
router.post('/api/v1/profile/avatar', json, token, authorizeUser, (req, res, next) => { return typeof req.body.avatar === 'string' ? next() : multipart(req, res, next); }, routes.profile.setAvatar); // avatar is not exposed in LDAP. so it's personal and not locked
router.get ('/api/v1/profile/backgroundImage', token, authorizeUser, routes.profile.getBackgroundImage);
router.post('/api/v1/profile/backgroundImage', token, authorizeUser, multipart, routes.profile.setBackgroundImage); // backgroundImage is not exposed in LDAP. so it's personal and not locked
router.post('/api/v1/profile/password', json, token, authorizeUser, routes.users.verifyPassword, routes.profile.setPassword);
router.post('/api/v1/profile/twofactorauthentication_secret', json, token, authorizeUser, routes.profile.setTwoFactorAuthenticationSecret);
router.post('/api/v1/profile/twofactorauthentication_enable', json, token, authorizeUser, routes.profile.enableTwoFactorAuthentication);
router.post('/api/v1/profile/twofactorauthentication_disable', json, token, authorizeUser, routes.users.verifyPassword, routes.profile.disableTwoFactorAuthentication);
router.get ('/api/v1/profile', token, routes.profile.get);
router.post('/api/v1/profile', json, token, routes.profile.authorize, routes.profile.update);
router.get ('/api/v1/profile/avatar/:identifier', routes.profile.getAvatar); // this is not scoped so it can used directly in img tag
router.post('/api/v1/profile/avatar', json, token, (req, res, next) => { return typeof req.body.avatar === 'string' ? next() : multipart(req, res, next); }, routes.profile.setAvatar); // avatar is not exposed in LDAP. so it's personal and not locked
router.get ('/api/v1/profile/backgroundImage', token, routes.profile.getBackgroundImage);
router.post('/api/v1/profile/backgroundImage', token, multipart, routes.profile.setBackgroundImage); // backgroundImage is not exposed in LDAP. so it's personal and not locked
router.post('/api/v1/profile/password', json, token, routes.users.verifyPassword, routes.profile.setPassword);
router.post('/api/v1/profile/twofactorauthentication_secret', json, token, routes.profile.setTwoFactorAuthenticationSecret);
router.post('/api/v1/profile/twofactorauthentication_enable', json, token, routes.profile.enableTwoFactorAuthentication);
router.post('/api/v1/profile/twofactorauthentication_disable', json, token, routes.users.verifyPassword, routes.profile.disableTwoFactorAuthentication);
// app password routes
router.get ('/api/v1/app_passwords', token, authorizeUser, routes.appPasswords.list);
router.post('/api/v1/app_passwords', json, token, authorizeUser, routes.appPasswords.add);
router.get ('/api/v1/app_passwords/:id', token, authorizeUser, routes.appPasswords.get);
router.del ('/api/v1/app_passwords/:id', token, authorizeUser, routes.appPasswords.del);
router.get ('/api/v1/app_passwords', token, routes.appPasswords.list);
router.post('/api/v1/app_passwords', json, token, routes.appPasswords.add);
router.get ('/api/v1/app_passwords/:id', token, routes.appPasswords.get);
router.del ('/api/v1/app_passwords/:id', token, routes.appPasswords.del);
// access tokens
router.get ('/api/v1/tokens', token, authorizeUser, routes.tokens.list);
router.post('/api/v1/tokens', json, token, authorizeUser, routes.tokens.add);
router.get ('/api/v1/tokens/:id', token, authorizeUser, routes.tokens.verifyOwnership, routes.tokens.get);
router.del ('/api/v1/tokens/:id', token, authorizeUser, routes.tokens.verifyOwnership, routes.tokens.del);
router.get ('/api/v1/tokens', token, routes.tokens.list);
router.post('/api/v1/tokens', json, token, routes.tokens.add);
router.get ('/api/v1/tokens/:id', token, routes.tokens.verifyOwnership, routes.tokens.get);
router.del ('/api/v1/tokens/:id', token, routes.tokens.verifyOwnership, routes.tokens.del);
// user routes
router.get ('/api/v1/users', token, authorizeUserManager, routes.users.list);
@@ -182,37 +179,37 @@ function initializeExpressSync() {
router.post('/api/v1/users/:userId/password', json, token, authorizeUserManager, routes.users.load, routes.users.setPassword);
router.post('/api/v1/users/:userId/ghost', json, token, authorizeAdmin, routes.users.load, routes.users.setGhost);
router.put ('/api/v1/users/:userId/groups', json, token, authorizeUserManager, routes.users.load, routes.users.setGroups);
router.post('/api/v1/users/:userId/make_local', json, token, authorizeUserManager, routes.users.load, routes.users.makeLocal);
router.get ('/api/v1/users/:userId/password_reset_link', json, token, authorizeUserManager, routes.users.load, routes.users.getPasswordResetLink);
router.post('/api/v1/users/:userId/send_password_reset_email', json, token, authorizeUserManager, routes.users.load, routes.users.sendPasswordResetEmail);
router.get ('/api/v1/users/:userId/invite_link', json, token, authorizeUserManager, routes.users.load, routes.users.getInviteLink);
router.post('/api/v1/users/:userId/send_invite_email', json, token, authorizeUserManager, routes.users.load, routes.users.sendInviteEmail);
router.post('/api/v1/users/:userId/twofactorauthentication_disable', json, token, authorizeUserManager, routes.users.load, routes.users.disableTwoFactorAuthentication);
router.get ('/api/v1/users/:userId/password_reset_link', json, token, authorizeUserManager, routes.users.load, routes.users.getPasswordResetLink);
router.post('/api/v1/users/:userId/send_password_reset_email', json, token, authorizeUserManager, routes.users.load, routes.users.sendPasswordResetEmail);
router.get ('/api/v1/users/:userId/invite_link', json, token, authorizeUserManager, routes.users.load, routes.users.getInviteLink);
router.post('/api/v1/users/:userId/send_invite_email', json, token, authorizeUserManager, routes.users.load, routes.users.sendInviteEmail);
router.post('/api/v1/users/:userId/make_local', json, token, authorizeUserManager, routes.users.load, routes.users.makeLocal);
// Group management
router.get ('/api/v1/groups', token, authorizeUserManager, routes.groups.list);
router.post('/api/v1/groups', json, token, authorizeUserManager, routes.groups.add);
router.get ('/api/v1/groups/:groupId', token, authorizeUserManager, routes.groups.get);
router.put ('/api/v1/groups/:groupId/members', json, token, authorizeUserManager, routes.groups.setMembers);
router.put ('/api/v1/groups/:groupId/members', json, token, authorizeUserManager, routes.groups.updateMembers);
router.post('/api/v1/groups/:groupId', json, token, authorizeUserManager, routes.groups.update);
router.del ('/api/v1/groups/:groupId', token, authorizeUserManager, routes.groups.remove);
// appstore and subscription routes
router.post('/api/v1/appstore/register_cloudron', json, token, authorizeOwner, routes.appstore.registerCloudron);
router.get ('/api/v1/appstore/web_token', json, token, authorizeOwner, routes.appstore.getWebToken);
router.get ('/api/v1/appstore/subscription', token, authorizeUser, routes.appstore.getSubscription); // for all users
router.get ('/api/v1/appstore/subscription', token, routes.appstore.getSubscription); // for all users
router.get ('/api/v1/appstore/apps', token, authorizeAdmin, routes.appstore.getApps);
router.get ('/api/v1/appstore/apps/:appstoreId', token, authorizeAdmin, routes.appstore.getApp);
router.get ('/api/v1/appstore/apps/:appstoreId/versions/:versionId', token, authorizeAdmin, routes.appstore.getAppVersion);
// app routes
router.post('/api/v1/apps/install', json, token, authorizeAdmin, routes.apps.install);
router.get ('/api/v1/apps', token, authorizeUser, routes.apps.listByUser);
router.post('/api/v1/apps/install', json, token, authorizeAdmin, routes.apps.install);
router.get ('/api/v1/apps', token, routes.apps.listByUser);
router.get ('/api/v1/apps/:id', token, routes.apps.load, authorizeOperator, routes.apps.getApp);
router.get ('/api/v1/apps/:id/icon', token, routes.apps.load, authorizeUser, routes.apps.getAppIcon);
router.post('/api/v1/apps/:id/uninstall', json, token, routes.apps.load, authorizeAdmin, routes.apps.uninstall);
router.post('/api/v1/apps/:id/configure/access_restriction', json, token, routes.apps.load, authorizeAdmin, routes.apps.setAccessRestriction);
router.post('/api/v1/apps/:id/configure/operators', json, token, routes.apps.load, authorizeAdmin, routes.apps.setOperators);
router.get ('/api/v1/apps/:id/icon', token, routes.apps.load, routes.apps.getAppIcon);
router.post('/api/v1/apps/:id/uninstall', json, token, authorizeAdmin, routes.apps.load, routes.apps.uninstall);
router.post('/api/v1/apps/:id/configure/access_restriction', json, token, authorizeAdmin, routes.apps.load, routes.apps.setAccessRestriction);
router.post('/api/v1/apps/:id/configure/operators', json, token, authorizeAdmin, routes.apps.load, routes.apps.setOperators);
router.post('/api/v1/apps/:id/configure/label', json, token, routes.apps.load, authorizeOperator, routes.apps.setLabel);
router.post('/api/v1/apps/:id/configure/tags', json, token, routes.apps.load, authorizeOperator, routes.apps.setTags);
router.post('/api/v1/apps/:id/configure/icon', json, token, routes.apps.load, authorizeOperator, routes.apps.setIcon);
@@ -230,7 +227,6 @@ function initializeExpressSync() {
router.post('/api/v1/apps/:id/configure/location', json, token, routes.apps.load, authorizeAdmin, routes.apps.setLocation);
router.post('/api/v1/apps/:id/configure/mounts', json, token, routes.apps.load, authorizeAdmin, routes.apps.setMounts);
router.post('/api/v1/apps/:id/configure/crontab', json, token, routes.apps.load, authorizeOperator, routes.apps.setCrontab);
router.post('/api/v1/apps/:id/configure/upstream_uri', json, token, routes.apps.load, authorizeOperator, routes.apps.setUpstreamUri);
router.post('/api/v1/apps/:id/repair', json, token, routes.apps.load, authorizeOperator, routes.apps.repair);
router.post('/api/v1/apps/:id/check_for_updates', json, token, routes.apps.load, authorizeOperator, routes.apps.checkForUpdates);
router.post('/api/v1/apps/:id/update', json, token, routes.apps.load, authorizeOperator, routes.apps.update);
@@ -248,7 +244,7 @@ function initializeExpressSync() {
router.get ('/api/v1/apps/:id/eventlog', token, routes.apps.load, authorizeOperator, routes.apps.listEventlog);
router.get ('/api/v1/apps/:id/limits', token, routes.apps.load, authorizeOperator, routes.apps.getLimits);
router.get ('/api/v1/apps/:id/task', token, routes.apps.load, authorizeOperator, routes.apps.getTask);
router.get ('/api/v1/apps/:id/graphs', token, routes.apps.load, authorizeOperator, routes.graphs.getAppGraphs);
router.get ('/api/v1/apps/:id/graphs', token, routes.apps.load, authorizeOperator, routes.graphs.getGraphs); // TODO: restrict to app graphs
router.post('/api/v1/apps/:id/clone', json, token, routes.apps.load, authorizeAdmin, routes.apps.clone);
router.get ('/api/v1/apps/:id/download', token, routes.apps.load, authorizeOperator, routes.apps.downloadFile);
router.post('/api/v1/apps/:id/upload', json, token, multipart, routes.apps.load, authorizeOperator, routes.apps.uploadFile);
@@ -258,15 +254,7 @@ function initializeExpressSync() {
router.get ('/api/v1/apps/:id/exec/:execId', token, routes.apps.load, authorizeOperator, routes.apps.getExec);
// websocket cannot do bearer authentication
router.get ('/api/v1/apps/:id/exec/:execId/startws', token, routes.apps.load, authorizeOperator, routes.apps.startExecWebSocket);
// app links in dashboard
router.get ('/api/v1/applinks', token, authorizeUser, routes.applinks.listByUser);
router.post('/api/v1/applinks', json, token, authorizeAdmin, routes.applinks.add);
router.get ('/api/v1/applinks/:id', token, authorizeAdmin, routes.applinks.get);
router.post('/api/v1/applinks/:id', json, token, authorizeAdmin, routes.applinks.update);
router.del ('/api/v1/applinks/:id', token, authorizeAdmin, routes.applinks.remove);
router.get ('/api/v1/applinks/:id/icon', token, authorizeUser, routes.applinks.getIcon);
router.get ('/api/v1/apps/:id/exec/:execId/startws', token, routes.apps.load, routes.accesscontrol.authorizeOperator, routes.apps.startExecWebSocket);
// branding routes
router.get ('/api/v1/branding/:setting', token, authorizeOwner, routes.branding.get);
@@ -303,8 +291,6 @@ function initializeExpressSync() {
router.post('/api/v1/mailserver/mailbox_sharing', token, authorizeAdmin, routes.mailserver.proxy, routes.mailserver.restart);
router.get ('/api/v1/mailserver/usage', token, authorizeMailManager, routes.mailserver.proxy);
router.use ('/api/v1/mailserver/queue', token, authorizeAdmin, routes.mailserver.queueProxy);
router.get ('/api/v1/mail/:domain', token, authorizeMailManager, routes.mail.getDomain);
router.post('/api/v1/mail/:domain/enable', json, token, authorizeAdmin, routes.mail.setMailEnabled);
router.get ('/api/v1/mail/:domain/status', token, authorizeMailManager, routes.mail.getStatus);
@@ -334,10 +320,10 @@ function initializeExpressSync() {
// domain routes
router.post('/api/v1/domains', json, token, authorizeAdmin, routes.domains.add);
router.get ('/api/v1/domains', token, authorizeUser, routes.domains.list);
router.get ('/api/v1/domains', token, routes.domains.list);
router.get ('/api/v1/domains/:domain', token, authorizeAdmin, routes.domains.get); // this is manage scope because it returns non-restricted fields
router.post('/api/v1/domains/:domain/config', json, token, authorizeAdmin, routes.domains.setConfig);
router.post('/api/v1/domains/:domain/wellknown', json, token, authorizeAdmin, routes.domains.setWellKnown);
router.post('/api/v1/domains/:domain/config', json, token, authorizeAdmin, routes.domains.setConfig);
router.post('/api/v1/domains/:domain/wellknown', json, token, authorizeAdmin, routes.domains.setWellKnown);
router.del ('/api/v1/domains/:domain', token, authorizeAdmin, routes.domains.del);
router.get ('/api/v1/domains/:domain/dns_check', token, authorizeAdmin, routes.domains.checkDnsRecords);
+3 -10
View File
@@ -502,9 +502,7 @@ async function rebuildService(id, auditSource) {
// this attempts to recreate the service docker container if they don't exist but platform infra version is unchanged
// passing an infra version of 'none' will not attempt to purge existing data
const [name, instance] = id.split(':');
switch (name) {
switch (id) {
case 'turn':
await startTurn({ version: 'none' });
break;
@@ -526,18 +524,13 @@ async function rebuildService(id, auditSource) {
case 'mail':
await mail.startMail({ version: 'none' });
break;
case 'redis': {
await shell.promises.exec('removeRedis', `docker rm -f redis-${instance} || true`);
const app = await apps.get(instance);
if (app) await setupRedis(app, app.manifest.addons.redis); // starts the container
break;
}
default:
// nothing to rebuild for now.
}
safe(applyMemoryLimit(id), { debug }); // do this in background. ok to fail
// TODO: missing redis container is not created
await eventlog.add(eventlog.ACTION_SERVICE_REBUILD, auditSource, { id });
}
@@ -1706,7 +1699,7 @@ async function startRedis(existingInfra) {
for (const app of allApps) {
if (!('redis' in app.manifest.addons)) continue; // app doesn't use the addon
const redisName = `redis-${app.id}`;
const redisName = 'redis-' + app.id;
if (upgrading) await backupRedis(app, {});
+18 -18
View File
@@ -34,8 +34,8 @@ exports = module.exports = {
getExternalLdapConfig,
setExternalLdapConfig,
getDirectoryServerConfig,
setDirectoryServerConfig,
getUserDirectoryConfig,
setUserDirectoryConfig,
getRegistryConfig,
setRegistryConfig,
@@ -100,7 +100,7 @@ exports = module.exports = {
BACKUP_CONFIG_KEY: 'backup_config',
SERVICES_CONFIG_KEY: 'services_config',
EXTERNAL_LDAP_KEY: 'external_ldap_config',
DIRECTORY_SERVER_KEY: 'user_directory_config',
USER_DIRECTORY_KEY: 'user_directory_config',
REGISTRY_CONFIG_KEY: 'registry_config',
SYSINFO_CONFIG_KEY: 'sysinfo_config', // misnomer: ipv4 config
APPSTORE_LISTING_CONFIG_KEY: 'appstore_listing_config',
@@ -148,7 +148,6 @@ const assert = require('assert'),
CronJob = require('cron').CronJob,
database = require('./database.js'),
debug = require('debug')('box:settings'),
directoryServer = require('./directoryserver.js'),
docker = require('./docker.js'),
externalLdap = require('./externalldap.js'),
moment = require('moment-timezone'),
@@ -158,6 +157,7 @@ const assert = require('assert'),
sysinfo = require('./sysinfo.js'),
tokens = require('./tokens.js'),
translation = require('./translation.js'),
userdirectory = require('./userdirectory.js'),
users = require('./users.js'),
_ = require('underscore');
@@ -194,7 +194,7 @@ const gDefaults = (function () {
provider: 'noop',
autoCreate: false
};
result[exports.DIRECTORY_SERVER_KEY] = {
result[exports.USER_DIRECTORY_KEY] = {
enabled: false,
secret: '',
allowlist: '' // empty means allow all
@@ -455,7 +455,7 @@ async function setBackupConfig(backupConfig) {
}
// if any of these changes, we have to clear the cache
if ([ 'format', 'provider', 'prefix', 'bucket', 'region', 'endpoint', 'backupFolder', 'mountPoint', 'encryption', 'encryptedFilenames' ].some(p => backupConfig[p] !== oldConfig[p])) {
if ([ 'format', 'provider', 'prefix', 'bucket', 'region', 'endpoint', 'backupFolder', 'mountPoint', 'encryption' ].some(p => backupConfig[p] !== oldConfig[p])) {
debug('setBackupConfig: clearing backup cache');
backups.cleanupCacheFilesSync();
}
@@ -528,29 +528,29 @@ async function setExternalLdapConfig(externalLdapConfig) {
notifyChange(exports.EXTERNAL_LDAP_KEY, externalLdapConfig);
}
async function getDirectoryServerConfig() {
const value = await get(exports.DIRECTORY_SERVER_KEY);
if (value === null) return gDefaults[exports.DIRECTORY_SERVER_KEY];
async function getUserDirectoryConfig() {
const value = await get(exports.USER_DIRECTORY_KEY);
if (value === null) return gDefaults[exports.USER_DIRECTORY_KEY];
return JSON.parse(value);
}
async function setDirectoryServerConfig(directoryServerConfig) {
assert.strictEqual(typeof directoryServerConfig, 'object');
async function setUserDirectoryConfig(userDirectoryConfig) {
assert.strictEqual(typeof userDirectoryConfig, 'object');
if (isDemo()) throw new BoxError(BoxError.BAD_FIELD, 'Not allowed in demo mode');
const config = {
enabled: directoryServerConfig.enabled,
secret: directoryServerConfig.secret,
enabled: userDirectoryConfig.enabled,
secret: userDirectoryConfig.secret,
// if list is empty, we allow all IPs
allowlist: directoryServerConfig.allowlist || ''
allowlist: userDirectoryConfig.allowlist || ''
};
await directoryServer.validateConfig(config);
await set(exports.DIRECTORY_SERVER_KEY, JSON.stringify(config));
await directoryServer.applyConfig(config);
await userdirectory.validateConfig(config);
await set(exports.USER_DIRECTORY_KEY, JSON.stringify(config));
await userdirectory.applyConfig(config);
notifyChange(exports.DIRECTORY_SERVER_KEY, config);
notifyChange(exports.USER_DIRECTORY_KEY, config);
}
async function getRegistryConfig() {
-1
View File
@@ -22,7 +22,6 @@ function api(provider) {
case 'wasabi': return require('./storage/s3.js');
case 'scaleway-objectstorage': return require('./storage/s3.js');
case 'backblaze-b2': return require('./storage/s3.js');
case 'cloudflare-r2': return require('./storage/s3.js');
case 'linode-objectstorage': return require('./storage/s3.js');
case 'ovh-objectstorage': return require('./storage/s3.js');
case 'ionos-objectstorage': return require('./storage/s3.js');
-3
View File
@@ -10,7 +10,6 @@ exports = module.exports = {
const apps = require('./apps.js'),
assert = require('assert'),
BoxError = require('./boxerror.js'),
constants = require('./constants.js'),
debug = require('debug')('box:disks'),
df = require('@sindresorhus/df'),
docker = require('./docker.js'),
@@ -42,8 +41,6 @@ async function getAppDisks(appsDataDisk) {
const allApps = await apps.list();
for (const app of allApps) {
if (app.manifest.id === constants.PROXY_APP_APPSTORE_ID) continue;
if (!app.storageVolumeId) {
appDisks[app.id] = appsDataDisk;
} else {
+1 -1
View File
@@ -204,7 +204,7 @@ function startTask(id, options, callback) {
if (callback) callback(taskError, task ? task.result : null);
debug(`startTask: ${id} done. error:`, taskError);
debug(`startTask: ${id} done. error: ${taskError}`);
});
if (options.timeout) {
-132
View File
@@ -1,132 +0,0 @@
/* global it:false */
/* global describe:false */
/* global before:false */
/* global after:false */
'use strict';
const applinks = require('../applinks.js'),
BoxError = require('../boxerror.js'),
common = require('./common.js'),
expect = require('expect.js'),
safe = require('safetydance');
describe('Applinks', function () {
const { setup, cleanup } = common;
before(setup);
after(cleanup);
const APPLINK_0 = {
upstreamUri: 'https://cloudron.io'
};
const APPLINK_1 = {
upstreamUri: 'https://www.digitalocean.com/',
label: 'Digitalocean YaY',
accessRestriction: { users: [], groups: [] },
tags: [ 'vps', 'vservers' ],
icon: 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABAQMAAAAl21bKAAAAA1BMVEX/TQBcNTh/AAAAAXRSTlPM0jRW/QAAAApJREFUeJxjYgAAAAYAAzY3fKgAAAAASUVORK5CYII='
};
const APPLINK_2 = {
upstreamUri: 'https://google.com'
};
const APPLINK_3 = {
upstreamUri: 'http://example.com'
};
it('can add applink with redirect', async function () {
APPLINK_0.id = await applinks.add(JSON.parse(JSON.stringify(APPLINK_0)));
// redirect should have put www in
APPLINK_0.upstreamUri = 'https://www.cloudron.io/';
});
it('can add second applink with attributes', async function () {
APPLINK_1.id = await applinks.add(JSON.parse(JSON.stringify(APPLINK_1)));
});
it('can add third applink to test google.com favicon', async function () {
APPLINK_2.id = await applinks.add(JSON.parse(JSON.stringify(APPLINK_2)));
const result = await applinks.get(APPLINK_2.id);
expect(result.upstreamUri).to.eql('https://www.google.com/');
expect(result.icon.length).to.not.eql(0);
});
it('can add fourth applink to test no favicon', async function () {
APPLINK_3.id = await applinks.add(JSON.parse(JSON.stringify(APPLINK_3)));
const result = await applinks.get(APPLINK_3.id);
expect(result.upstreamUri).to.eql('http://example.com');
expect(result.icon).to.eql(null);
});
it('can list all without accessRestriction', async function () {
const result = await applinks.list();
expect(result.length).to.equal(4);
expect(result[1].id).to.eql(APPLINK_0.id);
expect(result[1].upstreamUri).to.eql(APPLINK_0.upstreamUri);
expect(result[2].id).to.eql(APPLINK_1.id);
expect(result[2].upstreamUri).to.eql(APPLINK_1.upstreamUri);
expect(result[2].label).to.eql(APPLINK_1.label);
expect(result[2].accessRestriction).to.eql(APPLINK_1.accessRestriction);
expect(result[2].tags).to.eql(APPLINK_1.tags);
expect(result[2].icon.toString('base64')).to.eql(APPLINK_1.icon);
});
it('cannot get applink with wrong id', async function () {
const [error] = await safe(applinks.get('doesnotexist'));
expect(error).to.be.a(BoxError);
expect(error.reason).to.eql(BoxError.NOT_FOUND);
});
it('can get applink', async function () {
const result = await applinks.get(APPLINK_0.id);
expect(result.upstreamUri).to.eql(APPLINK_0.upstreamUri);
});
it('can get second applink', async function () {
const result = await applinks.get(APPLINK_1.id);
expect(result.id).to.eql(APPLINK_1.id);
expect(result.upstreamUri).to.eql(APPLINK_1.upstreamUri);
expect(result.label).to.eql(APPLINK_1.label);
expect(result.accessRestriction).to.eql(APPLINK_1.accessRestriction);
expect(result.tags).to.eql(APPLINK_1.tags);
expect(result.icon.toString('base64')).to.eql(APPLINK_1.icon);
});
it('can update applink', async function () {
APPLINK_0.upstreamUri = 'https://duckduckgo.com';
APPLINK_0.icon = APPLINK_1.icon;
await applinks.update(APPLINK_0.id, JSON.parse(JSON.stringify(APPLINK_0)));
const result = await applinks.get(APPLINK_0.id);
expect(result.upstreamUri).to.equal('https://duckduckgo.com');
expect(result.icon.toString('base64')).to.eql(APPLINK_1.icon);
});
it('can get applink icon', async function () {
const result = await applinks.getIcon(APPLINK_0.id);
expect(result.toString('base64')).to.eql(APPLINK_1.icon);
});
it('cannot remove applink with wrong id', async function () {
const [error] = await safe(applinks.remove('doesnotexist'));
expect(error).to.be.a(BoxError);
expect(error.reason).to.eql(BoxError.NOT_FOUND);
});
it('can remove applink', async function () {
await applinks.remove(APPLINK_0.id);
const [error] = await safe(applinks.get(APPLINK_0.id));
expect(error).to.be.a(BoxError);
expect(error.reason).to.eql(BoxError.NOT_FOUND);
});
});
+1 -1
View File
@@ -46,7 +46,7 @@ describe('Apps', function () {
port1: 4033,
port2: 3242,
port3: 1234
}, { tcpPorts: { port1: {}, port2: {}, port3: {} } })).to.be(null);
}, { tcpPorts: { port1: null, port2: null, port3: null } })).to.be(null);
});
});
+2 -3
View File
@@ -23,15 +23,14 @@ describe('janitor', function () {
clientId: 'clientid-1',
expires: Number.MAX_SAFE_INTEGER,
lastUsedTime: null,
scope: { '*': 'rw' }
scope: 'unused'
};
const token2 = {
name: 'token2',
identifier: '2',
clientId: 'clientid-2',
expires: Date.now(),
lastUsedTime: null,
scope: null //{ '*': 'rw '}
lastUsedTime: null
};
it('can cleanupTokens', async function () {
+2 -11
View File
@@ -67,14 +67,12 @@ describe('Ldap', function () {
const mailbox = `support@${domain.domain}`;
const mailAliasName = 'alsosupport';
const mailAlias = `alsosupport@${domain.domain}`;
const mailAliasWildcardName = 'help';
const mailAliasWildcard = `helpmeplz@${domain.domain}`;
before(function (done) {
async.series([
setup,
async () => await mail.addMailbox(mailboxName, domain.domain, { ownerId: user.id, ownerType: mail.OWNERTYPE_USER, active: true, storageQuota: 0, messagesQuota: 0 }, auditSource),
async () => await mail.setAliases(mailboxName, domain.domain, [ { name: mailAliasName, domain: domain.domain}, { name: mailAliasWildcardName + '*', domain: domain.domain } ], auditSource),
async () => await mail.addMailbox(mailboxName, domain.domain, { ownerId: user.id, ownerType: mail.OWNERTYPE_USER, active: true }, auditSource),
async () => await mail.setAliases(mailboxName, domain.domain, [ { name: mailAliasName, domain: domain.domain} ], auditSource),
ldapServer.start.bind(null),
async () => {
group = await groups.add({ name: 'ldap-test-1' });
@@ -337,13 +335,6 @@ describe('Ldap', function () {
expect(entries[0].rfc822MailMember).to.equal(mailbox);
});
it('get alias matching wildcard', async function () {
const entries = await ldapSearch(`cn=${mailAliasWildcard},ou=mailaliases,dc=cloudron`, 'objectclass=nismailalias');
expect(entries.length).to.equal(1);
expect(entries[0].cn).to.equal(mailAliasWildcard);
expect(entries[0].rfc822MailMember).to.equal(mailbox);
});
it('cannot get mailbox as alias', async function () {
const [error] = await safe(ldapSearch(`cn=${mailbox},ou=mailaliases,dc=cloudron`, 'objectclass=nismailalias'));
expect(error).to.be.a(ldap.NoSuchObjectError);
+6 -34
View File
@@ -40,16 +40,11 @@ describe('Mail', function () {
expect(mailConfig.mailFromValidation).to.be(false);
});
it('cannot set invalid catch all address', async function () {
const [error] = await safe(mail.setCatchAllAddress(domain.domain, [ 'user1' ]));
expect(error.reason).to.be(BoxError.BAD_FIELD);
});
it('can set invalid catch all address', async function () {
await mail.setCatchAllAddress(domain.domain, [ `user1@${domain.domain}`, `user2@${domain.domain}` ]);
it('can set catch all address', async function () {
await mail.setCatchAllAddress(domain.domain, [ 'user1', 'user2' ]);
const mailConfig = await mail.getDomain(domain.domain);
expect(mailConfig.catchAll).to.eql([ `user1@${domain.domain}`, `user2@${domain.domain}` ]);
expect(mailConfig.catchAll).to.eql([ 'user1', 'user2' ]);
});
it('can set mail relay', async function () {
@@ -119,16 +114,16 @@ describe('Mail', function () {
describe('mailboxes', function () {
it('add user mailbox succeeds', async function () {
await mail.addMailbox('girish', domain.domain, { ownerId: 'uid-0', ownerType: mail.OWNERTYPE_USER, active: true, storageQuota: 0, messagesQuota: 0 }, auditSource);
await mail.addMailbox('girish', domain.domain, { ownerId: 'uid-0', ownerType: mail.OWNERTYPE_USER, active: true }, auditSource);
});
it('cannot add dup entry', async function () {
const [error] = await safe(mail.addMailbox('girish', domain.domain, { ownerId: 'uid-1', ownerType: mail.OWNERTYPE_GROUP, active: true, storageQuota: 0, messagesQuota: 0 }, auditSource));
const [error] = await safe(mail.addMailbox('girish', domain.domain, { ownerId: 'uid-1', ownerType: mail.OWNERTYPE_GROUP, active: true }, auditSource));
expect(error.reason).to.be(BoxError.ALREADY_EXISTS);
});
it('add app mailbox succeeds', async function () {
await mail.addMailbox('support', domain.domain, { ownerId: 'osticket', ownerType: 'user', active: true, storageQuota: 10, messagesQuota: 20}, auditSource);
await mail.addMailbox('support', domain.domain, { ownerId: 'osticket', ownerType: 'user', active: true}, auditSource);
});
it('get succeeds', async function () {
@@ -137,8 +132,6 @@ describe('Mail', function () {
expect(mailbox.ownerId).to.equal('osticket');
expect(mailbox.domain).to.equal(domain.domain);
expect(mailbox.creationTime).to.be.a(Date);
expect(mailbox.storageQuota).to.be(10);
expect(mailbox.messagesQuota).to.be(20);
});
it('get non-existent mailbox', async function () {
@@ -146,14 +139,6 @@ describe('Mail', function () {
expect(mailbox).to.be(null);
});
it('update app mailbox succeeds', async function () {
await mail.updateMailbox('support', domain.domain, { ownerId: 'osticket', ownerType: 'user', active: true, storageQuota: 20, messagesQuota: 30}, auditSource);
const mailbox = await mail.getMailbox('support', domain.domain);
expect(mailbox.storageQuota).to.be(20);
expect(mailbox.messagesQuota).to.be(30);
});
it('list mailboxes succeeds', async function () {
const mailboxes = await mail.listMailboxes(domain.domain, null /* search */, 1, 10);
expect(mailboxes.length).to.be(2);
@@ -188,19 +173,6 @@ describe('Mail', function () {
expect(results[1].domain).to.be(domain.domain);
});
it('can set wildcard alias', async function () {
await mail.setAliases('support', domain.domain, [ { name: 'support*', domain: domain.domain }, { name: 'help', domain: domain.domain } ], auditSource);
});
it('can get aliases of name', async function () {
const results = await mail.getAliases('support', domain.domain);
expect(results.length).to.be(2);
expect(results[0].name).to.be('help');
expect(results[0].domain).to.be(domain.domain);
expect(results[1].name).to.be('support*');
expect(results[1].domain).to.be(domain.domain);
});
it('unset aliases', async function () {
await mail.setAliases('support', domain.domain, [], auditSource);
+4 -4
View File
@@ -147,8 +147,8 @@ describe('Reverse Proxy', function () {
});
it('returns prod acme in prod cloudron', async function () {
const { acme2, apiOptions } = await reverseProxy._getAcmeApi(domainCopy);
expect(acme2._name).to.be('acme');
const { acmeApi, apiOptions } = await reverseProxy._getAcmeApi(domainCopy);
expect(acmeApi._name).to.be('acme');
expect(apiOptions.prod).to.be(true);
});
});
@@ -161,8 +161,8 @@ describe('Reverse Proxy', function () {
});
it('returns staging acme in prod cloudron', async function () {
const { acme2, apiOptions } = await reverseProxy._getAcmeApi(domainCopy);
expect(acme2._name).to.be('acme');
const { acmeApi, apiOptions } = await reverseProxy._getAcmeApi(domainCopy);
expect(acmeApi._name).to.be('acme');
expect(apiOptions.prod).to.be(false);
});
});
+1 -1
View File
@@ -73,7 +73,7 @@ describe('Settings', function () {
});
it('can set default profile config', async function () {
await tokens.add({ name: 'token1', identifier: admin.id, clientId: tokens.ID_WEBADMIN, expires: Number.MAX_SAFE_INTEGER, lastUsedTime: null });
await tokens.add({ name: 'token1', identifier: admin.id, clientId: tokens.ID_WEBADMIN, expires: Number.MAX_SAFE_INTEGER, lastUsedTime: null, scope: 'unused' });
let result = await tokens.listByUserId(admin.id);
expect(result.length).to.be(1); // just confirm the token was really added!
+3 -24
View File
@@ -26,7 +26,7 @@ describe('Tokens', function () {
clientId: 'clientid-0',
expires: Date.now() + 60 * 60000,
lastUsedTime: null,
scope: { '*': 'rw' }
scope: 'unused'
};
it('add succeeds', async function () {
@@ -42,27 +42,6 @@ describe('Tokens', function () {
expect(error.reason).to.be(BoxError.BAD_FIELD);
});
it('add fails with unknown scope', async function () {
const badToken = Object.assign({}, TOKEN_0);
badToken.scope = { 'foobar': 'rw', '*': 'r' };
const [error] = await safe(tokens.add(badToken));
expect(error.reason).to.be(BoxError.BAD_FIELD);
});
it('add fails with invalid scope rule', async function () {
const badToken = Object.assign({}, TOKEN_0);
badToken.scope = { '*': 'rw ' };
const [error] = await safe(tokens.add(badToken));
expect(error.reason).to.be(BoxError.BAD_FIELD);
});
it('add fails with bad name', async function () {
const badToken = Object.assign({}, TOKEN_0);
badToken.name = new Array(100).fill('x').join('');
const [error] = await safe(tokens.add(badToken));
expect(error.reason).to.be(BoxError.BAD_FIELD);
});
it('get succeeds', async function () {
const result = await tokens.get(TOKEN_0.id);
expect(result).to.be.eql(TOKEN_0);
@@ -114,7 +93,7 @@ describe('Tokens', function () {
clientId: 'clientid-1',
expires: Number.MAX_SAFE_INTEGER,
lastUsedTime: null,
scope: { '*': 'rw' }
scope: 'unused'
};
const token2 = {
name: 'token2',
@@ -148,7 +127,7 @@ describe('Tokens', function () {
clientId: tokens.ID_WEBADMIN,
expires: Number.MAX_SAFE_INTEGER,
lastUsedTime: null,
scope: { '*': 'rw' }
scope: 'unused'
};
const token2 = {
name: 'token2',
@@ -9,12 +9,12 @@
const async = require('async'),
common = require('./common.js'),
constants = require('../constants.js'),
directoryServer = require('../directoryserver.js'),
expect = require('expect.js'),
groups = require('../groups.js'),
ldap = require('ldapjs'),
safe = require('safetydance'),
settings = require('../settings.js');
settings = require('../settings.js'),
userdirectory = require('../userdirectory.js');
async function ldapBind(dn, password) {
return new Promise((resolve, reject) => {
@@ -80,8 +80,8 @@ describe('User Directory Ldap', function () {
before(function (done) {
async.series([
setup,
directoryServer.start.bind(null),
settings.setDirectoryServerConfig.bind(null, { enabled: true, secret: auth.secret, allowlist: '127.0.0.1' }),
userdirectory.start.bind(null),
settings.setUserDirectoryConfig.bind(null, { enabled: true, secret: auth.secret, allowlist: '127.0.0.1' }),
async () => {
group = await groups.add({ name: 'ldap-test-1' });
await groups.setMembers(group.id, [ admin.id, user.id ]);
@@ -95,7 +95,7 @@ describe('User Directory Ldap', function () {
after(function (done) {
async.series([
directoryServer.stop,
userdirectory.stop,
cleanup
], done);
});
+8 -56
View File
@@ -15,33 +15,19 @@ exports = module.exports = {
validateTokenType,
hasScope,
// token client ids. we categorize them so we can have different restrictions based on the client
ID_WEBADMIN: 'cid-webadmin', // dashboard
ID_SDK: 'cid-sdk', // created by user via dashboard
SCOPES: ['*']//, 'apps', 'domains'],
};
const TOKENS_FIELDS = [ 'id', 'accessToken', 'identifier', 'clientId', 'scopeJson', 'expires', 'name', 'lastUsedTime' ].join(',');
const TOKENS_FIELDS = [ 'id', 'accessToken', 'identifier', 'clientId', 'scope', 'expires', 'name', 'lastUsedTime' ].join(',');
const assert = require('assert'),
BoxError = require('./boxerror.js'),
database = require('./database.js'),
hat = require('./hat.js'),
safe = require('safetydance'),
uuid = require('uuid');
function postProcess(result) {
assert.strictEqual(typeof result, 'object');
result.scope = safe.JSON.parse(result.scopeJson) || {};
delete result.scopeJson;
return result;
}
function validateTokenName(name) {
assert.strictEqual(typeof name, 'string');
@@ -54,55 +40,24 @@ function validateTokenType(type) {
assert.strictEqual(typeof type, 'string');
const types = [ exports.ID_WEBADMIN, exports.ID_SDK ];
if (types.indexOf(type) === -1) return new BoxError(BoxError.BAD_FIELD, `type must be one of ${types.join(',')}`);
if (types.indexOf(type) === -1) return BoxError(BoxError.BAD_FIELD, `type must be one of ${types.join(',')}`);
return null;
}
function validateScope(scope) {
assert.strictEqual(typeof scope, 'object');
for (const key in scope) {
if (exports.SCOPES.indexOf(key) === -1) return new BoxError(BoxError.BAD_FIELD, `Unkown token scope ${key}. Valid scopes are ${exports.SCOPES.join(',')}`);
if (scope[key] !== 'rw' && scope[key] !== 'r') return new BoxError(BoxError.BAD_FIELD, `Unkown token scope value ${scope[key]} for ${key}. Valid values are 'rw' or 'r'`);
}
return null;
}
function hasScope(token, method, path) {
assert.strictEqual(typeof token, 'object');
assert.strictEqual(typeof method, 'string');
assert.strictEqual(typeof path, 'string');
// TODO path checking in the future
const matchAll = token.scope['*'];
if (matchAll === 'rw') {
return true;
} else if (matchAll === 'r' && (method === 'GET' || method === 'HEAD' || method === 'OPTIONS')) {
return true;
} else {
return false;
}
}
async function add(token) {
assert.strictEqual(typeof token, 'object');
const { clientId, identifier, expires } = token;
const name = token.name || '';
const scope = token.scope || { '*': 'rw' };
let error = validateTokenName(name);
if (error) throw error;
error = validateScope(scope);
const error = validateTokenName(name);
if (error) throw error;
const id = 'tid-' + uuid.v4();
const accessToken = hat(8 * 32);
const scope = 'unused';
await database.query('INSERT INTO tokens (id, accessToken, identifier, clientId, expires, scopeJson, name) VALUES (?, ?, ?, ?, ?, ?, ?)', [ id, accessToken, identifier, clientId, expires, JSON.stringify(scope), name ]);
await database.query('INSERT INTO tokens (id, accessToken, identifier, clientId, expires, scope, name) VALUES (?, ?, ?, ?, ?, ?, ?)', [ id, accessToken, identifier, clientId, expires, scope, name ]);
return { id, accessToken, scope, clientId, identifier, expires, name };
}
@@ -113,7 +68,7 @@ async function get(id) {
const result = await database.query(`SELECT ${TOKENS_FIELDS} FROM tokens WHERE id = ?`, [ id ]);
if (result.length === 0) return null;
return postProcess(result[0]);
return result[0];
}
async function del(id) {
@@ -126,10 +81,7 @@ async function del(id) {
async function listByUserId(userId) {
assert.strictEqual(typeof userId, 'string');
const results = await database.query(`SELECT ${TOKENS_FIELDS} FROM tokens WHERE identifier = ?`, [ userId ]);
results.forEach(postProcess);
return results;
return await database.query(`SELECT ${TOKENS_FIELDS} FROM tokens WHERE identifier = ?`, [ userId ]);
}
async function getByAccessToken(accessToken) {
@@ -137,7 +89,7 @@ async function getByAccessToken(accessToken) {
const result = await database.query('SELECT ' + TOKENS_FIELDS + ' FROM tokens WHERE accessToken = ? AND expires > ?', [ accessToken, Date.now() ]);
if (result.length === 0) return null;
return postProcess(result[0]);
return result[0];
}
async function delByAccessToken(accessToken) {
+28 -41
View File
@@ -11,7 +11,7 @@ exports = module.exports = {
const assert = require('assert'),
BoxError = require('./boxerror.js'),
constants = require('./constants.js'),
debug = require('debug')('box:directoryserver'),
debug = require('debug')('box:userdirectory'),
dns = require('./dns.js'),
domains = require('./domains.js'),
eventlog = require('./eventlog.js'),
@@ -23,7 +23,6 @@ const assert = require('assert'),
reverseproxy = require('./reverseproxy.js'),
safe = require('safetydance'),
settings = require('./settings.js'),
speakeasy = require('speakeasy'),
shell = require('./shell.js'),
users = require('./users.js'),
util = require('util'),
@@ -33,6 +32,8 @@ let gServer = null;
const NOOP = function () {};
const GROUP_USERS_DN = 'cn=users,ou=groups,dc=cloudron';
const GROUP_ADMINS_DN = 'cn=admins,ou=groups,dc=cloudron';
const SET_LDAP_ALLOWLIST_CMD = path.join(__dirname, 'scripts/setldapallowlist.sh');
async function validateConfig(config) {
@@ -67,8 +68,6 @@ async function applyConfig(config) {
const [error] = await safe(shell.promises.sudo('setLdapAllowlist', [ SET_LDAP_ALLOWLIST_CMD ], {}));
if (error) throw new BoxError(BoxError.IPTABLES_ERROR, `Error setting ldap allowlist: ${error.message}`);
if (config.enabled) await start(); else await stop();
}
// helper function to deal with pagination
@@ -146,20 +145,20 @@ async function authorize(req, res, next) {
async function userSearch(req, res, next) {
debug('user search: dn %s, scope %s, filter %s (from %s)', req.dn.toString(), req.scope, req.filter.toString(), req.connection.ldap.id);
const [error, allUsers] = await safe(users.list());
const [error, result] = await safe(users.list());
if (error) return next(new ldap.OperationsError(error.toString()));
const [groupsError, allGroups] = await safe(groups.listWithMembers());
if (groupsError) return next(new ldap.OperationsError(error.toString()));
let results = [];
// send user objects
for (const user of allUsers) {
result.forEach(function (user) {
// skip entries with empty username. Some apps like owncloud can't deal with this
if (!user.username) continue;
if (!user.username) return;
const dn = ldap.parseDN(`cn=${user.id},ou=users,dc=cloudron`);
const dn = ldap.parseDN('cn=' + user.id + ',ou=users,dc=cloudron');
const memberof = [ GROUP_USERS_DN ];
if (users.compareRoles(user.role, users.ROLE_ADMIN) >= 0) memberof.push(GROUP_ADMINS_DN);
const displayName = user.displayName || user.username || ''; // displayName can be empty and username can be null
const nameParts = displayName.split(' ');
@@ -180,12 +179,10 @@ async function userSearch(req, res, next) {
givenName: firstName,
username: user.username,
samaccountname: user.username, // to support ActiveDirectory clients
memberof: allGroups.filter(function (g) { return g.userIds.indexOf(user.id) !== -1; }).map(function (g) { return g.name; })
memberof: memberof
}
};
if (user.twoFactorAuthenticationEnabled) obj.attributes.twoFactorAuthenticationEnabled = true;
// http://www.zytrax.com/books/ldap/ape/core-schema.html#sn has 'name' as SUP which is a DirectoryString
// which is required to have atleast one character if present
if (lastName.length !== 0) obj.attributes.sn = lastName;
@@ -197,7 +194,7 @@ async function userSearch(req, res, next) {
if ((req.dn.equals(dn) || req.dn.parentOf(dn)) && lowerCaseFilter.matches(obj.attributes)) {
results.push(obj);
}
}
});
finalSend(results, req, res, next);
}
@@ -205,24 +202,23 @@ async function userSearch(req, res, next) {
async function groupSearch(req, res, next) {
debug('group search: dn %s, scope %s, filter %s (from %s)', req.dn.toString(), req.scope, req.filter.toString(), req.connection.ldap.id);
const [error, allUsers] = await safe(users.list());
const [error, result] = await safe(users.list());
if (error) return next(new ldap.OperationsError(error.toString()));
const results = [];
let [errorGroups, allGroups] = await safe(groups.listWithMembers());
let [errorGroups, resultGroups] = await safe(groups.listWithMembers());
if (errorGroups) return next(new ldap.OperationsError(errorGroups.toString()));
for (const group of allGroups) {
const dn = ldap.parseDN(`cn=${group.name},ou=groups,dc=cloudron`);
const members = group.userIds.filter(function (uid) { return allUsers.map(function (u) { return u.id; }).indexOf(uid) !== -1; });
resultGroups.forEach(function (group) {
const dn = ldap.parseDN('cn=' + group.name + ',ou=groups,dc=cloudron');
const members = group.userIds.filter(function (uid) { return result.map(function (u) { return u.id; }).indexOf(uid) !== -1; });
const obj = {
dn: dn.toString(),
attributes: {
objectclass: ['group'],
cn: group.name,
gidnumber: group.id,
memberuid: members
}
};
@@ -234,7 +230,7 @@ async function groupSearch(req, res, next) {
if ((req.dn.equals(dn) || req.dn.parentOf(dn)) && lowerCaseFilter.matches(obj.attributes)) {
results.push(obj);
}
}
});
finalSend(results, req, res, next);
}
@@ -242,15 +238,12 @@ async function groupSearch(req, res, next) {
// Will attach req.user if successful
async function userAuth(req, res, next) {
// extract the common name which might have different attribute names
const cnAttributeName = Object.keys(req.dn.rdns[0].attrs)[0];
const commonName = req.dn.rdns[0].attrs[cnAttributeName].value;
const attributeName = Object.keys(req.dn.rdns[0].attrs)[0];
const commonName = req.dn.rdns[0].attrs[attributeName].value;
if (!commonName) return next(new ldap.NoSuchObjectError(req.dn.toString()));
const TOTPTOKEN_ATTRIBUTE_NAME = 'totptoken'; // This has to be in-sync with externalldap.js
const totpToken = req.dn.rdns[0].attrs[TOTPTOKEN_ATTRIBUTE_NAME] ? req.dn.rdns[0].attrs[TOTPTOKEN_ATTRIBUTE_NAME].value : null;
let verifyFunc;
if (cnAttributeName === 'mail') {
if (attributeName === 'mail') {
verifyFunc = users.verifyWithEmail;
} else if (commonName.indexOf('@') !== -1) { // if mail is specified, enforce mail check
verifyFunc = users.verifyWithEmail;
@@ -265,12 +258,6 @@ async function userAuth(req, res, next) {
if (error && error.reason === BoxError.INVALID_CREDENTIALS) return next(new ldap.InvalidCredentialsError(req.dn.toString()));
if (error) return next(new ldap.OperationsError(error.message));
// currently this is only optional if totpToken is provided and user has 2fa enabled
if (totpToken && user.twoFactorAuthenticationEnabled) {
const verified = speakeasy.totp.verify({ secret: user.twoFactorAuthenticationSecret, encoding: 'base32', token: totpToken, window: 2 });
if (!verified) return next(new ldap.InvalidCredentialsError(req.dn.toString()));
}
req.user = user;
next();
@@ -290,12 +277,12 @@ async function start() {
};
const domainObject = await domains.get(settings.dashboardDomain());
const dashboardFqdn = dns.fqdn(constants.DASHBOARD_SUBDOMAIN, domainObject);
const certificatePath = await reverseproxy.getCertificatePath(dashboardFqdn, domainObject.domain);
const dashboardFqdn = dns.fqdn(constants.DASHBOARD_LOCATION, domainObject);
const bundle = await reverseproxy.getCertificatePath(dashboardFqdn, domainObject.domain);
gServer = ldap.createServer({
certificate: fs.readFileSync(certificatePath.certFilePath, 'utf8'),
key: fs.readFileSync(certificatePath.keyFilePath, 'utf8'),
certificate: fs.readFileSync(bundle.certFilePath, 'utf8'),
key: fs.readFileSync(bundle.keyFilePath, 'utf8'),
log: logger
});
@@ -306,12 +293,12 @@ async function start() {
gServer.bind('ou=system,dc=cloudron', async function(req, res, next) {
debug('system bind: %s (from %s)', req.dn.toString(), req.connection.ldap.id);
const tmp = await settings.getDirectoryServerConfig();
const tmp = await settings.getUserDirectoryConfig();
if (!req.dn.equals(constants.USER_DIRECTORY_LDAP_DN)) return next(new ldap.InvalidCredentialsError(req.dn.toString()));
if (req.credentials !== tmp.secret) return next(new ldap.InvalidCredentialsError(req.dn.toString()));
req.user = { user: 'directoryServerAdmin' };
req.user = { user: 'userDirectoryAdmin' };
res.end();
@@ -324,7 +311,7 @@ async function start() {
gServer.bind('ou=users,dc=cloudron', userAuth, async function (req, res) {
assert.strictEqual(typeof req.user, 'object');
await eventlog.upsertLoginEvent(req.user.ghost ? eventlog.ACTION_USER_LOGIN_GHOST : eventlog.ACTION_USER_LOGIN, { authType: 'directoryserver', id: req.connection.ldap.id }, { userId: req.user.id, user: users.removePrivateFields(req.user) });
await eventlog.upsertLoginEvent(eventlog.ACTION_USER_LOGIN, { authType: 'userdirectory', id: req.connection.ldap.id }, { userId: req.user.id, user: users.removePrivateFields(req.user) });
res.end();
});
+1 -3
View File
@@ -356,9 +356,7 @@ async function verify(userId, password, identifier) {
}
if (user.source === 'ldap') {
const ldapUser = await externalLdap.verifyPassword(user, password);
// currently we store twoFactorAuthenticationEnabled in the db as local so amend it to user object
user.twoFactorAuthenticationEnabled = !!ldapUser.twoFactorAuthenticationEnabled;
await externalLdap.verifyPassword(user, password);
} else {
const saltBinary = Buffer.from(user.salt, 'hex');
const [error, derivedKey] = await safe(pbkdf2Async(password, saltBinary, CRYPTO_ITERATIONS, CRYPTO_KEY_LENGTH, CRYPTO_DIGEST));