Compare commits

..

1 Commits

Author SHA1 Message Date
Girish Ramakrishnan
2e76b8bed9 Do not restart stopped apps 2020-05-26 07:48:16 -07:00
88 changed files with 1585 additions and 2853 deletions

93
CHANGES
View File

@@ -1935,8 +1935,6 @@
* stopping an app will stop dependent services
* Add new wasabi s3 storage region us-east-2
* mail: Fix bug where SRS translation was done on the main domain instead of mailing list domain
* backups: add retention policy
* Drop `NET_RAW` caps from container preventing sniffing of network traffic
[5.2.1]
* Fix app disk graphs
@@ -1954,94 +1952,3 @@
[5.2.3]
* Do not restart stopped apps
[5.2.4]
* mail: enable/disable incoming mail was showing an error
* Do not trigger backup of stopped apps. Instead, we will just retain its existing backups
based on retention policy
* remove broken disk graphs
* fix OVH backups
[5.3.0]
* better nginx config for higher loads
* backups: add CIFS storage provider
* backups: add SSHFS storage provider
* backups: add NFS storage provider
* s3: use vhost style
* Fix crash when redis config was set
* Update schedule was unselected in the UI
* cloudron-setup: --provider is now optional
* show warning for unstable updates
* add forumUrl to app manifest
* postgresql: add unaccent extension for peertube
* mail: Add Auto-Submitted header to NDRs
* backups: ensure that the latest backup of installed apps is always preserved
* add nginx logs
* mail: make authentication case insensitive
* Fix timeout issues in postgresql and mysql addon
* Do not count stopped apps for memory use
* LDAP group synchronization
[5.3.1]
* better nginx config for higher loads
* backups: add CIFS storage provider
* backups: add SSHFS storage provider
* backups: add NFS storage provider
* s3: use vhost style
* Fix crash when redis config was set
* Update schedule was unselected in the UI
* cloudron-setup: --provider is now optional
* show warning for unstable updates
* add forumUrl to app manifest
* postgresql: add unaccent extension for peertube
* mail: Add Auto-Submitted header to NDRs
* backups: ensure that the latest backup of installed apps is always preserved
* add nginx logs
* mail: make authentication case insensitive
* Fix timeout issues in postgresql and mysql addon
* Do not count stopped apps for memory use
* LDAP group synchronization
[5.3.2]
* Do not install sshfs package
* 'provider' is not required anymore in various API calls
* redis: Set maxmemory and maxmemory-policy
* Add mlock capability to manifest (for vault app)
[5.3.3]
* Fix issue where some postinstall messages were causing angular to infinite loop
[5.3.4]
* Fix issue in database error handling
[5.4.0]
* Update nginx to 1.18 for various security fixes
* Add ping capability (for statping app)
* Fix bug where aliases were displayed incorrectly in SOGo
* Add univention as LDAP provider
* Bump max_connection for postgres addon to 200
* mail: Add pagination to mailing list API
* Allow admin to lock email and display name of users
* Allow admin to ensure all users have 2FA setup
* ami: fix regression where we didn't send provider as part of get status call
* nginx: hide version
* backups: add b2 provider
* Add filemanager webinterface
* Add darkmode
* Add note that password reset and invite links expire in 24 hours
[5.4.1]
* Update nginx to 1.18 for various security fixes
* Add ping capability (for statping app)
* Fix bug where aliases were displayed incorrectly in SOGo
* Add univention as LDAP provider
* Bump max_connection for postgres addon to 200
* mail: Add pagination to mailing list API
* Allow admin to lock email and display name of users
* Allow admin to ensure all users have 2FA setup
* ami: fix regression where we didn't send provider as part of get status call
* nginx: hide version
* backups: add b2 provider
* Add filemanager webinterface
* Add darkmode
* Add note that password reset and invite links expire in 24 hours

View File

@@ -4,7 +4,8 @@ set -euv -o pipefail
readonly SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
readonly arg_infraversionpath="${SOURCE_DIR}/../src"
readonly arg_provider="${1:-generic}"
readonly arg_infraversionpath="${SOURCE_DIR}/${2:-}"
function die {
echo $1
@@ -13,9 +14,6 @@ function die {
export DEBIAN_FRONTEND=noninteractive
readonly ubuntu_codename=$(lsb_release -cs)
readonly ubuntu_version=$(lsb_release -rs)
# hold grub since updating it breaks on some VPS providers. also, dist-upgrade will trigger it
apt-mark hold grub* >/dev/null
apt-get -o Dpkg::Options::="--force-confdef" update -y
@@ -29,6 +27,8 @@ debconf-set-selections <<< 'mysql-server mysql-server/root_password_again passwo
# this enables automatic security upgrades (https://help.ubuntu.com/community/AutomaticSecurityUpdates)
# resolvconf is needed for unbound to work properly after disabling systemd-resolved in 18.04
ubuntu_version=$(lsb_release -rs)
ubuntu_codename=$(lsb_release -cs)
gpg_package=$([[ "${ubuntu_version}" == "16.04" ]] && echo "gnupg" || echo "gpg")
apt-get -y install \
acl \
@@ -53,11 +53,16 @@ apt-get -y install \
unbound \
xfsprogs
echo "==> installing nginx for xenial for TLSv3 support"
curl -sL http://nginx.org/packages/ubuntu/pool/nginx/n/nginx/nginx_1.18.0-1~${ubuntu_codename}_amd64.deb -o /tmp/nginx.deb
# apt install with install deps (as opposed to dpkg -i)
apt install -y /tmp/nginx.deb
rm /tmp/nginx.deb
if [[ "${ubuntu_version}" == "16.04" ]]; then
echo "==> installing nginx for xenial for TLSv3 support"
curl -sL http://nginx.org/packages/ubuntu/pool/nginx/n/nginx/nginx_1.14.0-1~xenial_amd64.deb -o /tmp/nginx.deb
# apt install with install deps (as opposed to dpkg -i)
apt install -y /tmp/nginx.deb
rm /tmp/nginx.deb
else
apt install -y nginx-full
fi
# on some providers like scaleway the sudo file is changed and we want to keep the old one
apt-get -o Dpkg::Options::="--force-confold" install -y sudo
@@ -68,7 +73,7 @@ cp /usr/share/unattended-upgrades/20auto-upgrades /etc/apt/apt.conf.d/20auto-upg
echo "==> Installing node.js"
mkdir -p /usr/local/node-10.18.1
curl -sL https://nodejs.org/dist/v10.18.1/node-v10.18.1-linux-x64.tar.gz | tar zxf - --strip-components=1 -C /usr/local/node-10.18.1
curl -sL https://nodejs.org/dist/v10.18.1/node-v10.18.1-linux-x64.tar.gz | tar zxvf - --strip-components=1 -C /usr/local/node-10.18.1
ln -sf /usr/local/node-10.18.1/bin/node /usr/bin/node
ln -sf /usr/local/node-10.18.1/bin/npm /usr/bin/npm
apt-get install -y python # Install python which is required for npm rebuild

View File

@@ -12,6 +12,8 @@ exports.up = function(db, callback) {
db.all('SELECT * FROM users WHERE admin=1', function (error, results) {
if (error) return done(error);
console.dir(results);
async.eachSeries(results, function (r, next) {
db.runSql('INSERT INTO groupMembers (groupId, userId) VALUES (?, ?)', [ ADMIN_GROUP_ID, r.id ], next);
}, done);

View File

@@ -1,18 +0,0 @@
'use strict';
exports.up = function(db, callback) {
db.all('SELECT value FROM settings WHERE name="backup_config"', function (error, results) {
if (error || results.length === 0) return callback(error);
var backupConfig = JSON.parse(results[0].value);
if (backupConfig.provider !== 'minio' && backupConfig.provider !== 's3-v4-compat') return callback();
backupConfig.s3ForcePathStyle = true; // usually minio is self-hosted. s3 v4 compat, we don't know
db.runSql('UPDATE settings SET value=? WHERE name="backup_config"', [ JSON.stringify(backupConfig) ], callback);
});
};
exports.down = function(db, callback) {
callback();
};

View File

@@ -1,17 +0,0 @@
'use strict';
var async = require('async');
exports.up = function(db, callback) {
// http://stackoverflow.com/questions/386294/what-is-the-maximum-length-of-a-valid-email-address
async.series([
db.runSql.bind(db, 'ALTER TABLE appPasswords DROP INDEX name'),
db.runSql.bind(db, 'ALTER TABLE appPasswords ADD CONSTRAINT appPasswords_name_userId_identifier UNIQUE (name, userId, identifier)'),
], callback);
};
exports.down = function(db, callback) {
callback();
};

View File

@@ -1,17 +0,0 @@
'use strict';
exports.up = function(db, callback) {
db.runSql('ALTER TABLE userGroups ADD COLUMN source VARCHAR(128) DEFAULT ""', function (error) {
if (error) return callback(error);
callback();
});
};
exports.down = function(db, callback) {
db.runSql('ALTER TABLE userGroups DROP COLUMN source', function (error) {
if (error) console.error(error);
callback(error);
});
};

View File

@@ -1,38 +0,0 @@
'use strict';
const async = require('async');
exports.up = function(db, callback) {
db.runSql('ALTER TABLE backups ADD COLUMN identifier VARCHAR(128)', function (error) {
if (error) return callback(error);
db.all('SELECT * FROM backups', function (error, backups) {
if (error) return callback(error);
async.eachSeries(backups, function (backup, next) {
let identifier = 'unknown';
if (backup.type === 'box') {
identifier = 'box';
} else {
const match = backup.id.match(/app_(.+?)_.+/);
if (match) identifier = match[1];
}
db.runSql('UPDATE backups SET identifier=? WHERE id=?', [ identifier, backup.id ], next);
}, function (error) {
if (error) return callback(error);
db.runSql('ALTER TABLE backups MODIFY COLUMN identifier VARCHAR(128) NOT NULL', callback);
});
});
});
};
exports.down = function(db, callback) {
db.runSql('ALTER TABLE backups DROP COLUMN identifier', function (error) {
if (error) console.error(error);
callback(error);
});
};

View File

@@ -1,16 +0,0 @@
'use strict';
exports.up = function(db, callback) {
db.runSql('ALTER TABLE users ADD COLUMN ts TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP', function (error) {
if (error) console.error(error);
db.runSql('ALTER TABLE users DROP COLUMN modifiedAt', callback);
});
};
exports.down = function(db, callback) {
db.runSql('ALTER TABLE users DROP COLUMN ts', function (error) {
if (error) console.error(error);
callback(error);
});
};

View File

@@ -21,7 +21,7 @@ CREATE TABLE IF NOT EXISTS users(
password VARCHAR(1024) NOT NULL,
salt VARCHAR(512) NOT NULL,
createdAt VARCHAR(512) NOT NULL,
ts TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
modifiedAt VARCHAR(512) NOT NULL,
displayName VARCHAR(512) DEFAULT "",
fallbackEmail VARCHAR(512) DEFAULT "",
twoFactorAuthenticationSecret VARCHAR(128) DEFAULT "",
@@ -37,7 +37,6 @@ CREATE TABLE IF NOT EXISTS users(
CREATE TABLE IF NOT EXISTS userGroups(
id VARCHAR(128) NOT NULL UNIQUE,
name VARCHAR(254) NOT NULL UNIQUE,
source VARCHAR(128) DEFAULT "",
PRIMARY KEY(id));
CREATE TABLE IF NOT EXISTS groupMembers(
@@ -88,7 +87,6 @@ CREATE TABLE IF NOT EXISTS apps(
taskId INTEGER, // current task
errorJson TEXT,
bindsJson TEXT, // bind mounts
servicesConfigJson TEXT, // app services configuration
FOREIGN KEY(mailboxDomain) REFERENCES domains(domain),
FOREIGN KEY(taskId) REFERENCES tasks(id),
@@ -126,7 +124,6 @@ CREATE TABLE IF NOT EXISTS backups(
packageVersion VARCHAR(128) NOT NULL, /* app version or box version */
encryptionVersion INTEGER, /* when null, unencrypted backup */
type VARCHAR(16) NOT NULL, /* 'box' or 'app' */
identifier VARCHAR(128) NOT NULL, /* 'box' or the app id */
dependsOn TEXT, /* comma separate list of objects this backup depends on */
state VARCHAR(16) NOT NULL,
manifestJson TEXT, /* to validate if the app can be installed in this version of box */
@@ -222,7 +219,7 @@ CREATE TABLE IF NOT EXISTS notifications(
message TEXT,
acknowledged BOOLEAN DEFAULT false,
creationTime TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE KEY appPasswords_name_appId_identifier (name, userId, identifier),
PRIMARY KEY (id)
);
@@ -234,7 +231,7 @@ CREATE TABLE IF NOT EXISTS appPasswords(
hashedPassword VARCHAR(1024) NOT NULL,
creationTime TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(userId) REFERENCES users(id),
UNIQUE (name, userId),
PRIMARY KEY (id)
);

744
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -18,33 +18,33 @@
"@google-cloud/storage": "^2.5.0",
"@sindresorhus/df": "git+https://github.com/cloudron-io/df.git#type",
"async": "^2.6.3",
"aws-sdk": "^2.685.0",
"aws-sdk": "^2.610.0",
"body-parser": "^1.19.0",
"cloudron-manifestformat": "^5.5.0",
"cloudron-manifestformat": "^5.1.1",
"connect": "^3.7.0",
"connect-lastmile": "^2.0.0",
"connect-lastmile": "^1.2.2",
"connect-timeout": "^1.9.0",
"cookie-session": "^1.4.0",
"cron": "^1.8.2",
"db-migrate": "^0.11.11",
"db-migrate": "^0.11.6",
"db-migrate-mysql": "^1.1.10",
"debug": "^4.1.1",
"dockerode": "^2.5.8",
"ejs": "^2.6.1",
"ejs-cli": "^2.2.0",
"ejs-cli": "^2.1.1",
"express": "^4.17.1",
"js-yaml": "^3.14.0",
"js-yaml": "^3.13.1",
"json": "^9.0.6",
"ldapjs": "^1.0.2",
"lodash": "^4.17.15",
"lodash.chunk": "^4.2.0",
"mime": "^2.4.6",
"moment": "^2.26.0",
"moment-timezone": "^0.5.31",
"morgan": "^1.10.0",
"mime": "^2.4.4",
"moment": "^2.25.3",
"moment-timezone": "^0.5.27",
"morgan": "^1.9.1",
"multiparty": "^4.2.1",
"mysql": "^2.18.1",
"nodemailer": "^6.4.6",
"nodemailer": "^6.4.2",
"nodemailer-smtp-transport": "^2.7.4",
"once": "^1.4.0",
"parse-links": "^0.1.0",
@@ -52,34 +52,34 @@
"progress-stream": "^2.0.0",
"proxy-middleware": "^0.15.0",
"qrcode": "^1.4.4",
"readdirp": "^3.4.0",
"request": "^2.88.2",
"readdirp": "^3.3.0",
"request": "^2.88.0",
"rimraf": "^2.6.3",
"s3-block-read-stream": "^0.5.0",
"safetydance": "^1.1.1",
"safetydance": "^1.0.0",
"semver": "^6.1.1",
"showdown": "^1.9.1",
"speakeasy": "^2.0.0",
"split": "^1.0.1",
"superagent": "^5.2.2",
"superagent": "^5.2.1",
"supererror": "^0.7.2",
"tar-fs": "github:cloudron-io/tar-fs#ignore_stat_error",
"tar-stream": "^2.1.2",
"tar-stream": "^2.1.0",
"tldjs": "^2.3.1",
"underscore": "^1.10.2",
"underscore": "^1.9.2",
"uuid": "^3.4.0",
"validator": "^11.0.0",
"ws": "^7.3.0",
"ws": "^7.2.1",
"xml2js": "^0.4.23"
},
"devDependencies": {
"expect.js": "*",
"hock": "^1.4.1",
"js2xmlparser": "^4.0.1",
"hock": "^1.3.3",
"js2xmlparser": "^4.0.0",
"mocha": "^6.1.4",
"mock-aws-s3": "git+https://github.com/cloudron-io/mock-aws-s3.git",
"nock": "^10.0.6",
"node-sass": "^4.14.1",
"node-sass": "^4.12.0",
"recursive-readdir": "^2.2.2"
},
"scripts": {

View File

@@ -41,14 +41,16 @@ if systemctl -q is-active box; then
fi
initBaseImage="true"
provider="generic"
# provisioning data
provider=""
requestedVersion=""
apiServerOrigin="https://api.cloudron.io"
webServerOrigin="https://cloudron.io"
sourceTarballUrl=""
rebootServer="true"
license=""
args=$(getopt -o "" -l "help,skip-baseimage-init,provider:,version:,env:,skip-reboot" -n "$0" -- "$@")
args=$(getopt -o "" -l "help,skip-baseimage-init,provider:,version:,env:,skip-reboot,license:" -n "$0" -- "$@")
eval set -- "${args}"
while true; do
@@ -65,6 +67,7 @@ while true; do
webServerOrigin="https://staging.cloudron.io"
fi
shift 2;;
--license) license="$2"; shift 2;;
--skip-baseimage-init) initBaseImage="false"; shift;;
--skip-reboot) rebootServer="false"; shift;;
--) break;;
@@ -88,6 +91,48 @@ fi
# Can only write after we have confirmed script has root access
echo "Running cloudron-setup with args : $@" > "${LOG_FILE}"
# validate arguments in the absence of data
readonly AVAILABLE_PROVIDERS="azure, caas, cloudscale, contabo, digitalocean, ec2, exoscale, gce, hetzner, interox, lightsail, linode, netcup, ovh, rosehosting, scaleway, skysilk, time4vps, upcloud, vultr or generic"
if [[ -z "${provider}" ]]; then
echo "--provider is required ($AVAILABLE_PROVIDERS)"
exit 1
elif [[ \
"${provider}" != "ami" && \
"${provider}" != "azure" && \
"${provider}" != "azure-image" && \
"${provider}" != "caas" && \
"${provider}" != "cloudscale" && \
"${provider}" != "contabo" && \
"${provider}" != "digitalocean" && \
"${provider}" != "digitalocean-mp" && \
"${provider}" != "ec2" && \
"${provider}" != "exoscale" && \
"${provider}" != "gce" && \
"${provider}" != "hetzner" && \
"${provider}" != "interox" && \
"${provider}" != "interox-image" && \
"${provider}" != "lightsail" && \
"${provider}" != "linode" && \
"${provider}" != "linode-oneclick" && \
"${provider}" != "linode-stackscript" && \
"${provider}" != "netcup" && \
"${provider}" != "netcup-image" && \
"${provider}" != "ovh" && \
"${provider}" != "rosehosting" && \
"${provider}" != "scaleway" && \
"${provider}" != "skysilk" && \
"${provider}" != "skysilk-image" && \
"${provider}" != "time4vps" && \
"${provider}" != "time4vps-image" && \
"${provider}" != "upcloud" && \
"${provider}" != "upcloud-image" && \
"${provider}" != "vultr" && \
"${provider}" != "generic" \
]]; then
echo "--provider must be one of: $AVAILABLE_PROVIDERS"
exit 1
fi
echo ""
echo "##############################################"
echo " Cloudron Setup (${requestedVersion:-latest})"
@@ -106,6 +151,12 @@ if [[ "${initBaseImage}" == "true" ]]; then
exit 1
fi
echo "=> Ensure required apt sources"
if ! add-apt-repository universe &>> "${LOG_FILE}"; then
echo "Could not add required apt sources (for nginx-full). See ${LOG_FILE}"
exit 1
fi
echo "=> Updating apt and installing script dependencies"
if ! apt-get update &>> "${LOG_FILE}"; then
echo "Could not update package repositories. See ${LOG_FILE}"
@@ -145,19 +196,20 @@ fi
if [[ "${initBaseImage}" == "true" ]]; then
echo -n "=> Installing base dependencies and downloading docker images (this takes some time) ..."
# initializeBaseUbuntuImage.sh args (provider, infraversion path) are only to support installation of pre 5.3 Cloudrons
if ! /bin/bash "${box_src_tmp_dir}/baseimage/initializeBaseUbuntuImage.sh" "generic" "../src" &>> "${LOG_FILE}"; then
if ! /bin/bash "${box_src_tmp_dir}/baseimage/initializeBaseUbuntuImage.sh" "${provider}" "../src" &>> "${LOG_FILE}"; then
echo "Init script failed. See ${LOG_FILE} for details"
exit 1
fi
echo ""
fi
# The provider flag is still used for marketplace images
# NOTE: this install script only supports 4.2 and above
echo "=> Installing version ${version} (this takes some time) ..."
mkdir -p /etc/cloudron
echo "${provider}" > /etc/cloudron/PROVIDER
[[ -n "${license}" ]] && echo -n "$license" > /etc/cloudron/LICENSE
if ! /bin/bash "${box_src_tmp_dir}/scripts/installer.sh" &>> "${LOG_FILE}"; then
echo "Failed to install cloudron. See ${LOG_FILE} for details"
exit 1
@@ -169,13 +221,13 @@ mysql -uroot -ppassword -e "REPLACE INTO box.settings (name, value) VALUES ('web
echo -n "=> Waiting for cloudron to be ready (this takes some time) ..."
while true; do
echo -n "."
if status=$($curl -s -f "http://localhost:3000/api/v1/cloudron/status" 2>/dev/null); then
if status=$($curl -q -f "http://localhost:3000/api/v1/cloudron/status" 2>/dev/null); then
break # we are up and running
fi
sleep 10
done
if ! ip=$(curl -s --fail --connect-timeout 2 --max-time 2 https://api.cloudron.io/api/v1/helper/public_ip | sed -n -e 's/.*"ip": "\(.*\)"/\1/p'); then
if ! ip=$(curl --fail --connect-timeout 2 --max-time 2 -q https://api.cloudron.io/api/v1/helper/public_ip | sed -n -e 's/.*"ip": "\(.*\)"/\1/p'); then
ip='<IP>'
fi
echo -e "\n\n${GREEN}Visit https://${ip} and accept the self-signed certificate to finish setup.${DONE}\n"

View File

@@ -112,7 +112,7 @@ if [[ "${enableSSH}" == "true" ]]; then
permit_root_login=$(grep -q ^PermitRootLogin.*yes /etc/ssh/sshd_config && echo "yes" || echo "no")
# support.js uses similar logic
if [[ -d /home/ubuntu ]]; then
if $(grep -q "ec2\|lightsail\|ami" /etc/cloudron/PROVIDER); then
ssh_user="ubuntu"
keys_file="/home/ubuntu/.ssh/authorized_keys"
else

View File

@@ -58,9 +58,9 @@ if [[ $(docker version --format {{.Client.Version}}) != "18.09.2" ]]; then
fi
readonly nginx_version=$(nginx -v)
if [[ "${nginx_version}" != *"1.18."* ]]; then
echo "==> installer: installing nginx 1.18"
curl -sL http://nginx.org/packages/ubuntu/pool/nginx/n/nginx/nginx_1.18.0-1~${ubuntu_codename}_amd64.deb -o /tmp/nginx.deb
if [[ "${nginx_version}" != *"1.14."* && "${ubuntu_version}" == "16.04" ]]; then
echo "==> installer: installing nginx for xenial for TLSv3 support"
curl -sL http://nginx.org/packages/ubuntu/pool/nginx/n/nginx/nginx_1.14.0-1~xenial_amd64.deb -o /tmp/nginx.deb
# apt install with install deps (as opposed to dpkg -i)
apt install -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" --force-yes /tmp/nginx.deb
rm /tmp/nginx.deb

View File

@@ -85,9 +85,6 @@ systemctl daemon-reload
systemctl restart systemd-journald
setfacl -n -m u:${USER}:r /var/log/journal/*/system.journal
# Give user access to nginx logs (uses adm group)
usermod -a -G adm ${USER}
echo "==> Setting up unbound"
# DO uses Google nameservers by default. This causes RBL queries to fail (host 2.0.0.127.zen.spamhaus.org)
# We do not use dnsmasq because it is not a recursive resolver and defaults to the value in the interfaces file (which is Google DNS!)
@@ -153,15 +150,8 @@ cp "${script_dir}/start/nginx/mime.types" "${PLATFORM_DATA_DIR}/nginx/mime.types
if ! grep -q "^Restart=" /etc/systemd/system/multi-user.target.wants/nginx.service; then
# default nginx service file does not restart on crash
echo -e "\n[Service]\nRestart=always\n" >> /etc/systemd/system/multi-user.target.wants/nginx.service
systemctl daemon-reload
fi
# worker_rlimit_nofile in nginx config can be max this number
mkdir -p /etc/systemd/system/nginx.service.d
if ! grep -q "^LimitNOFILE=" /etc/systemd/system/nginx.service.d/cloudron.conf; then
echo -e "[Service]\nLimitNOFILE=16384\n" > /etc/systemd/system/nginx.service.d/cloudron.conf
fi
systemctl daemon-reload
systemctl start nginx
# restart mysql to make sure it has latest config

View File

@@ -1,18 +1,11 @@
user www-data;
# detect based on available CPU cores
worker_processes auto;
# this is 4096 by default. See /proc/<PID>/limits and /etc/security/limits.conf
# usually twice the worker_connections (one for upstream, one for downstream)
# see also LimitNOFILE=16384 in systemd drop-in
worker_rlimit_nofile 8192;
worker_processes 1;
pid /run/nginx.pid;
events {
# a single worker has these many simultaneous connections max
worker_connections 4096;
worker_connections 1024;
}
http {

View File

@@ -14,7 +14,7 @@ WorkingDirectory=/home/yellowtent/box
Restart=always
; Systemd does not append logs when logging to files, we spawn a shell first and exec to replace it after setting up the pipes
ExecStart=/bin/sh -c 'echo "Logging to /home/yellowtent/platformdata/logs/box.log"; exec /usr/bin/node /home/yellowtent/box/box.js >> /home/yellowtent/platformdata/logs/box.log 2>&1'
Environment="HOME=/home/yellowtent" "USER=yellowtent" "DEBUG=box:*,connect-lastmile,-box:ldap" "BOX_ENV=cloudron" "NODE_ENV=production"
Environment="HOME=/home/yellowtent" "USER=yellowtent" "DEBUG=box*,connect-lastmile" "BOX_ENV=cloudron" "NODE_ENV=production"
; kill apptask processes as well
KillMode=control-group
; Do not kill this process on OOM. Children inherit this score. Do not set it to -1000 so that MemoryMax can keep working

View File

@@ -506,13 +506,6 @@ function getServiceLogs(id, options, callback) {
args.push('--output=short-iso');
if (follow) args.push('--follow');
} else if (name === 'nginx') {
cmd = '/usr/bin/tail';
args.push('--lines=' + (lines === -1 ? '+1' : lines));
if (follow) args.push('--follow', '--retry', '--quiet'); // same as -F. to make it work if file doesn't exist, --quiet to not output file headers, which are not logs
args.push('/var/log/nginx/access.log');
args.push('/var/log/nginx/error.log');
} else {
cmd = '/usr/bin/tail';
@@ -1762,7 +1755,7 @@ function setupRedis(app, options, callback) {
const redisServiceToken = hat(4 * 48);
// Compute redis memory limit based on app's memory limit (this is arbitrary)
const memoryLimit = app.servicesConfig['redis'] ? app.servicesConfig['redis'].memory : APP_SERVICES['redis'].defaultMemoryLimit;
const memoryLimit = app.servicesConfig['redis'] ? app.servicesConfig['redis'].memoryLimit : APP_SERVICES['redis'].defaultMemoryLimit;
const tag = infra.images.redis.tag;
const label = app.fqdn;

View File

@@ -181,10 +181,12 @@ function processApp(callback) {
if (error) return callback(error);
async.each(allApps, checkAppHealth, function (error) {
if (error) console.error(error);
const alive = allApps
.filter(function (a) { return a.installationState === apps.ISTATE_INSTALLED && a.runState === apps.RSTATE_RUNNING && a.health === apps.HEALTH_HEALTHY; });
debug(`app health: ${alive.length} alive / ${allApps.length - alive.length} dead.` + (error ? ` ${error.reason}` : ''));
debug(`app health: ${alive.length} alive / ${allApps.length - alive.length} dead`);
callback(null);
});
@@ -199,7 +201,7 @@ function run(intervalSecs, callback) {
processApp, // this is first because docker.getEvents seems to get 'stuck' sometimes
processDockerEvents.bind(null, intervalSecs)
], function (error) {
if (error) debug(`run: could not check app health. ${error.message}`);
if (error) debug(error);
callback();
});

View File

@@ -426,8 +426,8 @@ function removeInternalFields(app) {
// non-admins can only see these
function removeRestrictedFields(app) {
return _.pick(app,
'id', 'appStoreId', 'installationState', 'error', 'runState', 'health', 'taskId', 'alternateDomains', 'sso',
'location', 'domain', 'fqdn', 'manifest', 'portBindings', 'iconUrl', 'creationTime', 'ts', 'tags', 'label', 'enableBackup');
'id', 'appStoreId', 'installationState', 'error', 'runState', 'health', 'taskId',
'location', 'domain', 'fqdn', 'manifest', 'portBindings', 'iconUrl', 'creationTime', 'ts', 'tags', 'label');
}
function getIconUrlSync(app) {
@@ -696,11 +696,6 @@ function checkAppState(app, state) {
if (state !== exports.ISTATE_PENDING_UNINSTALL && state !== exports.ISTATE_PENDING_RESTORE) return new BoxError(BoxError.BAD_STATE, 'Not allowed in error state');
}
if (app.runState === exports.RSTATE_STOPPED) {
// can't backup or restore since app addons are down. can't update because migration scripts won't run
if (state === exports.ISTATE_PENDING_UPDATE || state === exports.ISTATE_PENDING_BACKUP || state === exports.ISTATE_PENDING_RESTORE) return new BoxError(BoxError.BAD_STATE, 'Not allowed in stopped state');
}
return null;
}
@@ -1837,25 +1832,18 @@ function exec(app, options, callback) {
});
}
function canAutoupdateApp(app, updateInfo) {
assert.strictEqual(typeof app, 'object');
assert.strictEqual(typeof updateInfo, 'object');
const manifest = updateInfo.manifest;
function canAutoupdateApp(app, newManifest) {
if (!app.enableAutomaticUpdate) return false;
// for invalid subscriptions the appstore does not return a dockerImage
if (!manifest.dockerImage) return false;
if (!newManifest.dockerImage) return false;
if (updateInfo.unstable) return false; // only manual update allowed for unstable updates
if ((semver.major(app.manifest.version) !== 0) && (semver.major(app.manifest.version) !== semver.major(manifest.version))) return false; // major changes are blocking
if ((semver.major(app.manifest.version) !== 0) && (semver.major(app.manifest.version) !== semver.major(newManifest.version))) return false; // major changes are blocking
if (app.runState === exports.RSTATE_STOPPED) return false; // stopped apps won't run migration scripts and shouldn't be updated
const newTcpPorts = manifest.tcpPorts || { };
const newUdpPorts = manifest.udpPorts || { };
const newTcpPorts = newManifest.tcpPorts || { };
const newUdpPorts = newManifest.udpPorts || { };
const portBindings = app.portBindings; // this is never null
for (let portName in portBindings) {
@@ -1880,7 +1868,7 @@ function autoupdateApps(updateInfo, auditSource, callback) { // updateInfo is {
return iteratorDone();
}
if (!canAutoupdateApp(app, updateInfo[appId])) {
if (!canAutoupdateApp(app, updateInfo[appId].manifest)) {
debug(`app ${app.fqdn} requires manual update`);
return iteratorDone();
}
@@ -1925,7 +1913,7 @@ function listBackups(app, page, perPage, callback) {
assert(typeof perPage === 'number' && perPage > 0);
assert.strictEqual(typeof callback, 'function');
backups.getByIdentifierAndStatePaged(app.id, backups.BACKUP_STATE_NORMAL, page, perPage, function (error, results) {
backups.getByAppIdPaged(page, perPage, app.id, function (error, results) {
if (error) return callback(error);
callback(null, results);
@@ -1942,7 +1930,7 @@ function restoreInstalledApps(callback) {
apps = apps.filter(app => app.installationState !== exports.ISTATE_PENDING_RESTORE); // safeguard against tasks being created non-stop if we crash on startup
async.eachSeries(apps, function (app, iteratorDone) {
backups.getByIdentifierAndStatePaged(app.id, backups.BACKUP_STATE_NORMAL, 1, 1, function (error, results) {
backups.getByAppIdPaged(1, 1, app.id, function (error, results) {
let installationState, restoreConfig, oldManifest;
if (!error && results.length) {
installationState = exports.ISTATE_PENDING_RESTORE;

View File

@@ -11,6 +11,7 @@ exports = module.exports = {
trackFinishedSetup: trackFinishedSetup,
registerWithLoginCredentials: registerWithLoginCredentials,
registerWithLicense: registerWithLicense,
purchaseApp: purchaseApp,
unpurchaseApp: unpurchaseApp,
@@ -19,6 +20,8 @@ exports = module.exports = {
getSubscription: getSubscription,
isFreePlan: isFreePlan,
sendAliveStatus: sendAliveStatus,
getAppUpdate: getAppUpdate,
getBoxUpdate: getBoxUpdate,
@@ -31,26 +34,32 @@ var apps = require('./apps.js'),
BoxError = require('./boxerror.js'),
constants = require('./constants.js'),
debug = require('debug')('box:appstore'),
domains = require('./domains.js'),
eventlog = require('./eventlog.js'),
groups = require('./groups.js'),
mail = require('./mail.js'),
os = require('os'),
paths = require('./paths.js'),
safe = require('safetydance'),
semver = require('semver'),
settings = require('./settings.js'),
superagent = require('superagent'),
users = require('./users.js'),
util = require('util');
const NOOP_CALLBACK = function (error) { if (error) debug(error); };
// These are the default options and will be adjusted once a subscription state is obtained
// Keep in sync with appstore/routes/cloudrons.js
let gFeatures = {
userMaxCount: 5,
domainMaxCount: 1,
externalLdap: false,
privateDockerRegistry: false,
branding: false,
support: false,
directoryConfig: false,
mailboxMaxCount: 5,
emailPremium: false
userMaxCount: null,
externalLdap: true,
eventLog: true,
privateDockerRegistry: true,
branding: true,
userManager: true,
multiAdmin: true,
support: true
};
// attempt to load feature cache in case appstore would be down
@@ -226,6 +235,111 @@ function unpurchaseApp(appId, data, callback) {
});
}
function sendAliveStatus(callback) {
callback = callback || NOOP_CALLBACK;
let allSettings, allDomains, mailDomains, loginEvents, userCount, groupCount;
async.series([
function (callback) {
settings.getAll(function (error, result) {
if (error) return callback(error);
allSettings = result;
callback();
});
},
function (callback) {
domains.getAll(function (error, result) {
if (error) return callback(error);
allDomains = result;
callback();
});
},
function (callback) {
mail.getDomains(function (error, result) {
if (error) return callback(error);
mailDomains = result;
callback();
});
},
function (callback) {
eventlog.getAllPaged([ eventlog.ACTION_USER_LOGIN ], null, 1, 1, function (error, result) {
if (error) return callback(error);
loginEvents = result;
callback();
});
},
function (callback) {
users.count(function (error, result) {
if (error) return callback(error);
userCount = result;
callback();
});
},
function (callback) {
groups.count(function (error, result) {
if (error) return callback(error);
groupCount = result;
callback();
});
}
], function (error) {
if (error) return callback(error);
var backendSettings = {
backupConfig: {
provider: allSettings[settings.BACKUP_CONFIG_KEY].provider,
hardlinks: !allSettings[settings.BACKUP_CONFIG_KEY].noHardlinks
},
domainConfig: {
count: allDomains.length,
domains: Array.from(new Set(allDomains.map(function (d) { return { domain: d.domain, provider: d.provider }; })))
},
mailConfig: {
outboundCount: mailDomains.length,
inboundCount: mailDomains.filter(function (d) { return d.enabled; }).length,
catchAllCount: mailDomains.filter(function (d) { return d.catchAll.length !== 0; }).length,
relayProviders: Array.from(new Set(mailDomains.map(function (d) { return d.relay.provider; })))
},
userCount: userCount,
groupCount: groupCount,
appAutoupdatePattern: allSettings[settings.APP_AUTOUPDATE_PATTERN_KEY],
boxAutoupdatePattern: allSettings[settings.BOX_AUTOUPDATE_PATTERN_KEY],
timeZone: allSettings[settings.TIME_ZONE_KEY],
sysinfoProvider: allSettings[settings.SYSINFO_CONFIG_KEY].provider
};
var data = {
version: constants.VERSION,
adminFqdn: settings.adminFqdn(),
provider: settings.provider(),
backendSettings: backendSettings,
machine: {
cpus: os.cpus(),
totalmem: os.totalmem()
},
events: {
lastLogin: loginEvents[0] ? (new Date(loginEvents[0].creationTime).getTime()) : 0
}
};
getCloudronToken(function (error, token) {
if (error) return callback(error);
const url = `${settings.apiServerOrigin()}/api/v1/alive`;
superagent.post(url).send(data).query({ accessToken: token }).timeout(30 * 1000).end(function (error, result) {
if (error && !error.response) return callback(new BoxError(BoxError.NETWORK_ERROR, error));
if (result.statusCode === 404) return callback(new BoxError(BoxError.NOT_FOUND));
if (result.statusCode === 401) return callback(new BoxError(BoxError.INVALID_CREDENTIALS));
if (result.statusCode === 422) return callback(new BoxError(BoxError.LICENSE_ERROR, result.body.message));
if (result.statusCode !== 201) return callback(new BoxError(BoxError.EXTERNAL_ERROR, util.format('Sending alive status failed. %s %j', result.status, result.body)));
callback(null);
});
});
});
}
function getBoxUpdate(options, callback) {
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof callback, 'function');
@@ -302,9 +416,7 @@ function getAppUpdate(app, options, callback) {
return callback(new BoxError(BoxError.EXTERNAL_ERROR, util.format('Malformed update: %s %s', result.statusCode, result.text)));
}
updateInfo.unstable = !!updateInfo.unstable;
// { id, creationDate, manifest, unstable }
// { id, creationDate, manifest }
callback(null, updateInfo);
});
});
@@ -341,14 +453,16 @@ function registerCloudron(data, callback) {
// This works without a Cloudron token as this Cloudron was not yet registered
let gBeginSetupAlreadyTracked = false;
function trackBeginSetup() {
function trackBeginSetup(provider) {
assert.strictEqual(typeof provider, 'string');
// avoid browser reload double tracking, not perfect since box might restart, but covers most cases and is simple
if (gBeginSetupAlreadyTracked) return;
gBeginSetupAlreadyTracked = true;
const url = `${settings.apiServerOrigin()}/api/v1/helper/setup_begin`;
superagent.post(url).send({}).timeout(30 * 1000).end(function (error, result) {
superagent.post(url).send({ provider }).timeout(30 * 1000).end(function (error, result) {
if (error && !error.response) return console.error(error.message);
if (result.statusCode !== 200) return console.error(error.message);
});
@@ -366,6 +480,21 @@ function trackFinishedSetup(domain) {
});
}
// Registers this Cloudron with the appstore using a license key.
// license: the appstore license string; domain: the admin domain to register under.
// Calls back with BoxError.CONFLICT if the Cloudron already has a token (already registered);
// otherwise delegates to registerCloudron() with the current provider and box version.
function registerWithLicense(license, domain, callback) {
    assert.strictEqual(typeof license, 'string');
    assert.strictEqual(typeof domain, 'string');
    assert.strictEqual(typeof callback, 'function');

    getCloudronToken(function (error, token) {
        // NOTE(review): the error from getCloudronToken is deliberately not treated as fatal here —
        // an unregistered Cloudron has no token yet, so presumably an error means "no token".
        // Confirm against getCloudronToken's contract.
        if (token) return callback(new BoxError(BoxError.CONFLICT, 'Cloudron is already registered'));

        const provider = settings.provider();
        const version = constants.VERSION;

        registerCloudron({ license, domain, provider, version }, callback);
    });
}
function registerWithLoginCredentials(options, callback) {
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof callback, 'function');
@@ -385,7 +514,7 @@ function registerWithLoginCredentials(options, callback) {
login(options.email, options.password, options.totpToken || '', function (error, result) {
if (error) return callback(error);
registerCloudron({ domain: settings.adminDomain(), accessToken: result.accessToken, version: constants.VERSION, purpose: options.purpose || '' }, callback);
registerCloudron({ domain: settings.adminDomain(), accessToken: result.accessToken, provider: settings.provider(), version: constants.VERSION, purpose: options.purpose || '' }, callback);
});
});
});
@@ -410,7 +539,7 @@ function createTicket(info, auditSource, callback) {
if (error) return callback(error);
collectAppInfoIfNeeded(function (error, result) {
if (error) return callback(error);
if (error) console.error('Unable to get app info', error);
if (result) info.app = result;
let url = settings.apiServerOrigin() + '/api/v1/ticket';

View File

@@ -37,6 +37,7 @@ var addons = require('./addons.js'),
eventlog = require('./eventlog.js'),
fs = require('fs'),
manifestFormat = require('cloudron-manifestformat'),
mkdirp = require('mkdirp'),
net = require('net'),
os = require('os'),
path = require('path'),
@@ -175,7 +176,7 @@ function createAppDir(app, callback) {
assert.strictEqual(typeof callback, 'function');
const appDir = path.join(paths.APPS_DATA_DIR, app.id);
fs.mkdir(appDir, { recursive: true }, function (error) {
mkdirp(appDir, function (error) {
if (error) return callback(new BoxError(BoxError.FS_ERROR, `Error creating directory: ${error.message}`, { appDir }));
callback(null);

View File

@@ -6,20 +6,27 @@ var assert = require('assert'),
safe = require('safetydance'),
util = require('util');
var BACKUPS_FIELDS = [ 'id', 'identifier', 'creationTime', 'packageVersion', 'type', 'dependsOn', 'state', 'manifestJson', 'format', 'preserveSecs', 'encryptionVersion' ];
var BACKUPS_FIELDS = [ 'id', 'creationTime', 'packageVersion', 'type', 'dependsOn', 'state', 'manifestJson', 'format', 'preserveSecs', 'encryptionVersion' ];
exports = module.exports = {
add,
add: add,
getByTypePaged,
getByIdentifierPaged,
getByIdentifierAndStatePaged,
getByTypeAndStatePaged: getByTypeAndStatePaged,
getByTypePaged: getByTypePaged,
get,
del,
update,
get: get,
del: del,
update: update,
getByAppIdPaged: getByAppIdPaged,
_clear: clear
_clear: clear,
BACKUP_TYPE_APP: 'app',
BACKUP_TYPE_BOX: 'box',
BACKUP_STATE_NORMAL: 'normal', // should rename to created to avoid listing in UI?
BACKUP_STATE_CREATING: 'creating',
BACKUP_STATE_ERROR: 'error'
};
function postProcess(result) {
@@ -31,15 +38,15 @@ function postProcess(result) {
delete result.manifestJson;
}
function getByIdentifierAndStatePaged(identifier, state, page, perPage, callback) {
assert.strictEqual(typeof identifier, 'string');
function getByTypeAndStatePaged(type, state, page, perPage, callback) {
assert(type === exports.BACKUP_TYPE_APP || type === exports.BACKUP_TYPE_BOX);
assert.strictEqual(typeof state, 'string');
assert(typeof page === 'number' && page > 0);
assert(typeof perPage === 'number' && perPage > 0);
assert.strictEqual(typeof callback, 'function');
database.query('SELECT ' + BACKUPS_FIELDS + ' FROM backups WHERE identifier = ? AND state = ? ORDER BY creationTime DESC LIMIT ?,?',
[ identifier, state, (page-1)*perPage, perPage ], function (error, results) {
database.query('SELECT ' + BACKUPS_FIELDS + ' FROM backups WHERE type = ? AND state = ? ORDER BY creationTime DESC LIMIT ?,?',
[ type, state, (page-1)*perPage, perPage ], function (error, results) {
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
results.forEach(function (result) { postProcess(result); });
@@ -49,7 +56,7 @@ function getByIdentifierAndStatePaged(identifier, state, page, perPage, callback
}
function getByTypePaged(type, page, perPage, callback) {
assert.strictEqual(typeof type, 'string');
assert(type === exports.BACKUP_TYPE_APP || type === exports.BACKUP_TYPE_BOX);
assert(typeof page === 'number' && page > 0);
assert(typeof perPage === 'number' && perPage > 0);
assert.strictEqual(typeof callback, 'function');
@@ -64,14 +71,15 @@ function getByTypePaged(type, page, perPage, callback) {
});
}
function getByIdentifierPaged(identifier, page, perPage, callback) {
assert.strictEqual(typeof identifier, 'string');
function getByAppIdPaged(page, perPage, appId, callback) {
assert(typeof page === 'number' && page > 0);
assert(typeof perPage === 'number' && perPage > 0);
assert.strictEqual(typeof appId, 'string');
assert.strictEqual(typeof callback, 'function');
database.query('SELECT ' + BACKUPS_FIELDS + ' FROM backups WHERE identifier = ? ORDER BY creationTime DESC LIMIT ?,?',
[ identifier, (page-1)*perPage, perPage ], function (error, results) {
// box versions (0.93.x and below) used to use appbackup_ prefix
database.query('SELECT ' + BACKUPS_FIELDS + ' FROM backups WHERE type = ? AND state = ? AND id LIKE ? ORDER BY creationTime DESC LIMIT ?,?',
[ exports.BACKUP_TYPE_APP, exports.BACKUP_STATE_NORMAL, '%app%\\_' + appId + '\\_%', (page-1)*perPage, perPage ], function (error, results) {
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
results.forEach(function (result) { postProcess(result); });
@@ -100,9 +108,7 @@ function add(id, data, callback) {
assert.strictEqual(typeof id, 'string');
assert(data.encryptionVersion === null || typeof data.encryptionVersion === 'number');
assert.strictEqual(typeof data.packageVersion, 'string');
assert.strictEqual(typeof data.type, 'string');
assert.strictEqual(typeof data.identifier, 'string');
assert.strictEqual(typeof data.state, 'string');
assert(data.type === exports.BACKUP_TYPE_APP || data.type === exports.BACKUP_TYPE_BOX);
assert(util.isArray(data.dependsOn));
assert.strictEqual(typeof data.manifest, 'object');
assert.strictEqual(typeof data.format, 'string');
@@ -111,8 +117,8 @@ function add(id, data, callback) {
var creationTime = data.creationTime || new Date(); // allow tests to set the time
var manifestJson = JSON.stringify(data.manifest);
database.query('INSERT INTO backups (id, identifier, encryptionVersion, packageVersion, type, creationTime, state, dependsOn, manifestJson, format) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
[ id, data.identifier, data.encryptionVersion, data.packageVersion, data.type, creationTime, data.state, data.dependsOn.join(','), manifestJson, data.format ],
database.query('INSERT INTO backups (id, encryptionVersion, packageVersion, type, creationTime, state, dependsOn, manifestJson, format) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)',
[ id, data.encryptionVersion, data.packageVersion, data.type, creationTime, exports.BACKUP_STATE_NORMAL, data.dependsOn.join(','), manifestJson, data.format ],
function (error) {
if (error && error.code === 'ER_DUP_ENTRY') return callback(new BoxError(BoxError.ALREADY_EXISTS));
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));

View File

@@ -4,7 +4,8 @@ exports = module.exports = {
testConfig: testConfig,
testProviderConfig: testProviderConfig,
getByIdentifierAndStatePaged,
getByStatePaged: getByStatePaged,
getByAppIdPaged: getByAppIdPaged,
get: get,
@@ -33,15 +34,6 @@ exports = module.exports = {
generateEncryptionKeysSync: generateEncryptionKeysSync,
BACKUP_IDENTIFIER_BOX: 'box',
BACKUP_TYPE_APP: 'app',
BACKUP_TYPE_BOX: 'box',
BACKUP_STATE_NORMAL: 'normal', // should rename to created to avoid listing in UI?
BACKUP_STATE_CREATING: 'creating',
BACKUP_STATE_ERROR: 'error',
// for testing
_getBackupFilePath: getBackupFilePath,
_restoreFsMetadata: restoreFsMetadata,
@@ -61,15 +53,18 @@ var addons = require('./addons.js'),
database = require('./database.js'),
DataLayout = require('./datalayout.js'),
debug = require('debug')('box:backups'),
df = require('@sindresorhus/df'),
ejs = require('ejs'),
eventlog = require('./eventlog.js'),
fs = require('fs'),
locker = require('./locker.js'),
moment = require('moment'),
mkdirp = require('mkdirp'),
once = require('once'),
path = require('path'),
paths = require('./paths.js'),
progressStream = require('progress-stream'),
prettyBytes = require('pretty-bytes'),
safe = require('safetydance'),
shell = require('./shell.js'),
settings = require('./settings.js'),
@@ -78,8 +73,7 @@ var addons = require('./addons.js'),
tasks = require('./tasks.js'),
TransformStream = require('stream').Transform,
util = require('util'),
zlib = require('zlib'),
_ = require('underscore');
zlib = require('zlib');
const BACKUP_UPLOAD_CMD = path.join(__dirname, 'scripts/backupupload.js');
const COLLECTD_CONFIG_EJS = fs.readFileSync(__dirname + '/collectd/cloudron-backup.ejs', { encoding: 'utf8' });
@@ -93,9 +87,6 @@ function debugApp(app) {
// choose which storage backend we use for test purpose we use s3
function api(provider) {
switch (provider) {
case 'nfs': return require('./storage/filesystem.js');
case 'cifs': return require('./storage/filesystem.js');
case 'sshfs': return require('./storage/filesystem.js');
case 's3': return require('./storage/s3.js');
case 'gcs': return require('./storage/gcs.js');
case 'filesystem': return require('./storage/filesystem.js');
@@ -105,7 +96,6 @@ function api(provider) {
case 'exoscale-sos': return require('./storage/s3.js');
case 'wasabi': return require('./storage/s3.js');
case 'scaleway-objectstorage': return require('./storage/s3.js');
case 'backblaze-b2': return require('./storage/s3.js');
case 'linode-objectstorage': return require('./storage/s3.js');
case 'ovh-objectstorage': return require('./storage/s3.js');
case 'noop': return require('./storage/noop.js');
@@ -153,7 +143,6 @@ function testConfig(backupConfig, callback) {
const policy = backupConfig.retentionPolicy;
if (!policy) return callback(new BoxError(BoxError.BAD_FIELD, 'retentionPolicy is required', { field: 'retentionPolicy' }));
if (!['keepWithinSecs','keepDaily','keepWeekly','keepMonthly','keepYearly'].find(k => !!policy[k])) return callback(new BoxError(BoxError.BAD_FIELD, 'properties missing', { field: 'retentionPolicy' }));
if ('keepWithinSecs' in policy && typeof policy.keepWithinSecs !== 'number') return callback(new BoxError(BoxError.BAD_FIELD, 'keepWithinSecs must be a number', { field: 'retentionPolicy' }));
if ('keepDaily' in policy && typeof policy.keepDaily !== 'number') return callback(new BoxError(BoxError.BAD_FIELD, 'keepDaily must be a number', { field: 'retentionPolicy' }));
if ('keepWeekly' in policy && typeof policy.keepWeekly !== 'number') return callback(new BoxError(BoxError.BAD_FIELD, 'keepWeekly must be a number', { field: 'retentionPolicy' }));
@@ -186,14 +175,26 @@ function generateEncryptionKeysSync(password) {
};
}
function getByIdentifierAndStatePaged(identifier, state, page, perPage, callback) {
assert.strictEqual(typeof identifier, 'string');
// Lists box backups in the given state, newest first, using 1-based paging.
// Thin wrapper over backupdb that pins the type to BACKUP_TYPE_BOX.
function getByStatePaged(state, page, perPage, callback) {
    assert.strictEqual(typeof state, 'string');
    assert(typeof page === 'number' && page > 0);
    assert(typeof perPage === 'number' && perPage > 0);
    assert.strictEqual(typeof callback, 'function');

    const onResults = function (error, results) {
        if (error) return callback(error);
        callback(null, results);
    };

    backupdb.getByTypeAndStatePaged(backupdb.BACKUP_TYPE_BOX, state, page, perPage, onResults);
}
function getByAppIdPaged(page, perPage, appId, callback) {
assert(typeof page === 'number' && page > 0);
assert(typeof perPage === 'number' && perPage > 0);
assert.strictEqual(typeof appId, 'string');
assert.strictEqual(typeof callback, 'function');
backupdb.getByAppIdPaged(page, perPage, appId, function (error, results) {
if (error) return callback(error);
callback(null, results);
@@ -211,19 +212,16 @@ function get(backupId, callback) {
});
}
// This is not part of the storage api, since we don't want to pull the "format" logistics into that
function getBackupFilePath(backupConfig, backupId, format) {
assert.strictEqual(typeof backupConfig, 'object');
assert.strictEqual(typeof backupId, 'string');
assert.strictEqual(typeof format, 'string');
const backupPath = api(backupConfig.provider).getBackupPath(backupConfig);
if (format === 'tgz') {
const fileType = backupConfig.encryption ? '.tar.gz.enc' : '.tar.gz';
return path.join(backupPath, backupId+fileType);
return path.join(backupConfig.prefix || backupConfig.backupFolder || '', backupId+fileType);
} else {
return path.join(backupPath, backupId);
return path.join(backupConfig.prefix || backupConfig.backupFolder || '', backupId);
}
}
@@ -565,6 +563,34 @@ function saveFsMetadata(dataLayout, metadataFile, callback) {
callback();
}
// the du call in the function below requires root
// Ensures the filesystem backup target has enough free space before uploading.
// Sums the on-disk size (via `du`) of every local path in the data layout and requires
// that the backup folder's disk keeps at least 1GB free after a copy of that size.
// No-op for non-filesystem providers. Calls back with BoxError.FS_ERROR on failure.
// backupConfig: settings backup config object; dataLayout: DataLayout of paths to back up.
function checkFreeDiskSpace(backupConfig, dataLayout, callback) {
    assert.strictEqual(typeof backupConfig, 'object');
    assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
    assert.strictEqual(typeof callback, 'function');

    // only the filesystem provider copies onto a local disk we can (and must) check
    if (backupConfig.provider !== 'filesystem') return callback();

    let used = 0;
    for (let localPath of dataLayout.localPaths()) {
        debug(`checkFreeDiskSpace: getting disk usage of ${localPath}`);
        // quote the path so spaces/metacharacters in app data dirs don't break the shell
        // command (an unquoted path made `du` fail and parseInt return NaN).
        // -D: dereference symlink args, -s: summarize, -b: bytes
        let result = safe.child_process.execSync(`du -Dsb "${localPath}"`, { encoding: 'utf8' });
        if (!result) return callback(new BoxError(BoxError.FS_ERROR, safe.error));
        used += parseInt(result, 10);
    }
    debug(`checkFreeDiskSpace: ${used} bytes`);

    df.file(backupConfig.backupFolder).then(function (diskUsage) {
        const needed = used + (1024 * 1024 * 1024); // check if there is at least 1GB left afterwards
        if (diskUsage.available <= needed) return callback(new BoxError(BoxError.FS_ERROR, `Not enough disk space for backup. Needed: ${prettyBytes(needed)} Available: ${prettyBytes(diskUsage.available)}`));
        callback(null);
    }).catch(function (error) {
        callback(new BoxError(BoxError.FS_ERROR, error));
    });
}
// this function is called via backupupload (since it needs root to traverse app's directory)
function upload(backupId, format, dataLayoutString, progressCallback, callback) {
assert.strictEqual(typeof backupId, 'string');
@@ -580,7 +606,7 @@ function upload(backupId, format, dataLayoutString, progressCallback, callback)
settings.getBackupConfig(function (error, backupConfig) {
if (error) return callback(error);
api(backupConfig.provider).checkPreconditions(backupConfig, dataLayout, function (error) {
checkFreeDiskSpace(backupConfig, dataLayout, function (error) {
if (error) return callback(error);
if (format === 'tgz') {
@@ -679,7 +705,7 @@ function restoreFsMetadata(dataLayout, metadataFile, callback) {
if (metadata === null) return callback(new BoxError(BoxError.EXTERNAL_ERROR, 'Error parsing fsmetadata.json:' + safe.error.message));
async.eachSeries(metadata.emptyDirs, function createPath(emptyDir, iteratorDone) {
fs.mkdir(dataLayout.toLocalPath(emptyDir), { recursive: true }, iteratorDone);
mkdirp(dataLayout.toLocalPath(emptyDir), iteratorDone);
}, function (error) {
if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, `unable to create path: ${error.message}`));
@@ -711,7 +737,7 @@ function downloadDir(backupConfig, backupFilePath, dataLayout, progressCallback,
}
const destFilePath = dataLayout.toLocalPath('./' + relativePath);
fs.mkdir(path.dirname(destFilePath), { recursive: true }, function (error) {
mkdirp(path.dirname(destFilePath), function (error) {
if (error) return done(new BoxError(BoxError.FS_ERROR, error.message));
async.retry({ times: 5, interval: 20000 }, function (retryCallback) {
@@ -928,8 +954,6 @@ function uploadBoxSnapshot(backupConfig, progressCallback, callback) {
progressTag: 'box'
};
progressCallback({ message: 'Uploading box snapshot' });
runBackupUpload(uploadConfig, progressCallback, function (error) {
if (error) return callback(error);
@@ -958,9 +982,7 @@ function rotateBoxBackup(backupConfig, tag, appBackupIds, progressCallback, call
const data = {
encryptionVersion: backupConfig.encryption ? 2 : null,
packageVersion: constants.VERSION,
type: exports.BACKUP_TYPE_BOX,
state: exports.BACKUP_STATE_CREATING,
identifier: 'box',
type: backupdb.BACKUP_TYPE_BOX,
dependsOn: appBackupIds,
manifest: null,
format: format
@@ -971,9 +993,9 @@ function rotateBoxBackup(backupConfig, tag, appBackupIds, progressCallback, call
var copy = api(backupConfig.provider).copy(backupConfig, getBackupFilePath(backupConfig, 'snapshot/box', format), getBackupFilePath(backupConfig, backupId, format));
copy.on('progress', (message) => progressCallback({ message: `box: ${message}` }));
copy.on('done', function (copyBackupError) {
const state = copyBackupError ? exports.BACKUP_STATE_ERROR : exports.BACKUP_STATE_NORMAL;
const state = copyBackupError ? backupdb.BACKUP_STATE_ERROR : backupdb.BACKUP_STATE_NORMAL;
backupdb.update(backupId, { state }, function (error) {
backupdb.update(backupId, { state: state }, function (error) {
if (copyBackupError) return callback(copyBackupError);
if (error) return callback(error);
@@ -1004,10 +1026,6 @@ function backupBoxWithAppBackupIds(appBackupIds, tag, progressCallback, callback
function canBackupApp(app) {
// only backup apps that are installed or specific pending states
// stopped apps cannot be backed up because addons might be down (redis)
if (app.runState === apps.RSTATE_STOPPED) return false;
// we used to check the health here but that doesn't work for stopped apps. it's better to just fail
// and inform the user if the backup fails and the app addons have not been setup yet.
return app.installationState === apps.ISTATE_INSTALLED ||
@@ -1054,9 +1072,7 @@ function rotateAppBackup(backupConfig, app, tag, options, progressCallback, call
const data = {
encryptionVersion: backupConfig.encryption ? 2 : null,
packageVersion: manifest.version,
type: exports.BACKUP_TYPE_APP,
state: exports.BACKUP_STATE_CREATING,
identifier: app.id,
type: backupdb.BACKUP_TYPE_APP,
dependsOn: [ ],
manifest,
format: format
@@ -1068,9 +1084,9 @@ function rotateAppBackup(backupConfig, app, tag, options, progressCallback, call
var copy = api(backupConfig.provider).copy(backupConfig, getBackupFilePath(backupConfig, `snapshot/app_${app.id}`, format), getBackupFilePath(backupConfig, backupId, format));
copy.on('progress', (message) => progressCallback({ message: `${message} (${app.fqdn})` }));
copy.on('done', function (copyBackupError) {
const state = copyBackupError ? exports.BACKUP_STATE_ERROR : exports.BACKUP_STATE_NORMAL;
const state = copyBackupError ? backupdb.BACKUP_STATE_ERROR : backupdb.BACKUP_STATE_NORMAL;
backupdb.update(backupId, { preserveSecs: options.preserveSecs || 0, state }, function (error) {
backupdb.update(backupId, { preserveSecs: options.preserveSecs || 0, state: state }, function (error) {
if (copyBackupError) return callback(copyBackupError);
if (error) return callback(error);
@@ -1088,6 +1104,8 @@ function uploadAppSnapshot(backupConfig, app, progressCallback, callback) {
assert.strictEqual(typeof progressCallback, 'function');
assert.strictEqual(typeof callback, 'function');
if (!canBackupApp(app)) return callback(); // nothing to do
var startTime = new Date();
snapshotApp(app, progressCallback, function (error) {
@@ -1099,8 +1117,6 @@ function uploadAppSnapshot(backupConfig, app, progressCallback, callback) {
const dataLayout = new DataLayout(appDataDir, app.dataDir ? [{ localDir: app.dataDir, remoteDir: 'data' }] : []);
progressCallback({ message: `Uploading app snapshot ${app.fqdn}`});
const uploadConfig = {
backupId,
format: backupConfig.format,
@@ -1125,16 +1141,7 @@ function backupAppWithTag(app, tag, options, progressCallback, callback) {
assert.strictEqual(typeof progressCallback, 'function');
assert.strictEqual(typeof callback, 'function');
if (!canBackupApp(app)) { // if we cannot backup, reuse it's most recent backup
getByIdentifierAndStatePaged(app.id, exports.BACKUP_STATE_NORMAL, 1, 1, function (error, results) {
if (error) return callback(error);
if (results.length === 0) return callback(null, null); // no backup to re-use
callback(null, results[0].id);
});
return;
}
if (!canBackupApp(app)) return callback(); // nothing to do
settings.getBackupConfig(function (error, backupConfig) {
if (error) return callback(error);
@@ -1183,7 +1190,7 @@ function backupBoxAndApps(progressCallback, callback) {
}
backupAppWithTag(app, tag, { /* options */ }, (progress) => progressCallback({ percent: percent, message: progress.message }), function (error, backupId) {
if (error) {
if (error && error.reason !== BoxError.BAD_STATE) {
debugApp(app, 'Unable to backup', error);
return iteratorCallback(error);
}
@@ -1232,7 +1239,7 @@ function ensureBackup(auditSource, callback) {
debug('ensureBackup: %j', auditSource);
getByIdentifierAndStatePaged(exports.BACKUP_IDENTIFIER_BOX, exports.BACKUP_STATE_NORMAL, 1, 1, function (error, backups) {
getByStatePaged(backupdb.BACKUP_STATE_NORMAL, 1, 1, function (error, backups) {
if (error) {
debug('Unable to list backups', error);
return callback(error);
@@ -1252,22 +1259,16 @@ function ensureBackup(auditSource, callback) {
}
// backups must be descending in creationTime
function applyBackupRetentionPolicy(backups, policy, referencedBackupIds) {
function applyBackupRetentionPolicy(backups, policy) {
assert(Array.isArray(backups));
assert.strictEqual(typeof policy, 'object');
assert(Array.isArray(referencedBackupIds));
const now = new Date();
for (const backup of backups) {
if (backup.state === exports.BACKUP_STATE_ERROR) {
backup.discardReason = 'error';
} else if (backup.state === exports.BACKUP_STATE_CREATING) {
if ((now - backup.creationTime) < 48*60*60*1000) backup.keepReason = 'creating';
else backup.discardReason = 'creating-too-long';
} else if (referencedBackupIds.includes(backup.id)) {
backup.keepReason = 'reference';
} else if ((now - backup.creationTime) < (backup.preserveSecs * 1000)) {
if (backup.keepReason) continue; // already kept for some other reason
if ((now - backup.creationTime) < (backup.preserveSecs * 1000)) {
backup.keepReason = 'preserveSecs';
} else if ((now - backup.creationTime < policy.keepWithinSecs * 1000) || policy.keepWithinSecs < 0) {
backup.keepReason = 'keepWithinSecs';
@@ -1289,7 +1290,7 @@ function applyBackupRetentionPolicy(backups, policy, referencedBackupIds) {
let lastPeriod = null, keptSoFar = 0;
for (const backup of backups) {
if (backup.discardReason || backup.keepReason) continue; // already kept or discarded for some reason
if (backup.keepReason) continue; // already kept for some other reason
const period = moment(backup.creationTime).format(KEEP_FORMATS[format]);
if (period === lastPeriod) continue; // already kept for this period
@@ -1299,13 +1300,8 @@ function applyBackupRetentionPolicy(backups, policy, referencedBackupIds) {
}
}
if (policy.keepLatest) {
let latestNormalBackup = backups.find(b => b.state === exports.BACKUP_STATE_NORMAL);
if (latestNormalBackup && !latestNormalBackup.keepReason) latestNormalBackup.keepReason = 'latest';
}
for (const backup of backups) {
debug(`applyBackupRetentionPolicy: ${backup.id} ${backup.type} ${backup.keepReason || backup.discardReason || 'unprocessed'}`);
if (backup.keepReason) debug(`applyBackupRetentionPolicy: ${backup.id} ${backup.type} ${backup.keepReason}`);
}
}
@@ -1354,52 +1350,60 @@ function cleanupAppBackups(backupConfig, referencedAppBackupIds, progressCallbac
let removedAppBackupIds = [];
apps.getAll(function (error, allApps) {
// we clean app backups of any state because the ones to keep are determined by the box cleanup code
backupdb.getByTypePaged(backupdb.BACKUP_TYPE_APP, 1, 1000, function (error, appBackups) {
if (error) return callback(error);
const allAppIds = allApps.map(a => a.id);
for (const appBackup of appBackups) { // set the reason so that policy filter can skip it
if (referencedAppBackupIds.includes(appBackup.id)) appBackup.keepReason = 'reference';
}
backupdb.getByTypePaged(exports.BACKUP_TYPE_APP, 1, 1000, function (error, appBackups) {
if (error) return callback(error);
applyBackupRetentionPolicy(appBackups, backupConfig.retentionPolicy);
// collate the backups by app id. note that the app could already have been uninstalled
let appBackupsById = {};
for (const appBackup of appBackups) {
if (!appBackupsById[appBackup.identifier]) appBackupsById[appBackup.identifier] = [];
appBackupsById[appBackup.identifier].push(appBackup);
}
async.eachSeries(appBackups, function iterator(appBackup, iteratorDone) {
if (appBackup.keepReason) return iteratorDone();
// apply backup policy per app. keep latest backup only for existing apps
let appBackupsToRemove = [];
for (const appId of Object.keys(appBackupsById)) {
applyBackupRetentionPolicy(appBackupsById[appId], _.extend({ keepLatest: allAppIds.includes(appId) }, backupConfig.retentionPolicy), referencedAppBackupIds);
appBackupsToRemove = appBackupsToRemove.concat(appBackupsById[appId].filter(b => !b.keepReason));
}
progressCallback({ message: `Removing app backup ${appBackup.id}`});
async.eachSeries(appBackupsToRemove, function iterator(appBackup, iteratorDone) {
progressCallback({ message: `Removing app backup (${appBackup.identifier}): ${appBackup.id}`});
removedAppBackupIds.push(appBackup.id);
cleanupBackup(backupConfig, appBackup, progressCallback, iteratorDone);
}, function () {
debug('cleanupAppBackups: done');
removedAppBackupIds.push(appBackup.id);
cleanupBackup(backupConfig, appBackup, progressCallback, iteratorDone);
}, function () {
debug('cleanupAppBackups: done');
callback(null, removedAppBackupIds);
});
callback(null, removedAppBackupIds);
});
});
}
function cleanupBoxBackups(backupConfig, progressCallback, callback) {
function cleanupBoxBackups(backupConfig, progressCallback, auditSource, callback) {
assert.strictEqual(typeof backupConfig, 'object');
assert.strictEqual(typeof progressCallback, 'function');
assert.strictEqual(typeof auditSource, 'object');
assert.strictEqual(typeof callback, 'function');
let referencedAppBackupIds = [], removedBoxBackupIds = [];
backupdb.getByTypePaged(exports.BACKUP_TYPE_BOX, 1, 1000, function (error, boxBackups) {
backupdb.getByTypePaged(backupdb.BACKUP_TYPE_BOX, 1, 1000, function (error, boxBackups) {
if (error) return callback(error);
applyBackupRetentionPolicy(boxBackups, _.extend({ keepLatest: true }, backupConfig.retentionPolicy), [] /* references */);
if (boxBackups.length === 0) return callback(null, { removedBoxBackupIds, referencedAppBackupIds });
// search for the first valid backup
var i;
for (i = 0; i < boxBackups.length; i++) {
if (boxBackups[i].state === backupdb.BACKUP_STATE_NORMAL) break;
}
// keep the first valid backup
if (i !== boxBackups.length) {
debug('cleanupBoxBackups: preserving box backup %s (%j)', boxBackups[i].id, boxBackups[i].dependsOn);
referencedAppBackupIds = boxBackups[i].dependsOn;
boxBackups.splice(i, 1);
} else {
debug('cleanupBoxBackups: no box backup to preserve');
}
applyBackupRetentionPolicy(boxBackups, backupConfig.retentionPolicy);
async.eachSeries(boxBackups, function iterator(boxBackup, iteratorNext) {
if (boxBackup.keepReason) {
@@ -1468,7 +1472,8 @@ function cleanupSnapshots(backupConfig, callback) {
});
}
function cleanup(progressCallback, callback) {
function cleanup(auditSource, progressCallback, callback) {
assert.strictEqual(typeof auditSource, 'object');
assert.strictEqual(typeof progressCallback, 'function');
assert.strictEqual(typeof callback, 'function');
@@ -1482,7 +1487,7 @@ function cleanup(progressCallback, callback) {
progressCallback({ percent: 10, message: 'Cleaning box backups' });
cleanupBoxBackups(backupConfig, progressCallback, function (error, { removedBoxBackupIds, referencedAppBackupIds }) {
cleanupBoxBackups(backupConfig, progressCallback, auditSource, function (error, { removedBoxBackupIds, referencedAppBackupIds }) {
if (error) return callback(error);
progressCallback({ percent: 40, message: 'Cleaning app backups' });
@@ -1504,7 +1509,7 @@ function cleanup(progressCallback, callback) {
function startCleanupTask(auditSource, callback) {
tasks.add(tasks.TASK_CLEAN_BACKUPS, [], function (error, taskId) {
tasks.add(tasks.TASK_CLEAN_BACKUPS, [ auditSource ], function (error, taskId) {
if (error) return callback(error);
tasks.startTask(taskId, {}, (error, result) => { // result is { removedBoxBackups, removedAppBackups }

View File

@@ -139,11 +139,10 @@ function getConfig(callback) {
mailFqdn: settings.mailFqdn(),
version: constants.VERSION,
isDemo: settings.isDemo(),
provider: settings.provider(),
cloudronName: allSettings[settings.CLOUDRON_NAME_KEY],
footer: allSettings[settings.FOOTER_KEY] || constants.FOOTER,
features: appstore.getFeatures(),
profileLocked: allSettings[settings.DIRECTORY_CONFIG_KEY].lockUserProfiles,
mandatory2FA: allSettings[settings.DIRECTORY_CONFIG_KEY].mandatory2FA
features: appstore.getFeatures()
});
});
}

View File

@@ -1,27 +1,16 @@
'use strict';
// IMPORTANT: These patterns are together because they spin tasks which acquire a lock
// If the patterns overlap all the time, then the task may not ever get a chance to run!
// If you change this change dashboard patterns in settings.html
const DEFAULT_CLEANUP_BACKUPS_PATTERN = '00 30 1,3,5,23 * * *',
DEFAULT_BOX_ENSURE_BACKUP_PATTERN_LT_6HOURS = '00 45 1,7,13,19 * * *',
DEFAULT_BOX_ENSURE_BACKUP_PATTERN_GT_6HOURS = '00 45 1,3,5,23 * * *',
DEFAULT_BOX_AUTOUPDATE_PATTERN = '00 00 1,3,5,23 * * *',
DEFAULT_APP_AUTOUPDATE_PATTERN = '00 15 1,3,5,23 * * *';
exports = module.exports = {
startJobs,
startJobs: startJobs,
stopJobs,
stopJobs: stopJobs,
handleSettingsChanged,
DEFAULT_BOX_AUTOUPDATE_PATTERN,
DEFAULT_APP_AUTOUPDATE_PATTERN
handleSettingsChanged: handleSettingsChanged
};
var appHealthMonitor = require('./apphealthmonitor.js'),
apps = require('./apps.js'),
appstore = require('./appstore.js'),
assert = require('assert'),
async = require('async'),
auditSource = require('./auditsource.js'),
@@ -40,6 +29,7 @@ var appHealthMonitor = require('./apphealthmonitor.js'),
updateChecker = require('./updatechecker.js');
var gJobs = {
alive: null, // send periodic stats
appAutoUpdater: null,
boxAutoUpdater: null,
appUpdateChecker: null,
@@ -71,6 +61,12 @@ function startJobs(callback) {
assert.strictEqual(typeof callback, 'function');
const randomMinute = Math.floor(60*Math.random());
gJobs.alive = new CronJob({
cronTime: '00 ' + randomMinute + ' * * * *', // every hour on a random minute
onTick: appstore.sendAliveStatus,
start: true
});
gJobs.systemChecks = new CronJob({
cronTime: '00 30 * * * *', // every 30 minutes. if you change this interval, change the notification messages with correct duration
onTick: () => cloudron.runSystemChecks(NOOP_CALLBACK),
@@ -84,13 +80,13 @@ function startJobs(callback) {
});
gJobs.boxUpdateCheckerJob = new CronJob({
cronTime: '00 ' + randomMinute + ' 1,3,5,21,23 * * *', // 5 times
cronTime: '00 ' + randomMinute + ' 23 * * *', // once a day
onTick: () => updateChecker.checkBoxUpdates({ automatic: true }, NOOP_CALLBACK),
start: true
});
gJobs.appUpdateChecker = new CronJob({
cronTime: '00 ' + randomMinute + ' 2,4,6,20,22 * * *', // 5 times
cronTime: '00 ' + randomMinute + ' 22 * * *', // once a day
onTick: () => updateChecker.checkAppUpdates({ automatic: true }, NOOP_CALLBACK),
start: true
});
@@ -102,7 +98,7 @@ function startJobs(callback) {
});
gJobs.cleanupBackups = new CronJob({
cronTime: DEFAULT_CLEANUP_BACKUPS_PATTERN,
cronTime: '00 45 1,3,5,23 * * *', // every 6 hours. try not to overlap with ensureBackup job
onTick: backups.startCleanupTask.bind(null, auditSource.CRON, NOOP_CALLBACK),
start: true
});
@@ -181,9 +177,9 @@ function backupConfigChanged(value, tz) {
if (gJobs.backup) gJobs.backup.stop();
let pattern;
if (value.intervalSecs <= 6 * 60 * 60) {
pattern = DEFAULT_BOX_ENSURE_BACKUP_PATTERN_LT_6HOURS; // no option but to backup in the middle of the day
pattern = '00 00 1,7,13,19 * * *'; // no option but to backup in the middle of the day
} else {
pattern = DEFAULT_BOX_ENSURE_BACKUP_PATTERN_GT_6HOURS; // avoid middle of the day backups. it's 45 to not overlap auto-updates
pattern = '00 00 1,3,5,23 * * *'; // avoid middle of the day backups
}
gJobs.backup = new CronJob({

View File

@@ -17,12 +17,12 @@ var assert = require('assert'),
BoxError = require('./boxerror.js'),
child_process = require('child_process'),
constants = require('./constants.js'),
debug = require('debug')('box:database'),
mysql = require('mysql'),
once = require('once'),
util = require('util');
var gConnectionPool = null;
var gConnectionPool = null,
gDefaultConnection = null;
const gDatabase = {
hostname: '127.0.0.1',
@@ -42,37 +42,59 @@ function initialize(callback) {
gDatabase.hostname = require('child_process').execSync('docker inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}" mysql-server').toString().trim();
}
// https://github.com/mysqljs/mysql#pool-options
gConnectionPool = mysql.createPool({
connectionLimit: 5,
connectionLimit: 5, // this has to be > 1 since we store one connection as 'default'. the rest for transactions
host: gDatabase.hostname,
user: gDatabase.username,
password: gDatabase.password,
port: gDatabase.port,
database: gDatabase.name,
multipleStatements: false,
waitForConnections: true, // getConnection() will wait until a connection is available
ssl: false,
timezone: 'Z' // mysql follows the SYSTEM timezone. on Cloudron, this is UTC
});
gConnectionPool.on('connection', function (connection) {
// connection objects are re-used. so we have to attach to the event here (once) to prevent crash
// note the pool also has an 'acquire' event but that is called whenever we do a getConnection()
connection.on('error', (error) => debug(`Connection ${connection.threadId} error: ${error.message} ${error.code}`));
connection.query('USE ' + gDatabase.name);
connection.query('SET SESSION sql_mode = \'strict_all_tables\'');
});
callback(null);
reconnect(callback);
}
function uninitialize(callback) {
if (!gConnectionPool) return callback(null);
if (gConnectionPool) {
gConnectionPool.end(callback);
gConnectionPool = null;
} else {
callback(null);
}
}
gConnectionPool.end(callback);
gConnectionPool = null;
// (Re)acquires the module-wide default connection (gDefaultConnection) from
// the pool, retrying every second until one becomes available.
// `callback` is optional and wrapped with once() because the connection's
// 'error' handler below can also schedule a reconnect after initial success.
function reconnect(callback) {
    callback = callback ? once(callback) : function () {};

    gConnectionPool.getConnection(function (error, connection) {
        if (error) {
            console.error('Unable to reestablish connection to database. Try again in a bit.', error.message);
            return setTimeout(reconnect.bind(null, callback), 1000); // retry in 1 second
        }

        connection.on('error', function (error) {
            // by design, we catch all normal errors by providing callbacks.
            // this function should be invoked only when we have no callbacks pending and we have a fatal error
            assert(error.fatal, 'Non-fatal error on connection object');
            console.error('Unhandled mysql connection error.', error);

            // This is most likely an issue and can cause double callbacks from reconnect()
            setTimeout(reconnect.bind(null, callback), 1000);
        });

        gDefaultConnection = connection;

        callback(null);
    });
}
function clear(callback) {
@@ -85,43 +107,80 @@ function clear(callback) {
child_process.exec(cmd, callback);
}
function query() {
const args = Array.prototype.slice.call(arguments);
const callback = args[args.length - 1];
function beginTransaction(callback) {
assert.strictEqual(typeof callback, 'function');
if (constants.TEST && !gConnectionPool) return callback(new BoxError(BoxError.DATABASE_ERROR, 'database.js not initialized'));
if (gConnectionPool === null) return callback(new BoxError(BoxError.DATABASE_ERROR, 'No database connection pool.'));
gConnectionPool.query.apply(gConnectionPool, args); // this is same as getConnection/query/release
gConnectionPool.getConnection(function (error, connection) {
if (error) {
console.error('Unable to get connection to database. Try again in a bit.', error.message);
return setTimeout(beginTransaction.bind(null, callback), 1000);
}
connection.beginTransaction(function (error) {
if (error) return callback(error);
return callback(null, connection);
});
});
}
// Rolls back the transaction on `connection` and returns it to the pool.
// From the caller's point of view this always succeeds (callback(null));
// a rollback error is only logged.
function rollback(connection, callback) {
    assert.strictEqual(typeof callback, 'function');

    connection.rollback((rollbackError) => {
        if (rollbackError) console.error(rollbackError); // can this happen?
        connection.release();
        callback(null);
    });
}
// FIXME: if commit fails, is it supposed to return an error ?
// Commits the transaction on `connection`. On success the connection is
// released and callback(null) fires; on commit failure the transaction is
// rolled back instead (rollback also releases and reports null).
function commit(connection, callback) {
    assert.strictEqual(typeof callback, 'function');

    connection.commit((commitError) => {
        if (commitError) return rollback(connection, callback);

        connection.release();
        callback(null);
    });
}
// Runs a query on the shared default connection. Accepts mysql's variadic
// Connection.query() signature (sql, values..., callback).
// On a fatal connection error the default connection is dropped and a
// reconnect is scheduled before the caller's callback is invoked.
function query(...args) {
    const callback = args[args.length - 1];
    assert.strictEqual(typeof callback, 'function');

    if (gDefaultConnection === null) return callback(new BoxError(BoxError.DATABASE_ERROR, 'No connection to database'));

    args[args.length - 1] = function (error, result) {
        if (error && error.fatal) { // a fatal error leaves the connection unusable
            gDefaultConnection = null;
            setTimeout(reconnect, 1000);
        }
        callback(error, result);
    };

    gDefaultConnection.query.apply(gDefaultConnection, args);
}
function transaction(queries, callback) {
assert(util.isArray(queries));
assert.strictEqual(typeof callback, 'function');
callback = once(callback);
gConnectionPool.getConnection(function (error, connection) {
beginTransaction(function (error, conn) {
if (error) return callback(error);
const releaseConnection = (error) => { connection.release(); callback(error); };
async.mapSeries(queries, function iterator(query, done) {
conn.query(query.query, query.args, done);
}, function seriesDone(error, results) {
if (error) return rollback(conn, callback.bind(null, error));
connection.beginTransaction(function (error) {
if (error) return releaseConnection(error);
async.mapSeries(queries, function iterator(query, done) {
connection.query(query.query, query.args, done);
}, function seriesDone(error, results) {
if (error) return connection.rollback(() => releaseConnection(error));
connection.commit(function (error) {
if (error) return connection.rollback(() => releaseConnection(error));
connection.release();
callback(null, results);
});
});
commit(conn, callback.bind(null, null, results));
});
});
}
@@ -144,7 +203,7 @@ function exportToFile(file, callback) {
// latest mysqldump enables column stats by default which is not present in MySQL 5.7 server
// this option must not be set in production cloudrons which still use the old mysqldump
const disableColStats = (constants.TEST && require('fs').readFileSync('/etc/lsb-release', 'utf-8').includes('20.04')) ? '--column-statistics=0' : '';
const disableColStats = (constants.TEST && process.env.DESKTOP_SESSION !== 'ubuntu') ? '--column-statistics=0' : '';
var cmd = `/usr/bin/mysqldump -h "${gDatabase.hostname}" -u root -p${gDatabase.password} ${disableColStats} --single-transaction --routines --triggers ${gDatabase.name} > "${file}"`;

View File

@@ -306,8 +306,7 @@ function createSubcontainer(app, name, cmd, options, callback) {
Dns: ['172.18.0.1'], // use internal dns
DnsSearch: ['.'], // use internal dns
SecurityOpt: [ 'apparmor=docker-cloudron-app' ],
CapAdd: [],
CapDrop: []
CapDrop: [ 'NET_RAW' ] // https://docs-stage.docker.com/engine/reference/run/#runtime-privilege-and-linux-capabilities
},
NetworkingConfig: {
EndpointsConfig: {
@@ -319,11 +318,11 @@ function createSubcontainer(app, name, cmd, options, callback) {
};
var capabilities = manifest.capabilities || [];
// https://docs-stage.docker.com/engine/reference/run/#runtime-privilege-and-linux-capabilities
if (capabilities.includes('net_admin')) containerOptions.HostConfig.CapAdd.push('NET_ADMIN', 'NET_RAW');
if (capabilities.includes('mlock')) containerOptions.HostConfig.CapAdd.push('IPC_LOCK'); // mlock prevents swapping
if (!capabilities.includes('ping')) containerOptions.HostConfig.CapDrop.push('NET_RAW'); // NET_RAW is included by default by Docker
if (capabilities.includes('net_admin')) {
containerOptions.HostConfig.CapAdd = [
'NET_ADMIN', 'NET_RAW'
];
}
containerOptions = _.extend(containerOptions, options);

View File

@@ -66,7 +66,7 @@ function add(name, data, callback) {
];
database.transaction(queries, function (error) {
if (error && error.code === 'ER_DUP_ENTRY') return callback(new BoxError(BoxError.ALREADY_EXISTS, 'Domain already exists'));
if (error && error.code === 'ER_DUP_ENTRY') return callback(new BoxError(BoxError.ALREADY_EXISTS, error));
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
callback(null);

View File

@@ -20,9 +20,7 @@ var assert = require('assert'),
BoxError = require('./boxerror.js'),
constants = require('./constants.js'),
debug = require('debug')('box:externalldap'),
groups = require('./groups.js'),
ldap = require('ldapjs'),
once = require('once'),
settings = require('./settings.js'),
tasks = require('./tasks.js'),
users = require('./users.js');
@@ -42,14 +40,14 @@ function translateUser(ldapConfig, ldapUser) {
return {
username: ldapUser[ldapConfig.usernameField],
email: ldapUser.mail || ldapUser.mailPrimaryAddress,
email: ldapUser.mail,
displayName: ldapUser.cn // user.giveName + ' ' + user.sn
};
}
function validUserRequirements(user) {
if (!user.username || !user.email || !user.displayName) {
debug(`[Invalid LDAP user] username=${user.username} email=${user.email} displayName=${user.displayName}`);
debug(`[LDAP user empty username/email/displayName] username=${user.username} email=${user.email} displayName=${user.displayName}`);
return false;
} else {
return true;
@@ -57,92 +55,40 @@ function validUserRequirements(user) {
}
// performs service bind if required
function getClient(externalLdapConfig, doBindAuth, callback) {
function getClient(externalLdapConfig, callback) {
assert.strictEqual(typeof externalLdapConfig, 'object');
assert.strictEqual(typeof doBindAuth, 'boolean');
assert.strictEqual(typeof callback, 'function');
// ensure we only callback once since we also have to listen to client.error events
callback = once(callback);
// basic validation to not crash
try { ldap.parseDN(externalLdapConfig.baseDn); } catch (e) { return callback(new BoxError(BoxError.BAD_FIELD, 'invalid baseDn')); }
try { ldap.parseFilter(externalLdapConfig.filter); } catch (e) { return callback(new BoxError(BoxError.BAD_FIELD, 'invalid filter')); }
var config = {
url: externalLdapConfig.url,
tlsOptions: {
rejectUnauthorized: externalLdapConfig.acceptSelfSignedCerts ? false : true
}
};
var client;
try {
client = ldap.createClient(config);
client = ldap.createClient({ url: externalLdapConfig.url });
} catch (e) {
if (e instanceof ldap.ProtocolError) return callback(new BoxError(BoxError.BAD_FIELD, 'url protocol is invalid'));
return callback(new BoxError(BoxError.INTERNAL_ERROR, e));
}
// ensure we don't just crash
client.on('error', function (error) {
callback(new BoxError(BoxError.EXTERNAL_ERROR, error));
});
// skip bind auth if none exist or if not wanted
if (!externalLdapConfig.bindDn || !doBindAuth) return callback(null, client);
if (!externalLdapConfig.bindDn) return callback(null, client);
client.bind(externalLdapConfig.bindDn, externalLdapConfig.bindPassword, function (error) {
if (error instanceof ldap.InvalidCredentialsError) return callback(new BoxError(BoxError.INVALID_CREDENTIALS));
if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, error));
callback(null, client);
callback(null, client, externalLdapConfig);
});
}
// Fetches a single LDAP object by its distinguished name.
// Performs a service bind (getClient with doBindAuth=true) and a paged
// subtree search rooted at `dn`; resolves with the first entry collected.
// callback(error, ldapObject) - NOT_FOUND when the DN does not exist or the
// search yields no entries, EXTERNAL_ERROR for any other server failure.
function ldapGetByDN(externalLdapConfig, dn, callback) {
    assert.strictEqual(typeof externalLdapConfig, 'object');
    assert.strictEqual(typeof dn, 'string');
    assert.strictEqual(typeof callback, 'function');

    getClient(externalLdapConfig, true, function (error, client) {
        if (error) return callback(error);

        let searchOptions = {
            paged: true,
            scope: 'sub' // We may have to make this configurable
        };

        debug(`Get object at ${dn}`);

        client.search(dn, searchOptions, function (error, result) {
            if (error instanceof ldap.NoSuchObjectError) return callback(new BoxError(BoxError.NOT_FOUND));
            if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, error));

            let ldapObjects = [];

            // search results arrive as events; collect entries until 'end'
            result.on('searchEntry', entry => ldapObjects.push(entry.object));
            result.on('error', error => callback(new BoxError(BoxError.EXTERNAL_ERROR, error)));
            result.on('end', function (result) {
                client.unbind();

                if (result.status !== 0) return callback(new BoxError(BoxError.EXTERNAL_ERROR, 'Server returned status ' + result.status));
                if (ldapObjects.length === 0) return callback(new BoxError(BoxError.NOT_FOUND));

                callback(null, ldapObjects[0]);
            });
        });
    });
}
// TODO support search by email
function ldapUserSearch(externalLdapConfig, options, callback) {
function ldapSearch(externalLdapConfig, options, callback) {
assert.strictEqual(typeof externalLdapConfig, 'object');
assert.strictEqual(typeof options, 'object');
assert.strictEqual(typeof callback, 'function');
getClient(externalLdapConfig, true, function (error, client) {
getClient(externalLdapConfig, function (error, client) {
if (error) return callback(error);
let searchOptions = {
@@ -178,48 +124,6 @@ function ldapUserSearch(externalLdapConfig, options, callback) {
});
}
// Lists groups under groupBaseDn using the configured groupFilter, optionally
// AND-ed with an extra options.filter.
// callback(error, ldapGroups) - array of raw LDAP group objects.
// NOTE(review): if groupFilter is unset, searchOptions.filter stays undefined
// and the debug call below would throw on .toString() - presumably callers
// only reach this with syncGroups configs where groupFilter was validated;
// confirm against testConfig.
function ldapGroupSearch(externalLdapConfig, options, callback) {
    assert.strictEqual(typeof externalLdapConfig, 'object');
    assert.strictEqual(typeof options, 'object');
    assert.strictEqual(typeof callback, 'function');

    getClient(externalLdapConfig, true, function (error, client) {
        if (error) return callback(error);

        let searchOptions = {
            paged: true,
            scope: 'sub' // We may have to make this configurable
        };

        if (externalLdapConfig.groupFilter) searchOptions.filter = ldap.parseFilter(externalLdapConfig.groupFilter);

        if (options.filter) { // https://github.com/ldapjs/node-ldapjs/blob/master/docs/filters.md
            let extraFilter = ldap.parseFilter(options.filter);
            searchOptions.filter = new ldap.AndFilter({ filters: [ extraFilter, searchOptions.filter ] });
        }

        debug(`Listing groups at ${externalLdapConfig.groupBaseDn} with filter ${searchOptions.filter.toString()}`);

        client.search(externalLdapConfig.groupBaseDn, searchOptions, function (error, result) {
            if (error instanceof ldap.NoSuchObjectError) return callback(new BoxError(BoxError.NOT_FOUND));
            if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, error));

            let ldapGroups = [];

            // search results arrive as events; collect entries until 'end'
            result.on('searchEntry', entry => ldapGroups.push(entry.object));
            result.on('error', error => callback(new BoxError(BoxError.EXTERNAL_ERROR, error)));
            result.on('end', function (result) {
                client.unbind();

                if (result.status !== 0) return callback(new BoxError(BoxError.EXTERNAL_ERROR, 'Server returned status ' + result.status));

                callback(null, ldapGroups);
            });
        });
    });
}
function testConfig(config, callback) {
assert.strictEqual(typeof config, 'object');
assert.strictEqual(typeof callback, 'function');
@@ -237,20 +141,7 @@ function testConfig(config, callback) {
if (!config.filter) return callback(new BoxError(BoxError.BAD_FIELD, 'filter must not be empty'));
try { ldap.parseFilter(config.filter); } catch (e) { return callback(new BoxError(BoxError.BAD_FIELD, 'invalid filter')); }
if ('syncGroups' in config && typeof config.syncGroups !== 'boolean') return callback(new BoxError(BoxError.BAD_FIELD, 'syncGroups must be a boolean'));
if ('acceptSelfSignedCerts' in config && typeof config.acceptSelfSignedCerts !== 'boolean') return callback(new BoxError(BoxError.BAD_FIELD, 'acceptSelfSignedCerts must be a boolean'));
if (config.syncGroups) {
if (!config.groupBaseDn) return callback(new BoxError(BoxError.BAD_FIELD, 'groupBaseDn must not be empty'));
try { ldap.parseDN(config.groupBaseDn); } catch (e) { return callback(new BoxError(BoxError.BAD_FIELD, 'invalid groupBaseDn')); }
if (!config.groupFilter) return callback(new BoxError(BoxError.BAD_FIELD, 'groupFilter must not be empty'));
try { ldap.parseFilter(config.groupFilter); } catch (e) { return callback(new BoxError(BoxError.BAD_FIELD, 'invalid groupFilter')); }
if (!config.groupnameField || typeof config.groupnameField !== 'string') return callback(new BoxError(BoxError.BAD_FIELD, 'groupFilter must not be empty'));
}
getClient(config, true, function (error, client) {
getClient(config, function (error, client) {
if (error) return callback(error);
var opts = {
@@ -276,7 +167,7 @@ function search(identifier, callback) {
if (error) return callback(error);
if (externalLdapConfig.provider === 'noop') return callback(new BoxError(BoxError.BAD_STATE, 'not enabled'));
ldapUserSearch(externalLdapConfig, { filter: `${externalLdapConfig.usernameField}=${identifier}` }, function (error, ldapUsers) {
ldapSearch(externalLdapConfig, { filter: `${externalLdapConfig.usernameField}=${identifier}` }, function (error, ldapUsers) {
if (error) return callback(error);
// translate ldap properties to ours
@@ -297,7 +188,7 @@ function createAndVerifyUserIfNotExist(identifier, password, callback) {
if (externalLdapConfig.provider === 'noop') return callback(new BoxError(BoxError.BAD_STATE, 'not enabled'));
if (!externalLdapConfig.autoCreate) return callback(new BoxError(BoxError.BAD_STATE, 'auto create not enabled'));
ldapUserSearch(externalLdapConfig, { filter: `${externalLdapConfig.usernameField}=${identifier}` }, function (error, ldapUsers) {
ldapSearch(externalLdapConfig, { filter: `${externalLdapConfig.usernameField}=${identifier}` }, function (error, ldapUsers) {
if (error) return callback(error);
if (ldapUsers.length === 0) return callback(new BoxError(BoxError.NOT_FOUND));
if (ldapUsers.length > 1) return callback(new BoxError(BoxError.CONFLICT));
@@ -329,20 +220,17 @@ function verifyPassword(user, password, callback) {
if (error) return callback(error);
if (externalLdapConfig.provider === 'noop') return callback(new BoxError(BoxError.BAD_STATE, 'not enabled'));
ldapUserSearch(externalLdapConfig, { filter: `${externalLdapConfig.usernameField}=${user.username}` }, function (error, ldapUsers) {
ldapSearch(externalLdapConfig, { filter: `${externalLdapConfig.usernameField}=${user.username}` }, function (error, ldapUsers) {
if (error) return callback(error);
if (ldapUsers.length === 0) return callback(new BoxError(BoxError.NOT_FOUND));
if (ldapUsers.length > 1) return callback(new BoxError(BoxError.CONFLICT));
getClient(externalLdapConfig, false, function (error, client) {
if (error) return callback(error);
let client = ldap.createClient({ url: externalLdapConfig.url });
client.bind(ldapUsers[0].dn, password, function (error) {
if (error instanceof ldap.InvalidCredentialsError) return callback(new BoxError(BoxError.INVALID_CREDENTIALS));
if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, error));
client.bind(ldapUsers[0].dn, password, function (error) {
if (error instanceof ldap.InvalidCredentialsError) return callback(new BoxError(BoxError.INVALID_CREDENTIALS));
if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, error));
callback(null, translateUser(externalLdapConfig, ldapUsers[0]));
});
callback(null, translateUser(externalLdapConfig, ldapUsers[0]));
});
});
});
@@ -367,194 +255,6 @@ function startSyncer(callback) {
});
}
// Mirrors LDAP users into the local user database.
// New users are created with source 'ldap'; existing ldap-sourced users are
// updated when email/displayName changed; users that conflict with a locally
// created account or fail basic validation are skipped.
// Progress is reported from 10% up to ~40% (step = 30/(n+1)).
// Per-user create/update failures are logged and do not abort the sync.
function syncUsers(externalLdapConfig, progressCallback, callback) {
    assert.strictEqual(typeof externalLdapConfig, 'object');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    ldapUserSearch(externalLdapConfig, {}, function (error, ldapUsers) {
        if (error) return callback(error);

        debug(`Found ${ldapUsers.length} users`);

        let percent = 10;
        let step = 30/(ldapUsers.length+1); // ensure no divide by 0

        // we ignore all errors here and just log them for now
        async.eachSeries(ldapUsers, function (user, iteratorCallback) {
            user = translateUser(externalLdapConfig, user);

            if (!validUserRequirements(user)) return iteratorCallback();

            percent += step;
            progressCallback({ percent, message: `Syncing... ${user.username}` });

            users.getByUsername(user.username, function (error, result) {
                if (error && error.reason !== BoxError.NOT_FOUND) return iteratorCallback(error);

                if (!result) {
                    debug(`[adding user] username=${user.username} email=${user.email} displayName=${user.displayName}`);
                    users.create(user.username, null /* password */, user.email, user.displayName, { source: 'ldap' }, auditSource.EXTERNAL_LDAP_TASK, function (error) {
                        if (error) console.error('Failed to create user', user, error.message);
                        iteratorCallback();
                    });
                } else if (result.source !== 'ldap') {
                    // same username exists but was created locally; never overwrite it
                    debug(`[conflicting user] username=${user.username} email=${user.email} displayName=${user.displayName}`);
                    iteratorCallback();
                } else if (result.email !== user.email || result.displayName !== user.displayName) {
                    debug(`[updating user] username=${user.username} email=${user.email} displayName=${user.displayName}`);
                    users.update(result, { email: user.email, fallbackEmail: user.email, displayName: user.displayName }, auditSource.EXTERNAL_LDAP_TASK, function (error) {
                        if (error) debug('Failed to update user', user, error);
                        iteratorCallback();
                    });
                } else {
                    // user known and up-to-date
                    debug(`[up-to-date user] username=${user.username} email=${user.email} displayName=${user.displayName}`);
                    iteratorCallback();
                }
            });
        }, callback);
    });
}
// Mirrors LDAP groups into the local group database (create-only; existing
// groups are left untouched). No-op unless syncGroups is enabled in the
// config. Group names are lowercased before lookup/creation.
// Progress is reported from 40% up to ~70% (step = 30/(n+1)).
function syncGroups(externalLdapConfig, progressCallback, callback) {
    assert.strictEqual(typeof externalLdapConfig, 'object');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    if (!externalLdapConfig.syncGroups) {
        debug('Group sync is disabled');
        progressCallback({ percent: 70, message: 'Skipping group sync...' });
        return callback(null, []);
    }

    ldapGroupSearch(externalLdapConfig, {}, function (error, ldapGroups) {
        if (error) return callback(error);

        debug(`Found ${ldapGroups.length} groups`);

        let percent = 40;
        let step = 30/(ldapGroups.length+1); // ensure no divide by 0

        // we ignore all non internal errors here and just log them for now
        async.eachSeries(ldapGroups, function (ldapGroup, iteratorCallback) {
            var groupName = ldapGroup[externalLdapConfig.groupnameField];
            if (!groupName) return iteratorCallback();

            // some servers return empty array for unknown properties :-/
            if (typeof groupName !== 'string') return iteratorCallback();

            // groups are lowercase
            groupName = groupName.toLowerCase();

            percent += step;
            progressCallback({ percent, message: `Syncing... ${groupName}` });

            groups.getByName(groupName, function (error, result) {
                if (error && error.reason !== BoxError.NOT_FOUND) return iteratorCallback(error);

                if (!result) {
                    debug(`[adding group] groupname=${groupName}`);
                    groups.create(groupName, 'ldap', function (error) {
                        if (error) console.error('Failed to create group', groupName, error);
                        iteratorCallback();
                    });
                } else {
                    debug(`[up-to-date group] groupname=${groupName}`);
                    iteratorCallback();
                }
            });
        }, function (error) {
            if (error) return callback(error);

            debug('sync: ldap sync is done', error);

            callback(error);
        });
    });
}
// Syncs membership of ldap-sourced groups into our database.
// For every group with source === 'ldap', looks up the matching LDAP group
// (matched case-insensitively against groupnameField), resolves each member
// DN to a local username and adds that user to the group.
// Per-group and per-member lookup failures are logged and skipped; an error
// from ldapGroupSearch aborts the sync with that error.
// No-op unless syncGroups is enabled in the config.
function syncGroupUsers(externalLdapConfig, progressCallback, callback) {
    assert.strictEqual(typeof externalLdapConfig, 'object');
    assert.strictEqual(typeof progressCallback, 'function');
    assert.strictEqual(typeof callback, 'function');

    if (!externalLdapConfig.syncGroups) {
        debug('Group users sync is disabled');
        progressCallback({ percent: 99, message: 'Skipping group users sync...' });
        return callback(null, []);
    }

    groups.getAll(function (error, result) {
        if (error) return callback(error);

        var ldapGroups = result.filter(function (g) { return g.source === 'ldap'; });

        debug(`Found ${ldapGroups.length} groups to sync users`);

        async.eachSeries(ldapGroups, function (group, iteratorCallback) {
            debug(`Sync users for group ${group.name}`);

            ldapGroupSearch(externalLdapConfig, {}, function (error, result) {
                // BUGFIX: these early exits previously invoked the OUTER `callback`,
                // which completed the task prematurely and left the series hanging
                // (and could double-invoke the task callback). Use iteratorCallback.
                if (error) return iteratorCallback(error);

                if (!result || result.length === 0) {
                    console.error(`Unable to find group ${group.name} ignoring for now.`);
                    return iteratorCallback();
                }

                // since our group names are lowercase we cannot use potentially case matching ldap filters
                let found = result.find(function (r) {
                    if (!r[externalLdapConfig.groupnameField]) return false;
                    return r[externalLdapConfig.groupnameField].toLowerCase() === group.name;
                });

                if (!found) {
                    console.error(`Unable to find group ${group.name} ignoring for now.`);
                    return iteratorCallback();
                }

                // NOTE(review): a single-valued `member` attribute may arrive as a plain
                // string rather than an array - confirm against ldapjs behavior
                var ldapGroupMembers = found.member || found.uniqueMember || [];

                debug(`Group ${group.name} has ${ldapGroupMembers.length} members.`);

                async.eachSeries(ldapGroupMembers, function (memberDn, memberDone) {
                    ldapGetByDN(externalLdapConfig, memberDn, function (error, result) {
                        if (error) {
                            console.error(`Failed to get ${memberDn}:`, error);
                            return memberDone();
                        }

                        debug(`Found member object at ${memberDn} adding to group ${group.name}`);

                        const username = result[externalLdapConfig.usernameField];
                        if (!username) return memberDone();

                        users.getByUsername(username, function (error, result) {
                            if (error) {
                                console.error(`Failed to get user by username ${username}`, error);
                                return memberDone();
                            }

                            groups.addMember(group.id, result.id, function (error) {
                                if (error && error.reason !== BoxError.ALREADY_EXISTS) console.error('Failed to add member', error);
                                memberDone();
                            });
                        });
                    });
                }, function (error) {
                    if (error) console.error(error);
                    iteratorCallback();
                });
            });
        }, callback);
    });
}
function sync(progressCallback, callback) {
assert.strictEqual(typeof progressCallback, 'function');
assert.strictEqual(typeof callback, 'function');
@@ -565,18 +265,58 @@ function sync(progressCallback, callback) {
if (error) return callback(error);
if (externalLdapConfig.provider === 'noop') return callback(new BoxError(BoxError.BAD_STATE, 'not enabled'));
async.series([
syncUsers.bind(null, externalLdapConfig, progressCallback),
syncGroups.bind(null, externalLdapConfig, progressCallback),
syncGroupUsers.bind(null, externalLdapConfig, progressCallback)
], function (error) {
ldapSearch(externalLdapConfig, {}, function (error, ldapUsers) {
if (error) return callback(error);
progressCallback({ percent: 100, message: 'Done' });
debug(`Found ${ldapUsers.length} users`);
let percent = 10;
let step = 90/(ldapUsers.length+1); // ensure no divide by 0
debug('sync: ldap sync is done', error);
// we ignore all errors here and just log them for now
async.eachSeries(ldapUsers, function (user, iteratorCallback) {
user = translateUser(externalLdapConfig, user);
callback(error);
if (!validUserRequirements(user)) return iteratorCallback();
percent += step;
progressCallback({ percent, message: `Syncing... ${user.username}` });
users.getByUsername(user.username, function (error, result) {
if (error && error.reason !== BoxError.NOT_FOUND) {
debug(`Could not find user with username ${user.username}: ${error.message}`);
return iteratorCallback();
}
if (error) {
debug(`[adding user] username=${user.username} email=${user.email} displayName=${user.displayName}`);
users.create(user.username, null /* password */, user.email, user.displayName, { source: 'ldap' }, auditSource.EXTERNAL_LDAP_TASK, function (error) {
if (error) console.error('Failed to create user', user, error);
iteratorCallback();
});
} else if (result.source !== 'ldap') {
debug(`[conflicting user] username=${user.username} email=${user.email} displayName=${user.displayName}`);
iteratorCallback();
} else if (result.email !== user.email || result.displayName !== user.displayName) {
debug(`[updating user] username=${user.username} email=${user.email} displayName=${user.displayName}`);
users.update(result, { email: user.email, fallbackEmail: user.email, displayName: user.displayName }, auditSource.EXTERNAL_LDAP_TASK, function (error) {
if (error) debug('Failed to update user', user, error);
iteratorCallback();
});
} else {
// user known and up-to-date
debug(`[up-to-date user] username=${user.username} email=${user.email} displayName=${user.displayName}`);
iteratorCallback();
}
});
}, function (error) {
debug('sync: ldap sync is done', error);
callback(error);
});
});
});
}

View File

@@ -26,7 +26,7 @@ function startGraphite(existingInfra, callback) {
--log-opt syslog-address=udp://127.0.0.1:2514 \
--log-opt syslog-format=rfc5424 \
--log-opt tag=graphite \
-m 150m \
-m 75m \
--memory-swap 150m \
--dns 172.18.0.1 \
--dns-search=. \

View File

@@ -2,7 +2,6 @@
exports = module.exports = {
get: get,
getByName: getByName,
getWithMembers: getWithMembers,
getAll: getAll,
getAllWithMembers: getAllWithMembers,
@@ -20,6 +19,8 @@ exports = module.exports = {
getMembership: getMembership,
setMembership: setMembership,
getGroups: getGroups,
_clear: clear
};
@@ -27,7 +28,7 @@ var assert = require('assert'),
BoxError = require('./boxerror.js'),
database = require('./database.js');
var GROUPS_FIELDS = [ 'id', 'name', 'source' ].join(',');
var GROUPS_FIELDS = [ 'id', 'name' ].join(',');
function get(groupId, callback) {
assert.strictEqual(typeof groupId, 'string');
@@ -41,18 +42,6 @@ function get(groupId, callback) {
});
}
// Looks up a single group by its (unique) name.
// callback(error, group) - NOT_FOUND when no such group exists,
// DATABASE_ERROR on query failure.
function getByName(name, callback) {
    assert.strictEqual(typeof name, 'string');
    assert.strictEqual(typeof callback, 'function');

    const sql = `SELECT ${GROUPS_FIELDS} FROM userGroups WHERE name = ?`;
    database.query(sql, [ name ], function (error, results) {
        if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
        if (results.length === 0) return callback(new BoxError(BoxError.NOT_FOUND, 'Group not found'));

        callback(null, results[0]);
    });
}
function getWithMembers(groupId, callback) {
assert.strictEqual(typeof groupId, 'string');
assert.strictEqual(typeof callback, 'function');
@@ -74,7 +63,7 @@ function getWithMembers(groupId, callback) {
function getAll(callback) {
assert.strictEqual(typeof callback, 'function');
database.query('SELECT ' + GROUPS_FIELDS + ' FROM userGroups ORDER BY name', function (error, results) {
database.query('SELECT ' + GROUPS_FIELDS + ' FROM userGroups', function (error, results) {
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
callback(null, results);
@@ -94,13 +83,12 @@ function getAllWithMembers(callback) {
});
}
function add(id, name, source, callback) {
function add(id, name, callback) {
assert.strictEqual(typeof id, 'string');
assert.strictEqual(typeof name, 'string');
assert.strictEqual(typeof source, 'string');
assert.strictEqual(typeof callback, 'function');
database.query('INSERT INTO userGroups (id, name, source) VALUES (?, ?, ?)', [ id, name, source ], function (error, result) {
database.query('INSERT INTO userGroups (id, name) VALUES (?, ?)', [ id, name ], function (error, result) {
if (error && error.code === 'ER_DUP_ENTRY') return callback(new BoxError(BoxError.ALREADY_EXISTS, error));
if (error || result.affectedRows !== 1) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
@@ -272,3 +260,15 @@ function isMember(groupId, userId, callback) {
callback(null, result.length !== 0);
});
}
function getGroups(userId, callback) {
assert.strictEqual(typeof userId, 'string');
assert.strictEqual(typeof callback, 'function');
database.query('SELECT ' + GROUPS_FIELDS + ' ' +
' FROM userGroups INNER JOIN groupMembers ON userGroups.id = groupMembers.groupId AND groupMembers.userId = ?', [ userId ], function (error, results) {
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
callback(null, results);
});
}

View File

@@ -4,7 +4,6 @@ exports = module.exports = {
create: create,
remove: remove,
get: get,
getByName: getByName,
update: update,
getWithMembers: getWithMembers,
getAll: getAll,
@@ -16,6 +15,8 @@ exports = module.exports = {
removeMember: removeMember,
isMember: isMember,
getGroups: getGroups,
setMembership: setMembership,
getMembership: getMembership,
@@ -44,17 +45,8 @@ function validateGroupname(name) {
return null;
}
function validateGroupSource(source) {
assert.strictEqual(typeof source, 'string');
if (source !== '' && source !== 'ldap') return new BoxError(BoxError.BAD_FIELD, 'source must be "" or "ldap"', { field: source });
return null;
}
function create(name, source, callback) {
function create(name, callback) {
assert.strictEqual(typeof name, 'string');
assert.strictEqual(typeof source, 'string');
assert.strictEqual(typeof callback, 'function');
// we store names in lowercase
@@ -63,11 +55,8 @@ function create(name, source, callback) {
var error = validateGroupname(name);
if (error) return callback(error);
error = validateGroupSource(source);
if (error) return callback(error);
var id = 'gid-' + uuid.v4();
groupdb.add(id, name, source, function (error) {
groupdb.add(id, name, function (error) {
if (error) return callback(error);
callback(null, { id: id, name: name });
@@ -96,17 +85,6 @@ function get(id, callback) {
});
}
function getByName(name, callback) {
assert.strictEqual(typeof name, 'string');
assert.strictEqual(typeof callback, 'function');
groupdb.getByName(name, function (error, result) {
if (error) return callback(error);
return callback(null, result);
});
}
function getWithMembers(id, callback) {
assert.strictEqual(typeof id, 'string');
assert.strictEqual(typeof callback, 'function');
@@ -239,6 +217,17 @@ function update(groupId, data, callback) {
});
}
function getGroups(userId, callback) {
assert.strictEqual(typeof userId, 'string');
assert.strictEqual(typeof callback, 'function');
groupdb.getGroups(userId, function (error, results) {
if (error) return callback(error);
callback(null, results);
});
}
function count(callback) {
assert.strictEqual(typeof callback, 'function');

View File

@@ -16,12 +16,12 @@ exports = module.exports = {
// docker inspect --format='{{index .RepoDigests 0}}' $IMAGE to get the sha256
'images': {
'turn': { repo: 'cloudron/turn', tag: 'cloudron/turn:1.1.0@sha256:e1dd22aa6eef5beb7339834b200a8bb787ffc2264ce11139857a054108fefb4f' },
'mysql': { repo: 'cloudron/mysql', tag: 'cloudron/mysql:2.3.1@sha256:c1145d43c8a912fe6f5a5629a4052454a4aa6f23391c1efbffeec9d12d72a256' },
'postgresql': { repo: 'cloudron/postgresql', tag: 'cloudron/postgresql:2.3.0@sha256:4112cc31a09b465bdfbd715fedab4bc86898246b4615d789dfb1d2cb728e3872' },
'mysql': { repo: 'cloudron/mysql', tag: 'cloudron/mysql:2.3.0@sha256:0cc39004b80eb5ee570b9fd4f38859410a49692e93e0f0d8b104b0b524d7030a' },
'postgresql': { repo: 'cloudron/postgresql', tag: 'cloudron/postgresql:2.1.1@sha256:6ea1bcf9b655d628230acda01d46cf21883b112111d89583c94138646895c14c' },
'mongodb': { repo: 'cloudron/mongodb', tag: 'cloudron/mongodb:2.2.0@sha256:205486ff0f6bf6854610572df401cf3651bc62baf28fd26e9c5632497f10c2cb' },
'redis': { repo: 'cloudron/redis', tag: 'cloudron/redis:2.3.0@sha256:0e31ec817e235b1814c04af97b1e7cf0053384aca2569570ce92bef0d95e94d2' },
'mail': { repo: 'cloudron/mail', tag: 'cloudron/mail:2.9.4@sha256:0e169b97a0584a76197d2bbc039d8698bf93f815588b3b43c251bd83dd545465' },
'redis': { repo: 'cloudron/redis', tag: 'cloudron/redis:2.2.0@sha256:cfdcc1a54cf29818cac99eacedc2ecf04e44995be3d06beea11dcaa09d90ed8d' },
'mail': { repo: 'cloudron/mail', tag: 'cloudron/mail:2.9.3@sha256:2c062e8883cadae4b4c22553f9db141fc441d6c627aa85e2a5ad71e3fcbf2b42' },
'graphite': { repo: 'cloudron/graphite', tag: 'cloudron/graphite:2.3.0@sha256:b7bc1ca4f4d0603a01369a689129aa273a938ce195fe43d00d42f4f2d5212f50' },
'sftp': { repo: 'cloudron/sftp', tag: 'cloudron/sftp:2.0.1@sha256:1770cb4c6ba2d324f6ff0d7fd8daeb737064386fc86b8cef425ef48334a67b91' }
'sftp': { repo: 'cloudron/sftp', tag: 'cloudron/sftp:1.1.0@sha256:0c1fe4dd6121900624dcb383251ecb0084c3810e095064933de671409d8d6d7b' }
}
};

View File

@@ -346,7 +346,7 @@ function mailboxSearch(req, res, next) {
if (error) return callback(error);
aliases.forEach(function (a, idx) {
obj.attributes['mail' + idx] = `${a.name}@${a.domain}`;
obj.attributes['mail' + idx] = `${a}@${mailbox.domain}`;
});
// ensure all filter values are also lowercase

View File

@@ -1,53 +1,52 @@
'use strict';
exports = module.exports = {
getStatus,
checkConfiguration,
getStatus: getStatus,
checkConfiguration: checkConfiguration,
getDomains,
getDomains: getDomains,
getDomain,
clearDomains,
getDomain: getDomain,
clearDomains: clearDomains,
onDomainAdded,
onDomainRemoved,
onMailFqdnChanged,
onDomainAdded: onDomainAdded,
onDomainRemoved: onDomainRemoved,
removePrivateFields,
removePrivateFields: removePrivateFields,
setDnsRecords,
setDnsRecords: setDnsRecords,
onMailFqdnChanged: onMailFqdnChanged,
validateName,
validateName: validateName,
setMailFromValidation,
setCatchAllAddress,
setMailRelay,
setMailEnabled,
setMailFromValidation: setMailFromValidation,
setCatchAllAddress: setCatchAllAddress,
setMailRelay: setMailRelay,
setMailEnabled: setMailEnabled,
startMail: restartMail,
restartMail,
handleCertChanged,
getMailAuth,
restartMail: restartMail,
handleCertChanged: handleCertChanged,
getMailAuth: getMailAuth,
sendTestMail,
sendTestMail: sendTestMail,
getMailboxCount,
listMailboxes,
removeMailboxes,
getMailbox,
addMailbox,
updateMailboxOwner,
removeMailbox,
listMailboxes: listMailboxes,
removeMailboxes: removeMailboxes,
getMailbox: getMailbox,
addMailbox: addMailbox,
updateMailboxOwner: updateMailboxOwner,
removeMailbox: removeMailbox,
getAliases,
setAliases,
getAliases: getAliases,
setAliases: setAliases,
getLists,
getList,
addList,
updateList,
removeList,
resolveList,
getLists: getLists,
getList: getList,
addList: addList,
updateList: updateList,
removeList: removeList,
resolveList: resolveList,
_readDkimPublicKeySync: readDkimPublicKeySync
};
@@ -1033,25 +1032,13 @@ function sendTestMail(domain, to, callback) {
});
}
function listMailboxes(domain, search, page, perPage, callback) {
function listMailboxes(domain, page, perPage, callback) {
assert.strictEqual(typeof domain, 'string');
assert(typeof search === 'string' || search === null);
assert.strictEqual(typeof page, 'number');
assert.strictEqual(typeof perPage, 'number');
assert.strictEqual(typeof callback, 'function');
mailboxdb.listMailboxes(domain, search, page, perPage, function (error, result) {
if (error) return callback(error);
callback(null, result);
});
}
function getMailboxCount(domain, callback) {
assert.strictEqual(typeof domain, 'string');
assert.strictEqual(typeof callback, 'function');
mailboxdb.getMailboxCount(domain, function (error, result) {
mailboxdb.listMailboxes(domain, page, perPage, function (error, result) {
if (error) return callback(error);
callback(null, result);
@@ -1178,14 +1165,11 @@ function setAliases(name, domain, aliases, callback) {
});
}
function getLists(domain, search, page, perPage, callback) {
function getLists(domain, callback) {
assert.strictEqual(typeof domain, 'string');
assert(typeof search === 'string' || search === null);
assert.strictEqual(typeof page, 'number');
assert.strictEqual(typeof perPage, 'number');
assert.strictEqual(typeof callback, 'function');
mailboxdb.getLists(domain, search, page, perPage, function (error, result) {
mailboxdb.getLists(domain, function (error, result) {
if (error) return callback(error);
callback(null, result);

View File

@@ -6,7 +6,7 @@ Dear <%= cloudronName %> Admin,
If this message appears repeatedly, give the app more memory.
* To increase an app's memory limit - https://cloudron.io/documentation/apps/#memory-limit
* To increase an app's memory limit - https://cloudron.io/documentation/apps/#increasing-the-memory-limit-of-an-app
* To increase a service's memory limit - https://cloudron.io/documentation/troubleshooting/#services
Out of memory event:

View File

@@ -8,7 +8,7 @@ be reset. If you did not request this reset, please ignore this message.
To reset your password, please visit the following page:
<%- resetLink %>
Please note that the password reset link will expire in 24 hours.
Powered by https://cloudron.io
@@ -29,10 +29,6 @@ Powered by https://cloudron.io
<a href="<%= resetLink %>">Click to reset your password</a>
</p>
<br/>
Please note that the password reset link will expire in 24 hours.
<br/>
<br/>

View File

@@ -11,7 +11,6 @@ Follow the link to get started.
You are receiving this email because you were invited by <%= invitor.email %>.
<% } %>
Please note that the invite link will expire in 24 hours.
Powered by https://cloudron.io
@@ -37,9 +36,6 @@ Powered by https://cloudron.io
You are receiving this email because you were invited by <%= invitor.email %>.
<% } %>
<br/>
Please note that the invite link will expire in 24 hours.
<br/>
Powered by <a href="https://cloudron.io">Cloudron</a>

View File

@@ -1,32 +1,31 @@
'use strict';
exports = module.exports = {
addMailbox,
addList,
addMailbox: addMailbox,
addList: addList,
updateMailboxOwner,
updateList,
del,
updateMailboxOwner: updateMailboxOwner,
updateList: updateList,
del: del,
getMailboxCount,
listMailboxes,
getLists,
listMailboxes: listMailboxes,
getLists: getLists,
listAllMailboxes,
listAllMailboxes: listAllMailboxes,
get,
getMailbox,
getList,
getAlias,
get: get,
getMailbox: getMailbox,
getList: getList,
getAlias: getAlias,
getAliasesForName,
setAliasesForName,
getAliasesForName: getAliasesForName,
setAliasesForName: setAliasesForName,
getByOwnerId,
delByOwnerId,
delByDomain,
getByOwnerId: getByOwnerId,
delByOwnerId: delByOwnerId,
delByDomain: delByDomain,
updateName,
updateName: updateName,
_clear: clear,
@@ -38,7 +37,6 @@ exports = module.exports = {
var assert = require('assert'),
BoxError = require('./boxerror.js'),
database = require('./database.js'),
mysql = require('mysql'),
safe = require('safetydance'),
util = require('util');
@@ -205,44 +203,14 @@ function getMailbox(name, domain, callback) {
});
}
function getMailboxCount(domain, callback) {
function listMailboxes(domain, page, perPage, callback) {
assert.strictEqual(typeof domain, 'string');
assert.strictEqual(typeof callback, 'function');
database.query('SELECT COUNT(*) AS total FROM mailboxes WHERE type = ? AND domain = ?', [ exports.TYPE_MAILBOX, domain ], function (error, results) {
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
callback(null, results[0].total);
});
}
function listMailboxes(domain, search, page, perPage, callback) {
assert.strictEqual(typeof domain, 'string');
assert(typeof search === 'string' || search === null);
assert.strictEqual(typeof page, 'number');
assert.strictEqual(typeof perPage, 'number');
assert.strictEqual(typeof callback, 'function');
let query = `SELECT ${MAILBOX_FIELDS} FROM mailboxes WHERE type = ? AND domain = ?`;
if (search) query += ' AND (name LIKE ' + mysql.escape('%' + search + '%') + ')';
query += 'ORDER BY name LIMIT ?,?';
database.query(query, [ exports.TYPE_MAILBOX, domain, (page-1)*perPage, perPage ], function (error, results) {
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
results.forEach(function (result) { postProcess(result); });
callback(null, results);
});
}
function listAllMailboxes(page, perPage, callback) {
assert.strictEqual(typeof page, 'number');
assert.strictEqual(typeof perPage, 'number');
assert.strictEqual(typeof callback, 'function');
database.query(`SELECT ${MAILBOX_FIELDS} FROM mailboxes WHERE type = ? ORDER BY name LIMIT ?,?`,
[ exports.TYPE_MAILBOX, (page-1)*perPage, perPage ], function (error, results) {
database.query(`SELECT ${MAILBOX_FIELDS} FROM mailboxes WHERE type = ? AND domain = ? ORDER BY name LIMIT ${(page-1)*perPage},${perPage}`,
[ exports.TYPE_MAILBOX, domain ], function (error, results) {
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
results.forEach(function (result) { postProcess(result); });
@@ -251,25 +219,33 @@ function listAllMailboxes(page, perPage, callback) {
});
}
function getLists(domain, search, page, perPage, callback) {
assert.strictEqual(typeof domain, 'string');
assert(typeof search === 'string' || search === null);
function listAllMailboxes(page, perPage, callback) {
assert.strictEqual(typeof page, 'number');
assert.strictEqual(typeof perPage, 'number');
assert.strictEqual(typeof callback, 'function');
let query = `SELECT ${MAILBOX_FIELDS} FROM mailboxes WHERE type = ? AND domain = ?`;
if (search) query += ' AND (name LIKE ' + mysql.escape('%' + search + '%') + ' OR membersJson LIKE ' + mysql.escape('%' + search + '%') + ')';
database.query(`SELECT ${MAILBOX_FIELDS} FROM mailboxes WHERE type = ? ORDER BY name LIMIT ${(page-1)*perPage},${perPage}`,
[ exports.TYPE_MAILBOX ], function (error, results) {
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
query += 'ORDER BY name LIMIT ?,?';
results.forEach(function (result) { postProcess(result); });
database.query(query, [ exports.TYPE_LIST, domain, (page-1)*perPage, perPage ], function (error, results) {
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
callback(null, results);
});
}
results.forEach(function (result) { postProcess(result); });
function getLists(domain, callback) {
assert.strictEqual(typeof domain, 'string');
assert.strictEqual(typeof callback, 'function');
callback(null, results);
});
database.query('SELECT ' + MAILBOX_FIELDS + ' FROM mailboxes WHERE type = ? AND domain = ?',
[ exports.TYPE_LIST, domain ], function (error, results) {
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
results.forEach(function (result) { postProcess(result); });
callback(null, results);
});
}
function getList(name, domain, callback) {

View File

@@ -7,7 +7,6 @@ map $http_upgrade $connection_upgrade {
# http server
server {
listen 80;
server_tokens off; # hide version
<% if (hasIPv6) { -%>
listen [::]:80;
<% } -%>
@@ -43,19 +42,18 @@ server {
server {
<% if (vhost) { -%>
server_name <%= vhost %>;
listen 443 ssl http2;
listen 443 http2;
<% if (hasIPv6) { -%>
listen [::]:443 ssl http2;
listen [::]:443 http2;
<% } -%>
<% } else { -%>
listen 443 ssl http2 default_server;
listen 443 http2 default_server;
<% if (hasIPv6) { -%>
listen [::]:443 ssl http2 default_server;
listen [::]:443 http2 default_server;
<% } -%>
<% } -%>
server_tokens off; # hide version
ssl on;
# paths are relative to prefix and not to this file
ssl_certificate <%= certFilePath %>;
ssl_certificate_key <%= keyFilePath %>;
@@ -195,11 +193,6 @@ server {
client_max_body_size 0;
}
location ~ ^/api/v1/apps/.*/files/ {
proxy_pass http://127.0.0.1:3000;
client_max_body_size 0;
}
# graphite paths (uncomment block below and visit /graphite-web/dashboard)
# remember to comment out the CSP policy as well to access the graphite dashboard
# location ~ ^/graphite-web/ {

View File

@@ -156,7 +156,7 @@ function oomEvent(eventId, app, addon, containerId, event, callback) {
if (app) {
program = `App ${app.fqdn}`;
title = `The application ${app.fqdn} (${app.manifest.title}) ran out of memory.`;
message = 'The application has been restarted automatically. If you see this notification often, consider increasing the [memory limit](https://cloudron.io/documentation/apps/#memory-limit)';
message = 'The application has been restarted automatically. If you see this notification often, consider increasing the [memory limit](https://cloudron.io/documentation/apps/#increasing-the-memory-limit-of-an-app)';
} else if (addon) {
program = `${addon.name} service`;
title = `The ${addon.name} service ran out of memory`;
@@ -273,7 +273,7 @@ function alert(id, title, message, callback) {
assert.strictEqual(typeof message, 'string');
assert.strictEqual(typeof callback, 'function');
debug(`alert: id=${id} title=${title}`);
debug(`alert: id=${id} title=${title} message=${message}`);
const acknowledged = !message;

View File

@@ -17,6 +17,7 @@ exports = module.exports = {
CLOUDRON_DEFAULT_AVATAR_FILE: path.join(__dirname + '/../assets/avatar.png'),
INFRA_VERSION_FILE: path.join(baseDir(), 'platformdata/INFRA_VERSION'),
LICENSE_FILE: '/etc/cloudron/LICENSE',
PROVIDER_FILE: '/etc/cloudron/PROVIDER',
PLATFORM_DATA_DIR: path.join(baseDir(), 'platformdata'),

View File

@@ -4,10 +4,13 @@ exports = module.exports = {
setup: setup,
restore: restore,
activate: activate,
getStatus: getStatus
getStatus: getStatus,
autoRegister: autoRegister
};
var assert = require('assert'),
var appstore = require('./appstore.js'),
assert = require('assert'),
async = require('async'),
backups = require('./backups.js'),
BoxError = require('./boxerror.js'),
@@ -16,7 +19,10 @@ var assert = require('assert'),
debug = require('debug')('box:provision'),
domains = require('./domains.js'),
eventlog = require('./eventlog.js'),
fs = require('fs'),
mail = require('./mail.js'),
paths = require('./paths.js'),
safe = require('safetydance'),
semver = require('semver'),
settings = require('./settings.js'),
sysinfo = require('./sysinfo.js'),
@@ -47,6 +53,27 @@ function setProgress(task, message, callback) {
callback();
}
function autoRegister(domain, callback) {
assert.strictEqual(typeof domain, 'string');
assert.strictEqual(typeof callback, 'function');
if (!fs.existsSync(paths.LICENSE_FILE)) return callback();
const license = safe.fs.readFileSync(paths.LICENSE_FILE, 'utf8');
if (!license) return callback(new BoxError(BoxError.LICENSE_ERROR, 'Cannot read license'));
debug('Auto-registering cloudron');
appstore.registerWithLicense(license.trim(), domain, function (error) {
if (error && error.reason !== BoxError.CONFLICT) { // not already registered
debug('Failed to auto-register cloudron', error);
return callback(new BoxError(BoxError.LICENSE_ERROR, 'Failed to auto-register Cloudron with license. Please contact support@cloudron.io'));
}
callback();
});
}
function unprovision(callback) {
assert.strictEqual(typeof callback, 'function');
@@ -107,6 +134,7 @@ function setup(dnsConfig, sysinfoConfig, auditSource, callback) {
callback(); // now that args are validated run the task in the background
async.series([
autoRegister.bind(null, domain),
settings.setSysinfoConfig.bind(null, sysinfoConfig),
domains.prepareDashboardDomain.bind(null, domain, auditSource, (progress) => setProgress('setup', progress.message, NOOP_CALLBACK)),
cloudron.setDashboardDomain.bind(null, domain, auditSource),
@@ -226,11 +254,11 @@ function getStatus(callback) {
version: constants.VERSION,
apiServerOrigin: settings.apiServerOrigin(), // used by CaaS tool
webServerOrigin: settings.webServerOrigin(), // used by CaaS tool
provider: settings.provider(),
cloudronName: allSettings[settings.CLOUDRON_NAME_KEY],
footer: allSettings[settings.FOOTER_KEY] || constants.FOOTER,
adminFqdn: settings.adminDomain() ? settings.adminFqdn() : null,
activated: activated,
provider: settings.provider() // used by setup wizard of marketplace images
}, gProvisionStatus));
});
});

View File

@@ -7,6 +7,7 @@ exports = module.exports = {
};
let auditSource = require('../auditsource.js'),
backupdb = require('../backupdb.js'),
backups = require('../backups.js'),
BoxError = require('../boxerror.js'),
HttpError = require('connect-lastmile').HttpError,
@@ -19,7 +20,7 @@ function list(req, res, next) {
var perPage = typeof req.query.per_page !== 'undefined'? parseInt(req.query.per_page) : 25;
if (!perPage || perPage < 0) return next(new HttpError(400, 'per_page query param has to be a postive number'));
backups.getByIdentifierAndStatePaged(backups.BACKUP_IDENTIFIER_BOX, backups.BACKUP_STATE_NORMAL, page, perPage, function (error, result) {
backups.getByStatePaged(backupdb.BACKUP_STATE_NORMAL, page, perPage, function (error, result) {
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, { backups: result }));

View File

@@ -86,8 +86,8 @@ function logout(req, res) {
function passwordResetRequest(req, res, next) {
if (!req.body.identifier || typeof req.body.identifier !== 'string') return next(new HttpError(401, 'A identifier must be non-empty string'));
users.sendPasswordResetByIdentifier(req.body.identifier, function (error) {
if (error && error.reason !== BoxError.NOT_FOUND) return next(BoxError.toHttpError(error));
users.resetPasswordByIdentifier(req.body.identifier, function (error) {
if (error && error.reason !== BoxError.NOT_FOUND) console.error(error);
next(new HttpSuccess(202, {}));
});
@@ -102,17 +102,16 @@ function passwordReset(req, res, next) {
users.getByResetToken(req.body.resetToken, function (error, userObject) {
if (error) return next(new HttpError(401, 'Invalid resetToken'));
// if you fix the duration here, the emails and UI have to be fixed as well
if (Date.now() - userObject.resetTokenCreationTime > 24 * 60 * 60 * 1000) return next(new HttpError(401, 'Token expired'));
if (!userObject.username) return next(new HttpError(409, 'No username set'));
// setPassword clears the resetToken
users.setPassword(userObject, req.body.password, function (error) {
if (error && error.reason === BoxError.BAD_FIELD) return next(new HttpError(400, error.message));
if (error) return next(BoxError.toHttpError(error));
if (error) return next(new HttpError(500, error));
tokens.add(tokens.ID_WEBADMIN, userObject.id, Date.now() + constants.DEFAULT_TOKEN_EXPIRATION, {}, function (error, result) {
if (error) return next(BoxError.toHttpError(error));
if (error) return next(new HttpError(500, error));
next(new HttpSuccess(202, { accessToken: result.accessToken }));
});
@@ -123,23 +122,37 @@ function passwordReset(req, res, next) {
function setupAccount(req, res, next) {
assert.strictEqual(typeof req.body, 'object');
if (!req.body.email || typeof req.body.email !== 'string') return next(new HttpError(400, 'email must be a non-empty string'));
if (!req.body.resetToken || typeof req.body.resetToken !== 'string') return next(new HttpError(400, 'resetToken must be a non-empty string'));
if (!req.body.password || typeof req.body.password !== 'string') return next(new HttpError(400, 'password must be a non-empty string'));
// only sent if profile is not locked
if ('username' in req.body && typeof req.body.username !== 'string') return next(new HttpError(400, 'username must be a non-empty string'));
if ('displayName' in req.body && typeof req.body.displayName !== 'string') return next(new HttpError(400, 'displayName must be a non-empty string'));
if (!req.body.username || typeof req.body.username !== 'string') return next(new HttpError(400, 'username must be a non-empty string'));
if (!req.body.displayName || typeof req.body.displayName !== 'string') return next(new HttpError(400, 'displayName must be a non-empty string'));
users.getByResetToken(req.body.resetToken, function (error, userObject) {
if (error) return next(new HttpError(401, 'Invalid Reset Token'));
// if you fix the duration here, the emails and UI have to be fixed as well
if (Date.now() - userObject.resetTokenCreationTime > 24 * 60 * 60 * 1000) return next(new HttpError(401, 'Token expired'));
users.setupAccount(userObject, req.body, auditSource.fromRequest(req), function (error, accessToken) {
if (error) return next(BoxError.toHttpError(error));
users.update(userObject, { username: req.body.username, displayName: req.body.displayName }, auditSource.fromRequest(req), function (error) {
if (error && error.reason === BoxError.ALREADY_EXISTS) return next(new HttpError(409, 'Username already used'));
if (error && error.reason === BoxError.BAD_FIELD) return next(new HttpError(400, error.message));
if (error && error.reason === BoxError.NOT_FOUND) return next(new HttpError(404, 'No such user'));
if (error) return next(new HttpError(500, error));
next(new HttpSuccess(201, { accessToken }));
userObject.username = req.body.username;
userObject.displayName = req.body.displayName;
// setPassword clears the resetToken
users.setPassword(userObject, req.body.password, function (error) {
if (error && error.reason === BoxError.BAD_FIELD) return next(new HttpError(400, error.message));
if (error) return next(new HttpError(500, error));
tokens.add(tokens.ID_WEBADMIN, userObject.id, Date.now() + constants.DEFAULT_TOKEN_EXPIRATION, {}, function (error, result) {
if (error) return next(new HttpError(500, error));
next(new HttpSuccess(201, { accessToken: result.accessToken }));
});
});
});
});
}

View File

@@ -1,43 +0,0 @@
'use strict';
exports = module.exports = {
proxy
};
var assert = require('assert'),
BoxError = require('../boxerror.js'),
docker = require('../docker.js'),
middleware = require('../middleware/index.js'),
HttpError = require('connect-lastmile').HttpError,
safe = require('safetydance'),
url = require('url');
function proxy(req, res, next) {
assert.strictEqual(typeof req.params.id, 'string');
const appId = req.params.id;
req.clearTimeout();
docker.inspect('sftp', function (error, result) {
if (error)return next(BoxError.toHttpError(error));
const ip = safe.query(result, 'NetworkSettings.Networks.cloudron.IPAddress', null);
if (!ip) return next(new BoxError(BoxError.INACTIVE, 'Error getting IP of sftp service'));
req.url = req.originalUrl.replace(`/api/v1/apps/${appId}/files`, `/files/${appId}`);
const proxyOptions = url.parse(`https://${ip}:3000`);
proxyOptions.rejectUnauthorized = false;
const fileManagerProxy = middleware.proxy(proxyOptions);
fileManagerProxy(req, res, function (error) {
if (!error) return next();
if (error.code === 'ECONNREFUSED') return next(new HttpError(424, 'Unable to connect to filemanager server'));
if (error.code === 'ECONNRESET') return next(new HttpError(424, 'Unable to query filemanager server'));
next(new HttpError(500, error));
});
});
}

View File

@@ -20,9 +20,7 @@ function create(req, res, next) {
if (typeof req.body.name !== 'string') return next(new HttpError(400, 'name must be string'));
var source = ''; // means local
groups.create(req.body.name, source, function (error, group) {
groups.create(req.body.name, function (error, group) {
if (error) return next(BoxError.toHttpError(error));
var groupInfo = {

View File

@@ -10,7 +10,6 @@ exports = module.exports = {
cloudron: require('./cloudron.js'),
domains: require('./domains.js'),
eventlog: require('./eventlog.js'),
filemanager: require('./filemanager.js'),
graphs: require('./graphs.js'),
groups: require('./groups.js'),
mail: require('./mail.js'),

View File

@@ -1,35 +1,33 @@
'use strict';
exports = module.exports = {
getDomain,
getDomain: getDomain,
setDnsRecords,
setDnsRecords: setDnsRecords,
getStatus,
getStatus: getStatus,
setMailFromValidation,
setCatchAllAddress,
setMailRelay,
setMailEnabled,
setMailFromValidation: setMailFromValidation,
setCatchAllAddress: setCatchAllAddress,
setMailRelay: setMailRelay,
setMailEnabled: setMailEnabled,
sendTestMail,
sendTestMail: sendTestMail,
listMailboxes,
getMailbox,
addMailbox,
updateMailbox,
removeMailbox,
listMailboxes: listMailboxes,
getMailbox: getMailbox,
addMailbox: addMailbox,
updateMailbox: updateMailbox,
removeMailbox: removeMailbox,
getAliases,
setAliases,
getAliases: getAliases,
setAliases: setAliases,
getLists,
getList,
addList,
updateList,
removeList,
getMailboxCount
getLists: getLists,
getList: getList,
addList: addList,
updateList: updateList,
removeList: removeList,
};
var assert = require('assert'),
@@ -161,25 +159,13 @@ function listMailboxes(req, res, next) {
var perPage = typeof req.query.per_page !== 'undefined'? parseInt(req.query.per_page) : 25;
if (!perPage || perPage < 0) return next(new HttpError(400, 'per_page query param has to be a positive number'));
if (req.query.search && typeof req.query.search !== 'string') return next(new HttpError(400, 'search must be a string'));
mail.listMailboxes(req.params.domain, req.query.search || null, page, perPage, function (error, result) {
mail.listMailboxes(req.params.domain, page, perPage, function (error, result) {
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, { mailboxes: result }));
});
}
function getMailboxCount(req, res, next) {
assert.strictEqual(typeof req.params.domain, 'string');
mail.getMailboxCount(req.params.domain, function (error, count) {
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, { count }));
});
}
function getMailbox(req, res, next) {
assert.strictEqual(typeof req.params.domain, 'string');
assert.strictEqual(typeof req.params.name, 'string');
@@ -262,15 +248,7 @@ function setAliases(req, res, next) {
function getLists(req, res, next) {
assert.strictEqual(typeof req.params.domain, 'string');
const page = typeof req.query.page !== 'undefined' ? parseInt(req.query.page) : 1;
if (!page || page < 0) return next(new HttpError(400, 'page query param has to be a positive number'));
const perPage = typeof req.query.per_page !== 'undefined'? parseInt(req.query.per_page) : 25;
if (!perPage || perPage < 0) return next(new HttpError(400, 'per_page query param has to be a positive number'));
if (req.query.search && typeof req.query.search !== 'string') return next(new HttpError(400, 'search must be a string'));
mail.getLists(req.params.domain, req.query.search || null, page, perPage, function (error, result) {
mail.getLists(req.params.domain, function (error, result) {
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, { lists: result }));

View File

@@ -1,42 +1,35 @@
'use strict';
exports = module.exports = {
authorize,
get,
update,
getAvatar,
setAvatar,
clearAvatar,
changePassword,
setTwoFactorAuthenticationSecret,
enableTwoFactorAuthentication,
disableTwoFactorAuthentication,
get: get,
update: update,
getAvatar: getAvatar,
setAvatar: setAvatar,
clearAvatar: clearAvatar,
changePassword: changePassword,
setTwoFactorAuthenticationSecret: setTwoFactorAuthenticationSecret,
enableTwoFactorAuthentication: enableTwoFactorAuthentication,
disableTwoFactorAuthentication: disableTwoFactorAuthentication
};
var assert = require('assert'),
auditSource = require('../auditsource.js'),
BoxError = require('../boxerror.js'),
fs = require('fs'),
HttpError = require('connect-lastmile').HttpError,
HttpSuccess = require('connect-lastmile').HttpSuccess,
path = require('path'),
paths = require('../paths.js'),
safe = require('safetydance'),
users = require('../users.js'),
settings = require('../settings.js'),
_ = require('underscore');
function authorize(req, res, next) {
assert.strictEqual(typeof req.user, 'object');
settings.getDirectoryConfig(function (error, directoryConfig) {
if (error) return next(BoxError.toHttpError(error));
if (directoryConfig.lockUserProfiles) return next(new HttpError(403, 'admin has disallowed users from editing profiles'));
next();
});
}
function get(req, res, next) {
assert.strictEqual(typeof req.user, 'object');
const emailHash = require('crypto').createHash('md5').update(req.user.email).digest('hex');
next(new HttpSuccess(200, {
id: req.user.id,
username: req.user.username,
@@ -46,7 +39,7 @@ function get(req, res, next) {
twoFactorAuthenticationEnabled: req.user.twoFactorAuthenticationEnabled,
role: req.user.role,
source: req.user.source,
avatarUrl: users.getAvatarUrlSync(req.user)
avatarUrl: fs.existsSync(path.join(paths.PROFILE_ICONS_DIR, req.user.id)) ? `${settings.adminOrigin()}/api/v1/profile/avatar/${req.user.id}` : `https://www.gravatar.com/avatar/${emailHash}.jpg`
}));
}
@@ -72,27 +65,21 @@ function setAvatar(req, res, next) {
if (!req.files.avatar) return next(new HttpError(400, 'avatar is missing'));
users.setAvatar(req.user.id, req.files.avatar.path, function (error) {
if (error) return next(BoxError.toHttpError(error));
if (!safe.fs.renameSync(req.files.avatar.path, path.join(paths.PROFILE_ICONS_DIR, req.user.id))) return next(new HttpError(500, safe.error));
next(new HttpSuccess(202, {}));
});
next(new HttpSuccess(202, {}));
}
function clearAvatar(req, res, next) {
assert.strictEqual(typeof req.user, 'object');
users.clearAvatar(req.user.id, function (error) {
if (error) return next(BoxError.toHttpError(error));
safe.fs.unlinkSync(path.join(paths.PROFILE_ICONS_DIR, req.user.id));
next(new HttpSuccess(202, {}));
});
next(new HttpSuccess(202, {}));
}
function getAvatar(req, res) {
assert.strictEqual(typeof req.params.identifier, 'string');
res.sendFile(users.getAvatarFileSync(req.params.identifier));
res.sendFile(path.join(paths.PROFILE_ICONS_DIR, req.params.identifier));
}
function changePassword(req, res, next) {

View File

@@ -122,7 +122,7 @@ function getStatus(req, res, next) {
// check if Cloudron is not in setup state nor activated and let appstore know of the attempt
if (!status.activated && !status.setup.active && !status.restore.active) {
appstore.trackBeginSetup();
appstore.trackBeginSetup(status.provider);
}
});
}

View File

@@ -160,7 +160,6 @@ function setExternalLdapConfig(req, res, next) {
if ('baseDn' in req.body && typeof req.body.baseDn !== 'string') return next(new HttpError(400, 'baseDn must be a string'));
if ('usernameField' in req.body && typeof req.body.usernameField !== 'string') return next(new HttpError(400, 'usernameField must be a string'));
if ('filter' in req.body && typeof req.body.filter !== 'string') return next(new HttpError(400, 'filter must be a string'));
if ('groupBaseDn' in req.body && typeof req.body.groupBaseDn !== 'string') return next(new HttpError(400, 'groupBaseDn must be a string'));
if ('bindDn' in req.body && typeof req.body.bindDn !== 'string') return next(new HttpError(400, 'bindDn must be a non empty string'));
if ('bindPassword' in req.body && typeof req.body.bindPassword !== 'string') return next(new HttpError(400, 'bindPassword must be a string'));
@@ -234,27 +233,6 @@ function setRegistryConfig(req, res, next) {
});
}
function getDirectoryConfig(req, res, next) {
settings.getDirectoryConfig(function (error, directoryConfig) {
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, directoryConfig));
});
}
function setDirectoryConfig(req, res, next) {
assert.strictEqual(typeof req.body, 'object');
if (typeof req.body.lockUserProfiles !== 'boolean') return next(new HttpError(400, 'lockUserProfiles is required'));
if (typeof req.body.mandatory2FA !== 'boolean') return next(new HttpError(400, 'mandatory2FA is required'));
settings.setDirectoryConfig(req.body, function (error) {
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(200, {}));
});
}
function getSysinfoConfig(req, res, next) {
settings.getSysinfoConfig(function (error, sysinfoConfig) {
if (error) return next(BoxError.toHttpError(error));
@@ -291,7 +269,6 @@ function get(req, res, next) {
case settings.BOX_AUTOUPDATE_PATTERN_KEY: return getBoxAutoupdatePattern(req, res, next);
case settings.TIME_ZONE_KEY: return getTimeZone(req, res, next);
case settings.DIRECTORY_CONFIG_KEY: return getDirectoryConfig(req, res, next);
case settings.SUPPORT_CONFIG_KEY: return getSupportConfig(req, res, next);
default: return next(new HttpError(404, 'No such setting'));
@@ -313,8 +290,6 @@ function set(req, res, next) {
case settings.BOX_AUTOUPDATE_PATTERN_KEY: return setBoxAutoupdatePattern(req, res, next);
case settings.TIME_ZONE_KEY: return setTimeZone(req, res, next);
case settings.DIRECTORY_CONFIG_KEY: return setDirectoryConfig(req, res, next);
default: return next(new HttpError(404, 'No such setting'));
}
}

View File

@@ -62,7 +62,7 @@ function setup(done) {
},
function createSettings(callback) {
settings.setBackupConfig({ provider: 'filesystem', backupFolder: '/tmp', format: 'tgz', retentionPolicy: { keepWithinSecs: 2 * 24 * 60 * 60 } }, callback);
settings.setBackupConfig({ provider: 'filesystem', backupFolder: '/tmp', format: 'tgz', retentionPolicy: {} }, callback);
}
], done);
}

View File

@@ -32,7 +32,7 @@ function setup(done) {
server.start.bind(server),
database._clear,
settings._setApiServerOrigin.bind(null, 'http://localhost:6060'),
settings.setBackupConfig.bind(null, { provider: 'filesystem', backupFolder: '/tmp', format: 'tgz', retentionPolicy: { keepWithinSecs: 10000 } })
settings.setBackupConfig.bind(null, { provider: 'filesystem', backupFolder: '/tmp', format: 'tgz', retentionPolicy: {} })
], done);
}
@@ -44,7 +44,7 @@ function cleanup(done) {
});
}
describe('Cloudron API', function () {
describe('Cloudron', function () {
describe('activate', function () {

View File

@@ -86,7 +86,7 @@ describe('Groups API', function () {
it('create fails due to mising token', function (done) {
superagent.post(SERVER_URL + '/api/v1/groups')
.send({ name: GROUP_NAME })
.send({ name: GROUP_NAME})
.end(function (error, result) {
expect(result.statusCode).to.equal(401);
done();
@@ -96,7 +96,7 @@ describe('Groups API', function () {
it('create succeeds', function (done) {
superagent.post(SERVER_URL + '/api/v1/groups')
.query({ access_token: token })
.send({ name: GROUP_NAME })
.send({ name: GROUP_NAME})
.end(function (error, result) {
expect(result.statusCode).to.equal(201);
groupObject = result.body;

View File

@@ -9,20 +9,15 @@ var async = require('async'),
constants = require('../../constants.js'),
database = require('../../database.js'),
expect = require('expect.js'),
fs = require('fs'),
rimraf = require('rimraf'),
server = require('../../server.js'),
superagent = require('superagent');
var SERVER_URL = 'http://localhost:' + constants.PORT;
var USERNAME = 'superadmin', PASSWORD = 'Foobar?1337', EMAIL ='silly@me.com';
var BACKUP_FOLDER = '/tmp/backup_test';
var USERNAME = 'superadmin', PASSWORD = 'Foobar?1337', EMAIL ='silly@me.com';
var token = null;
function setup(done) {
fs.mkdirSync(BACKUP_FOLDER, { recursive: true });
async.series([
server.start.bind(null),
database._clear.bind(null),
@@ -45,8 +40,6 @@ function setup(done) {
}
function cleanup(done) {
rimraf.sync(BACKUP_FOLDER);
database._clear(function (error) {
expect(!error).to.be.ok();
@@ -211,246 +204,4 @@ describe('Settings API', function () {
});
});
});
describe('backup_config', function () {
// keep in sync with defaults in settings.js
let defaultConfig = {
provider: 'filesystem',
backupFolder: '/var/backups',
format: 'tgz',
encryption: null,
retentionPolicy: { keepWithinSecs: 2 * 24 * 60 * 60 }, // 2 days
intervalSecs: 24 * 60 * 60 // ~1 day
};
it('can get backup_config (default)', function (done) {
superagent.get(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.end(function (err, res) {
expect(res.statusCode).to.equal(200);
expect(res.body).to.eql(defaultConfig);
done();
});
});
it('cannot set backup_config without provider', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
delete tmp.provider;
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with invalid provider', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.provider = 'invalid provider';
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config without intervalSecs', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
delete tmp.intervalSecs;
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with invalid intervalSecs', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.intervalSecs = 'not a number';
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config without format', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
delete tmp.format;
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with invalid format', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.format = 'invalid format';
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config without retentionPolicy', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
delete tmp.retentionPolicy;
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with invalid retentionPolicy', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.retentionPolicy = 'not an object';
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with empty retentionPolicy', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.retentionPolicy = {};
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with retentionPolicy missing properties', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.retentionPolicy = { foo: 'bar' };
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with retentionPolicy with invalid keepWithinSecs', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.retentionPolicy = { keepWithinSecs: 'not a number' };
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with invalid password', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.password = 1234;
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with invalid syncConcurrency', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.syncConcurrency = 'not a number';
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with invalid syncConcurrency', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.syncConcurrency = 0;
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('cannot set backup_config with invalid acceptSelfSignedCerts', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.acceptSelfSignedCerts = 'not a boolean';
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(400);
done();
});
});
it('can set backup_config', function (done) {
var tmp = JSON.parse(JSON.stringify(defaultConfig));
tmp.format = 'rsync';
tmp.backupFolder = BACKUP_FOLDER;
superagent.post(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.send(tmp)
.end(function (err, res) {
expect(res.statusCode).to.equal(200);
done();
});
});
it('can get backup_config', function (done) {
superagent.get(SERVER_URL + '/api/v1/settings/backup_config')
.query({ access_token: token })
.end(function (err, res) {
expect(res.statusCode).to.equal(200);
expect(res.body.format).to.equal('rsync');
expect(res.body.backupFolder).to.equal(BACKUP_FOLDER);
done();
});
});
});
});

View File

@@ -13,6 +13,7 @@ var async = require('async'),
expect = require('expect.js'),
hat = require('../../hat.js'),
groups = require('../../groups.js'),
mail = require('../../mail.js'),
mailer = require('../../mailer.js'),
superagent = require('superagent'),
server = require('../../server.js'),
@@ -49,7 +50,7 @@ function setup(done) {
], function (error) {
expect(error).to.not.be.ok();
groups.create('somegroupname', '', function (error, result) {
groups.create('somegroupname', function (error, result) {
expect(error).to.not.be.ok();
groupObject = result;

View File

@@ -1,20 +1,18 @@
'use strict';
exports = module.exports = {
get,
update,
list,
create,
remove,
changePassword,
verifyPassword,
createInvite,
sendInvite,
setGroups,
setAvatar,
clearAvatar,
get: get,
update: update,
list: list,
create: create,
remove: remove,
changePassword: changePassword,
verifyPassword: verifyPassword,
createInvite: createInvite,
sendInvite: sendInvite,
setGroups: setGroups,
load
load: load
};
var assert = require('assert'),
@@ -194,25 +192,3 @@ function changePassword(req, res, next) {
next(new HttpSuccess(204));
});
}
function setAvatar(req, res, next) {
assert.strictEqual(typeof req.resource, 'object');
if (!req.files.avatar) return next(new HttpError(400, 'avatar is missing'));
users.setAvatar(req.resource.id, req.files.avatar.path, function (error) {
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(202, {}));
});
}
function clearAvatar(req, res, next) {
assert.strictEqual(typeof req.resource, 'object');
users.clearAvatar(req.resource.id, function (error) {
if (error) return next(BoxError.toHttpError(error));
next(new HttpSuccess(202, {}));
});
}

View File

@@ -13,21 +13,27 @@ let apps = require('./apps.js'),
docker = require('./docker.js'),
_ = require('underscore');
var NOOP_CALLBACK = function (error) { if (error) debug('Unhandled error: ', error); };
// appId -> { schedulerConfig (manifest), cronjobs }
var gState = { };
function sync() {
function sync(callback) {
assert(!callback || typeof callback === 'function');
callback = callback || NOOP_CALLBACK;
apps.getAll(function (error, allApps) {
if (error) return debug(`sync: error getting app list. ${error.message}`);
if (error) return callback(error);
var allAppIds = allApps.map(function (app) { return app.id; });
var removedAppIds = _.difference(Object.keys(gState), allAppIds);
if (removedAppIds.length !== 0) debug(`sync: stopping jobs of removed apps ${JSON.stringify(removedAppIds)}`);
if (removedAppIds.length !== 0) debug('sync: stopping jobs of removed apps %j', removedAppIds);
async.eachSeries(removedAppIds, function (appId, iteratorDone) {
stopJobs(appId, gState[appId], iteratorDone);
}, function (error) {
if (error) debug(`sync: error stopping jobs of removed apps: ${error.message}`);
if (error) debug('sync: error stopping jobs of removed apps', error);
gState = _.omit(gState, removedAppIds);
@@ -42,7 +48,7 @@ function sync() {
}
stopJobs(app.id, appState, function (error) {
if (error) debug(`sync: error stopping jobs of ${app.id} : ${error.message}`);
if (error) debug(`sync: error stopping jobs of ${app.fqdn} : ${error.message}`);
if (!schedulerConfig) {
delete gState[app.id];
@@ -69,7 +75,7 @@ function killContainer(containerName, callback) {
docker.stopContainerByName.bind(null, containerName),
docker.deleteContainerByName.bind(null, containerName)
], function (error) {
if (error) debug(`killContainer: failed to kill task with name ${containerName} : ${error.message}`);
if (error) debug('Failed to kill task with name %s : %s', containerName, error.message);
callback(error);
});
@@ -95,7 +101,6 @@ function createCronJobs(app, schedulerConfig) {
assert.strictEqual(typeof app, 'object');
assert(schedulerConfig && typeof schedulerConfig === 'object');
const appId = app.id;
var jobs = { };
Object.keys(schedulerConfig).forEach(function (taskName) {
@@ -107,9 +112,7 @@ function createCronJobs(app, schedulerConfig) {
var cronJob = new CronJob({
cronTime: cronTime, // at this point, the pattern has been validated
onTick: () => runTask(appId, taskName, (error) => { // put the app id in closure, so we don't use the outdated app object by mistake
if (error) debug(`could not run task ${taskName} : ${error.message}`);
}),
onTick: runTask.bind(null, app.id, taskName), // put the app id in closure, so we don't use the outdated app object by mistake
start: true
});
@@ -122,14 +125,17 @@ function createCronJobs(app, schedulerConfig) {
function runTask(appId, taskName, callback) {
assert.strictEqual(typeof appId, 'string');
assert.strictEqual(typeof taskName, 'string');
assert.strictEqual(typeof callback, 'function');
assert(!callback || typeof callback === 'function');
const JOB_MAX_TIME = 30 * 60 * 1000; // 30 minutes
callback = callback || NOOP_CALLBACK;
apps.get(appId, function (error, app) {
if (error) return callback(error);
if (app.installationState !== apps.ISTATE_INSTALLED || app.runState !== apps.RSTATE_RUNNING || app.health !== apps.HEALTH_HEALTHY) {
debug(`runTask: skipped task ${taskName} because app ${app.fqdn} has state ${app.installationState} / ${app.runState}`);
return callback();
}

View File

@@ -47,8 +47,6 @@ function initializeExpressSync() {
tokens.method(req, res),
tokens.url(req, res).replace(/(access_token=)[^&]+/, '$1' + '<redacted>'),
tokens.status(req, res),
res.errorBody ? res.errorBody.status : '', // attached by connect-lastmile. can be missing when router errors like 404
res.errorBody ? res.errorBody.message : '', // attached by connect-lastmile. can be missing when router errors like 404
tokens['response-time'](req, res), 'ms', '-',
tokens.res(req, res, 'content-length')
].join(' ');
@@ -66,6 +64,7 @@ function initializeExpressSync() {
// the timeout middleware will respond with a 503. the request itself cannot be 'aborted' and will continue
// search for req.clearTimeout in route handlers to see places where this timeout is reset
.use(middleware.timeout(REQUEST_TIMEOUT, { respond: true }))
.use(json)
.use(urlencoded)
.use(middleware.cors({ origins: [ '*' ], allowCredentials: false }))
.use(router)
@@ -85,164 +84,165 @@ function initializeExpressSync() {
const authorizeUserManager = routes.accesscontrol.authorize(users.ROLE_USER_MANAGER);
// public routes
router.post('/api/v1/cloudron/setup', json, routes.provision.providerTokenAuth, routes.provision.setup); // only available until no-domain
router.post('/api/v1/cloudron/restore', json, routes.provision.restore); // only available until activated
router.post('/api/v1/cloudron/activate', json, routes.provision.activate);
router.get ('/api/v1/cloudron/status', routes.provision.getStatus);
router.get ('/api/v1/cloudron/avatar', routes.branding.getCloudronAvatar); // this is a public alias for /api/v1/branding/cloudron_avatar
router.post('/api/v1/cloudron/setup', routes.provision.providerTokenAuth, routes.provision.setup); // only available until no-domain
router.post('/api/v1/cloudron/restore', routes.provision.restore); // only available until activated
router.post('/api/v1/cloudron/activate', routes.provision.activate);
router.get ('/api/v1/cloudron/status', routes.provision.getStatus);
router.get ('/api/v1/cloudron/avatar', routes.branding.getCloudronAvatar); // this is a public alias for /api/v1/branding/cloudron_avatar
// login/logout routes
router.post('/api/v1/cloudron/login', json, password, routes.cloudron.login);
router.get ('/api/v1/cloudron/logout', routes.cloudron.logout); // this will invalidate the token if any and redirect to /login.html always
router.post('/api/v1/cloudron/password_reset_request', json, routes.cloudron.passwordResetRequest);
router.post('/api/v1/cloudron/password_reset', json, routes.cloudron.passwordReset);
router.post('/api/v1/cloudron/setup_account', json, routes.cloudron.setupAccount);
router.post('/api/v1/cloudron/login', password, routes.cloudron.login);
router.get ('/api/v1/cloudron/logout', routes.cloudron.logout); // this will invalidate the token if any and redirect to /login.html always
router.post('/api/v1/cloudron/password_reset_request', routes.cloudron.passwordResetRequest);
router.post('/api/v1/cloudron/password_reset', routes.cloudron.passwordReset);
router.post('/api/v1/cloudron/setup_account', routes.cloudron.setupAccount);
// developer routes
router.post('/api/v1/developer/login', json, password, routes.cloudron.login); // DEPRECATED we should use the regular /api/v1/cloudron/login
router.post('/api/v1/developer/login', password, routes.cloudron.login); // DEPRECATED we should use the regular /api/v1/cloudron/login
// cloudron routes
router.get ('/api/v1/cloudron/update', token, authorizeAdmin, routes.cloudron.getUpdateInfo);
router.post('/api/v1/cloudron/update', json, token, authorizeAdmin, routes.cloudron.update);
router.post('/api/v1/cloudron/prepare_dashboard_domain', json, token, authorizeAdmin, routes.cloudron.prepareDashboardDomain);
router.post('/api/v1/cloudron/set_dashboard_domain', json, token, authorizeAdmin, routes.cloudron.setDashboardAndMailDomain);
router.post('/api/v1/cloudron/renew_certs', json, token, authorizeAdmin, routes.cloudron.renewCerts);
router.post('/api/v1/cloudron/check_for_updates', json, token, authorizeAdmin, routes.cloudron.checkForUpdates);
router.get ('/api/v1/cloudron/reboot', token, authorizeAdmin, routes.cloudron.isRebootRequired);
router.post('/api/v1/cloudron/reboot', json, token, authorizeAdmin, routes.cloudron.reboot);
router.get ('/api/v1/cloudron/graphs', token, authorizeAdmin, routes.graphs.getGraphs);
router.get ('/api/v1/cloudron/disks', token, authorizeAdmin, routes.cloudron.getDisks);
router.get ('/api/v1/cloudron/memory', token, authorizeAdmin, routes.cloudron.getMemory);
router.get ('/api/v1/cloudron/logs/:unit', token, authorizeAdmin, routes.cloudron.getLogs);
router.get ('/api/v1/cloudron/logstream/:unit', token, authorizeAdmin, routes.cloudron.getLogStream);
router.get ('/api/v1/cloudron/eventlog', token, authorizeAdmin, routes.eventlog.list);
router.get ('/api/v1/cloudron/eventlog/:eventId', token, authorizeAdmin, routes.eventlog.get);
router.post('/api/v1/cloudron/sync_external_ldap', json, token, authorizeAdmin, routes.cloudron.syncExternalLdap);
router.get ('/api/v1/cloudron/server_ip', token, authorizeAdmin, routes.cloudron.getServerIp);
router.get ('/api/v1/cloudron/update', token, authorizeAdmin, routes.cloudron.getUpdateInfo);
router.post('/api/v1/cloudron/update', token, authorizeAdmin, routes.cloudron.update);
router.post('/api/v1/cloudron/prepare_dashboard_domain', token, authorizeAdmin, routes.cloudron.prepareDashboardDomain);
router.post('/api/v1/cloudron/set_dashboard_domain', token, authorizeAdmin, routes.cloudron.setDashboardAndMailDomain);
router.post('/api/v1/cloudron/renew_certs', token, authorizeAdmin, routes.cloudron.renewCerts);
router.post('/api/v1/cloudron/check_for_updates', token, authorizeAdmin, routes.cloudron.checkForUpdates);
router.get ('/api/v1/cloudron/reboot', token, authorizeAdmin, routes.cloudron.isRebootRequired);
router.post('/api/v1/cloudron/reboot', token, authorizeAdmin, routes.cloudron.reboot);
router.get ('/api/v1/cloudron/graphs', token, authorizeAdmin, routes.graphs.getGraphs);
router.get ('/api/v1/cloudron/disks', token, authorizeAdmin, routes.cloudron.getDisks);
router.get ('/api/v1/cloudron/memory', token, authorizeAdmin, routes.cloudron.getMemory);
router.get ('/api/v1/cloudron/logs/:unit', token, authorizeAdmin, routes.cloudron.getLogs);
router.get ('/api/v1/cloudron/logstream/:unit', token, authorizeAdmin, routes.cloudron.getLogStream);
router.get ('/api/v1/cloudron/eventlog', token, authorizeAdmin, routes.eventlog.list);
router.get ('/api/v1/cloudron/eventlog/:eventId', token, authorizeAdmin, routes.eventlog.get);
router.post('/api/v1/cloudron/sync_external_ldap', token, authorizeAdmin, routes.cloudron.syncExternalLdap);
router.get ('/api/v1/cloudron/server_ip', token, authorizeAdmin, routes.cloudron.getServerIp);
// task routes
router.get ('/api/v1/tasks', token, authorizeAdmin, routes.tasks.list);
router.get ('/api/v1/tasks/:taskId', token, authorizeAdmin, routes.tasks.get);
router.get ('/api/v1/tasks/:taskId/logs', token, authorizeAdmin, routes.tasks.getLogs);
router.get ('/api/v1/tasks/:taskId/logstream', token, authorizeAdmin, routes.tasks.getLogStream);
router.post('/api/v1/tasks/:taskId/stop', json, token, authorizeAdmin, routes.tasks.stopTask);
// tasks
router.get ('/api/v1/tasks', token, authorizeAdmin, routes.tasks.list);
router.get ('/api/v1/tasks/:taskId', token, authorizeAdmin, routes.tasks.get);
router.get ('/api/v1/tasks/:taskId/logs', token, authorizeAdmin, routes.tasks.getLogs);
router.get ('/api/v1/tasks/:taskId/logstream', token, authorizeAdmin, routes.tasks.getLogStream);
router.post('/api/v1/tasks/:taskId/stop', token, authorizeAdmin, routes.tasks.stopTask);
// notification routes
router.get ('/api/v1/notifications', token, routes.notifications.verifyOwnership, routes.notifications.list);
router.get ('/api/v1/notifications/:notificationId', token, routes.notifications.verifyOwnership, routes.notifications.get);
router.post('/api/v1/notifications/:notificationId', json, token, routes.notifications.verifyOwnership, routes.notifications.ack);
// notifications
router.get ('/api/v1/notifications', token, routes.notifications.verifyOwnership, routes.notifications.list);
router.get ('/api/v1/notifications/:notificationId', token, routes.notifications.verifyOwnership, routes.notifications.get);
router.post('/api/v1/notifications/:notificationId', token, routes.notifications.verifyOwnership, routes.notifications.ack);
// backup routes
router.get ('/api/v1/backups', token, authorizeAdmin, routes.backups.list);
router.post('/api/v1/backups/create', token, authorizeAdmin, routes.backups.startBackup);
router.post('/api/v1/backups/cleanup', json, token, authorizeAdmin, routes.backups.cleanup);
// backups
router.get ('/api/v1/backups', token, authorizeAdmin, routes.backups.list);
router.post('/api/v1/backups/create', token, authorizeAdmin, routes.backups.startBackup);
router.post('/api/v1/backups/cleanup', token, authorizeAdmin, routes.backups.cleanup);
// config route (for dashboard). can return some private configuration unlike status
// config route (for dashboard)
router.get ('/api/v1/config', token, routes.cloudron.getConfig);
// working off the user behind the provided token
router.get ('/api/v1/profile', token, routes.profile.get);
router.post('/api/v1/profile', json, token, routes.profile.authorize, routes.profile.update);
router.get ('/api/v1/profile/avatar/:identifier', routes.profile.getAvatar); // this is not scoped so it can used directly in img tag
router.post('/api/v1/profile/avatar', json, token, multipart, routes.profile.setAvatar); // avatar is not exposed in LDAP. so it's personal and not locked
router.del ('/api/v1/profile/avatar', token, routes.profile.clearAvatar);
router.post('/api/v1/profile/password', json, token, routes.users.verifyPassword, routes.profile.changePassword);
router.post('/api/v1/profile/twofactorauthentication', json, token, routes.profile.setTwoFactorAuthenticationSecret);
router.post('/api/v1/profile/twofactorauthentication/enable', json, token, routes.profile.enableTwoFactorAuthentication);
router.post('/api/v1/profile/twofactorauthentication/disable', json, token, routes.users.verifyPassword, routes.profile.disableTwoFactorAuthentication);
router.get ('/api/v1/profile', token, routes.profile.get);
router.post('/api/v1/profile', token, routes.profile.update);
router.get ('/api/v1/profile/avatar/:identifier', routes.profile.getAvatar); // this is not scoped so it can used directly in img tag
router.post('/api/v1/profile/avatar', token, multipart, routes.profile.setAvatar);
router.del ('/api/v1/profile/avatar', token, routes.profile.clearAvatar);
router.post('/api/v1/profile/password', token, routes.users.verifyPassword, routes.profile.changePassword);
router.post('/api/v1/profile/twofactorauthentication', token, routes.profile.setTwoFactorAuthenticationSecret);
router.post('/api/v1/profile/twofactorauthentication/enable', token, routes.profile.enableTwoFactorAuthentication);
router.post('/api/v1/profile/twofactorauthentication/disable', token, routes.users.verifyPassword, routes.profile.disableTwoFactorAuthentication);
// app password routes
router.get ('/api/v1/app_passwords', token, routes.appPasswords.list);
router.post('/api/v1/app_passwords', json, token, routes.appPasswords.add);
router.get ('/api/v1/app_passwords/:id', token, routes.appPasswords.get);
router.del ('/api/v1/app_passwords/:id', token, routes.appPasswords.del);
router.get ('/api/v1/app_passwords', token, routes.appPasswords.list);
router.post('/api/v1/app_passwords', token, routes.appPasswords.add);
router.get ('/api/v1/app_passwords/:id', token, routes.appPasswords.get);
router.del ('/api/v1/app_passwords/:id', token, routes.appPasswords.del);
// access tokens
router.get ('/api/v1/tokens', token, routes.tokens.getAll);
router.post('/api/v1/tokens', json, token, routes.tokens.add);
router.get ('/api/v1/tokens/:id', token, routes.tokens.verifyOwnership, routes.tokens.get);
router.del ('/api/v1/tokens/:id', token, routes.tokens.verifyOwnership, routes.tokens.del);
router.get ('/api/v1/tokens', token, routes.tokens.getAll);
router.post('/api/v1/tokens', token, routes.tokens.add);
router.get ('/api/v1/tokens/:id', token, routes.tokens.verifyOwnership, routes.tokens.get);
router.del ('/api/v1/tokens/:id', token, routes.tokens.verifyOwnership, routes.tokens.del);
// user routes
router.get ('/api/v1/users', token, authorizeUserManager, routes.users.list);
router.post('/api/v1/users', json, token, authorizeUserManager, routes.users.create);
router.get ('/api/v1/users/:userId', token, authorizeUserManager, routes.users.load, routes.users.get); // this is manage scope because it returns non-restricted fields
router.del ('/api/v1/users/:userId', token, authorizeUserManager, routes.users.load, routes.users.remove);
router.post('/api/v1/users/:userId', json, token, authorizeUserManager, routes.users.load, routes.users.update);
router.post('/api/v1/users/:userId/password', json, token, authorizeUserManager, routes.users.load, routes.users.changePassword);
router.put ('/api/v1/users/:userId/groups', json, token, authorizeUserManager, routes.users.load, routes.users.setGroups);
router.post('/api/v1/users/:userId/send_invite', json, token, authorizeUserManager, routes.users.load, routes.users.sendInvite);
router.post('/api/v1/users/:userId/create_invite', json, token, authorizeUserManager, routes.users.load, routes.users.createInvite);
router.post('/api/v1/users/:userId/avatar', json, token, authorizeUserManager, routes.users.load, multipart, routes.users.setAvatar);
router.del ('/api/v1/users/:userId/avatar', token, authorizeUserManager, routes.users.load, routes.users.clearAvatar);
router.get ('/api/v1/users', token, authorizeUserManager, routes.users.list);
router.post('/api/v1/users', token, authorizeUserManager, routes.users.create);
router.get ('/api/v1/users/:userId', token, authorizeUserManager, routes.users.load, routes.users.get); // this is manage scope because it returns non-restricted fields
router.del ('/api/v1/users/:userId', token, authorizeUserManager, routes.users.load, routes.users.remove);
router.post('/api/v1/users/:userId', token, authorizeUserManager, routes.users.load, routes.users.update);
router.post('/api/v1/users/:userId/password', token, authorizeUserManager, routes.users.load, routes.users.changePassword);
router.put ('/api/v1/users/:userId/groups', token, authorizeUserManager, routes.users.load, routes.users.setGroups);
router.post('/api/v1/users/:userId/send_invite', token, authorizeUserManager, routes.users.load, routes.users.sendInvite);
router.post('/api/v1/users/:userId/create_invite', token, authorizeUserManager, routes.users.load, routes.users.createInvite);
// Group management
router.get ('/api/v1/groups', token, authorizeUserManager, routes.groups.list);
router.post('/api/v1/groups', json, token, authorizeUserManager, routes.groups.create);
router.get ('/api/v1/groups/:groupId', token, authorizeUserManager, routes.groups.get);
router.put ('/api/v1/groups/:groupId/members', json, token, authorizeUserManager, routes.groups.updateMembers);
router.post('/api/v1/groups/:groupId', json, token, authorizeUserManager, routes.groups.update);
router.del ('/api/v1/groups/:groupId', token, authorizeUserManager, routes.groups.remove);
router.get ('/api/v1/groups', token, authorizeUserManager, routes.groups.list);
router.post('/api/v1/groups', token, authorizeUserManager, routes.groups.create);
router.get ('/api/v1/groups/:groupId', token, authorizeUserManager, routes.groups.get);
router.put ('/api/v1/groups/:groupId/members', token, authorizeUserManager, routes.groups.updateMembers);
router.post('/api/v1/groups/:groupId', token, authorizeUserManager, routes.groups.update);
router.del ('/api/v1/groups/:groupId', token, authorizeUserManager, routes.groups.remove);
// appstore and subscription routes
router.post('/api/v1/appstore/register_cloudron', json, token, authorizeAdmin, routes.appstore.registerCloudron);
router.post('/api/v1/appstore/user_token', json, token, authorizeAdmin, routes.appstore.createUserToken);
router.get ('/api/v1/appstore/subscription', token, authorizeAdmin, routes.appstore.getSubscription);
router.get ('/api/v1/appstore/apps', token, authorizeAdmin, routes.appstore.getApps);
router.get ('/api/v1/appstore/apps/:appstoreId', token, authorizeAdmin, routes.appstore.getApp);
router.get ('/api/v1/appstore/apps/:appstoreId/versions/:versionId', token, authorizeAdmin, routes.appstore.getAppVersion);
router.post('/api/v1/appstore/register_cloudron', token, authorizeAdmin, routes.appstore.registerCloudron);
router.post('/api/v1/appstore/user_token', token, authorizeAdmin, routes.appstore.createUserToken);
router.get ('/api/v1/appstore/subscription', token, authorizeAdmin, routes.appstore.getSubscription);
router.get ('/api/v1/appstore/apps', token, authorizeAdmin, routes.appstore.getApps);
router.get ('/api/v1/appstore/apps/:appstoreId', token, authorizeAdmin, routes.appstore.getApp);
router.get ('/api/v1/appstore/apps/:appstoreId/versions/:versionId', token, authorizeAdmin, routes.appstore.getAppVersion);
// app routes
router.post('/api/v1/apps/install', json, token, authorizeAdmin, routes.apps.install);
router.get ('/api/v1/apps', token, routes.apps.getApps);
router.get ('/api/v1/apps/:id', token, authorizeAdmin, routes.apps.load, routes.apps.getApp);
router.get ('/api/v1/apps/:id/icon', token, routes.apps.load, routes.apps.getAppIcon);
router.post('/api/v1/apps/:id/uninstall', json, token, authorizeAdmin, routes.apps.load, routes.apps.uninstall);
router.post('/api/v1/apps/:id/configure/access_restriction', json, token, authorizeAdmin, routes.apps.load, routes.apps.setAccessRestriction);
router.post('/api/v1/apps/:id/configure/label', json, token, authorizeAdmin, routes.apps.load, routes.apps.setLabel);
router.post('/api/v1/apps/:id/configure/tags', json, token, authorizeAdmin, routes.apps.load, routes.apps.setTags);
router.post('/api/v1/apps/:id/configure/icon', json, token, authorizeAdmin, routes.apps.load, routes.apps.setIcon);
router.post('/api/v1/apps/:id/configure/memory_limit', json, token, authorizeAdmin, routes.apps.load, routes.apps.setMemoryLimit);
router.post('/api/v1/apps/:id/configure/cpu_shares', json, token, authorizeAdmin, routes.apps.load, routes.apps.setCpuShares);
router.post('/api/v1/apps/:id/configure/automatic_backup', json, token, authorizeAdmin, routes.apps.load, routes.apps.setAutomaticBackup);
router.post('/api/v1/apps/:id/configure/automatic_update', json, token, authorizeAdmin, routes.apps.load, routes.apps.setAutomaticUpdate);
router.post('/api/v1/apps/:id/configure/reverse_proxy', json, token, authorizeAdmin, routes.apps.load, routes.apps.setReverseProxyConfig);
router.post('/api/v1/apps/:id/configure/cert', json, token, authorizeAdmin, routes.apps.load, routes.apps.setCertificate);
router.post('/api/v1/apps/:id/configure/debug_mode', json, token, authorizeAdmin, routes.apps.load, routes.apps.setDebugMode);
router.post('/api/v1/apps/:id/configure/mailbox', json, token, authorizeAdmin, routes.apps.load, routes.apps.setMailbox);
router.post('/api/v1/apps/:id/configure/env', json, token, authorizeAdmin, routes.apps.load, routes.apps.setEnvironment);
router.post('/api/v1/apps/:id/configure/data_dir', json, token, authorizeAdmin, routes.apps.load, routes.apps.setDataDir);
router.post('/api/v1/apps/:id/configure/location', json, token, authorizeAdmin, routes.apps.load, routes.apps.setLocation);
router.post('/api/v1/apps/:id/configure/binds', json, token, authorizeAdmin, routes.apps.load, routes.apps.setBinds);
router.post('/api/v1/apps/:id/repair', json, token, authorizeAdmin, routes.apps.load, routes.apps.repair);
router.post('/api/v1/apps/:id/update', json, token, authorizeAdmin, routes.apps.load, routes.apps.update);
router.post('/api/v1/apps/:id/restore', json, token, authorizeAdmin, routes.apps.load, routes.apps.restore);
router.post('/api/v1/apps/:id/import', json, token, authorizeAdmin, routes.apps.load, routes.apps.importApp);
router.post('/api/v1/apps/:id/backup', json, token, authorizeAdmin, routes.apps.load, routes.apps.backup);
router.get ('/api/v1/apps/:id/backups', token, authorizeAdmin, routes.apps.load, routes.apps.listBackups);
router.post('/api/v1/apps/:id/start', json, token, authorizeAdmin, routes.apps.load, routes.apps.start);
router.post('/api/v1/apps/:id/stop', json, token, authorizeAdmin, routes.apps.load, routes.apps.stop);
router.post('/api/v1/apps/:id/restart', json, token, authorizeAdmin, routes.apps.load, routes.apps.restart);
router.get ('/api/v1/apps/:id/logstream', token, authorizeAdmin, routes.apps.load, routes.apps.getLogStream);
router.get ('/api/v1/apps/:id/logs', token, authorizeAdmin, routes.apps.load, routes.apps.getLogs);
router.post('/api/v1/apps/:id/clone', json, token, authorizeAdmin, routes.apps.load, routes.apps.clone);
router.get ('/api/v1/apps/:id/download', token, authorizeAdmin, routes.apps.load, routes.apps.downloadFile);
router.post('/api/v1/apps/:id/upload', json, token, authorizeAdmin, multipart, routes.apps.load, routes.apps.uploadFile);
router.use ('/api/v1/apps/:id/files/*', token, authorizeAdmin, routes.filemanager.proxy);
router.get ('/api/v1/apps/:id/exec', token, authorizeAdmin, routes.apps.load, routes.apps.exec);
router.get ('/api/v1/apps', token, routes.apps.getApps);
router.get ('/api/v1/apps/:id', token, authorizeAdmin, routes.apps.load, routes.apps.getApp);
router.get ('/api/v1/apps/:id/icon', token, routes.apps.load, routes.apps.getAppIcon);
router.post('/api/v1/apps/install', token, authorizeAdmin, routes.apps.install);
router.post('/api/v1/apps/:id/uninstall', token, authorizeAdmin, routes.apps.load, routes.apps.uninstall);
router.post('/api/v1/apps/:id/configure/access_restriction', token, authorizeAdmin, routes.apps.load, routes.apps.setAccessRestriction);
router.post('/api/v1/apps/:id/configure/label', token, authorizeAdmin, routes.apps.load, routes.apps.setLabel);
router.post('/api/v1/apps/:id/configure/tags', token, authorizeAdmin, routes.apps.load, routes.apps.setTags);
router.post('/api/v1/apps/:id/configure/icon', token, authorizeAdmin, routes.apps.load, routes.apps.setIcon);
router.post('/api/v1/apps/:id/configure/memory_limit', token, authorizeAdmin, routes.apps.load, routes.apps.setMemoryLimit);
router.post('/api/v1/apps/:id/configure/cpu_shares', token, authorizeAdmin, routes.apps.load, routes.apps.setCpuShares);
router.post('/api/v1/apps/:id/configure/automatic_backup', token, authorizeAdmin, routes.apps.load, routes.apps.setAutomaticBackup);
router.post('/api/v1/apps/:id/configure/automatic_update', token, authorizeAdmin, routes.apps.load, routes.apps.setAutomaticUpdate);
router.post('/api/v1/apps/:id/configure/reverse_proxy', token, authorizeAdmin, routes.apps.load, routes.apps.setReverseProxyConfig);
router.post('/api/v1/apps/:id/configure/cert', token, authorizeAdmin, routes.apps.load, routes.apps.setCertificate);
router.post('/api/v1/apps/:id/configure/debug_mode', token, authorizeAdmin, routes.apps.load, routes.apps.setDebugMode);
router.post('/api/v1/apps/:id/configure/mailbox', token, authorizeAdmin, routes.apps.load, routes.apps.setMailbox);
router.post('/api/v1/apps/:id/configure/env', token, authorizeAdmin, routes.apps.load, routes.apps.setEnvironment);
router.post('/api/v1/apps/:id/configure/data_dir', token, authorizeAdmin, routes.apps.load, routes.apps.setDataDir);
router.post('/api/v1/apps/:id/configure/location', token, authorizeAdmin, routes.apps.load, routes.apps.setLocation);
router.post('/api/v1/apps/:id/configure/binds', token, authorizeAdmin, routes.apps.load, routes.apps.setBinds);
router.post('/api/v1/apps/:id/repair', token, authorizeAdmin, routes.apps.load, routes.apps.repair);
router.post('/api/v1/apps/:id/update', token, authorizeAdmin, routes.apps.load, routes.apps.update);
router.post('/api/v1/apps/:id/restore', token, authorizeAdmin, routes.apps.load, routes.apps.restore);
router.post('/api/v1/apps/:id/import', token, authorizeAdmin, routes.apps.load, routes.apps.importApp);
router.post('/api/v1/apps/:id/backup', token, authorizeAdmin, routes.apps.load, routes.apps.backup);
router.get ('/api/v1/apps/:id/backups', token, authorizeAdmin, routes.apps.load, routes.apps.listBackups);
router.post('/api/v1/apps/:id/start', token, authorizeAdmin, routes.apps.load, routes.apps.start);
router.post('/api/v1/apps/:id/stop', token, authorizeAdmin, routes.apps.load, routes.apps.stop);
router.post('/api/v1/apps/:id/restart', token, authorizeAdmin, routes.apps.load, routes.apps.restart);
router.get ('/api/v1/apps/:id/logstream', token, authorizeAdmin, routes.apps.load, routes.apps.getLogStream);
router.get ('/api/v1/apps/:id/logs', token, authorizeAdmin, routes.apps.load, routes.apps.getLogs);
router.get ('/api/v1/apps/:id/exec', token, authorizeAdmin, routes.apps.load, routes.apps.exec);
// websocket cannot do bearer authentication
router.get ('/api/v1/apps/:id/execws', routes.accesscontrol.websocketAuth.bind(null, users.ROLE_ADMIN), routes.apps.load, routes.apps.execWebSocket);
router.post('/api/v1/apps/:id/clone', token, authorizeAdmin, routes.apps.load, routes.apps.clone);
router.get ('/api/v1/apps/:id/download', token, authorizeAdmin, routes.apps.load, routes.apps.downloadFile);
router.post('/api/v1/apps/:id/upload', token, authorizeAdmin, multipart, routes.apps.load, routes.apps.uploadFile);
// branding routes
router.get ('/api/v1/branding/:setting', token, authorizeOwner, routes.branding.get);
router.post('/api/v1/branding/:setting', json, token, authorizeOwner, (req, res, next) => {
router.get ('/api/v1/branding/:setting', token, authorizeOwner, routes.branding.get);
router.post('/api/v1/branding/:setting', token, authorizeOwner, (req, res, next) => {
return req.params.setting === 'cloudron_avatar' ? multipart(req, res, next) : next();
}, routes.branding.set);
// settings routes (these are for the settings tab - avatar & name have public routes for normal users. see above)
router.get ('/api/v1/settings/:setting', token, authorizeAdmin, routes.settings.get);
router.post('/api/v1/settings/backup_config', json, token, authorizeOwner, routes.settings.setBackupConfig);
router.post('/api/v1/settings/:setting', json, token, authorizeAdmin, routes.settings.set);
router.get ('/api/v1/settings/:setting', token, authorizeAdmin, routes.settings.get);
router.post('/api/v1/settings/backup_config', token, authorizeOwner, routes.settings.setBackupConfig);
router.post('/api/v1/settings/:setting', token, authorizeAdmin, (req, res, next) => {
return req.params.setting === 'cloudron_avatar' ? multipart(req, res, next) : next();
}, routes.settings.set);
// email routes
router.get('/api/v1/mailserver/:pathname', token, (req, res, next) => {
@@ -253,48 +253,48 @@ function initializeExpressSync() {
authorizeAdmin(req, res, next);
}, routes.mailserver.proxy);
router.get ('/api/v1/mail/:domain', token, authorizeAdmin, routes.mail.getDomain);
router.get ('/api/v1/mail/:domain/status', token, authorizeAdmin, routes.mail.getStatus);
router.post('/api/v1/mail/:domain/mail_from_validation', json, token, authorizeAdmin, routes.mail.setMailFromValidation);
router.post('/api/v1/mail/:domain/catch_all', json, token, authorizeAdmin, routes.mail.setCatchAllAddress);
router.post('/api/v1/mail/:domain/relay', json, token, authorizeAdmin, routes.mail.setMailRelay);
router.post('/api/v1/mail/:domain/enable', json, token, authorizeAdmin, routes.mail.setMailEnabled);
router.post('/api/v1/mail/:domain/dns', json, token, authorizeAdmin, routes.mail.setDnsRecords);
router.post('/api/v1/mail/:domain/send_test_mail', json, token, authorizeAdmin, routes.mail.sendTestMail);
router.get ('/api/v1/mail/:domain/mailbox_count', token, authorizeAdmin, routes.mail.getMailboxCount);
router.get ('/api/v1/mail/:domain/mailboxes', token, authorizeAdmin, routes.mail.listMailboxes);
router.get ('/api/v1/mail/:domain/mailboxes/:name', token, authorizeAdmin, routes.mail.getMailbox);
router.post('/api/v1/mail/:domain/mailboxes', json, token, authorizeAdmin, routes.mail.addMailbox);
router.post('/api/v1/mail/:domain/mailboxes/:name', json, token, authorizeAdmin, routes.mail.updateMailbox);
router.del ('/api/v1/mail/:domain/mailboxes/:name', token, authorizeAdmin, routes.mail.removeMailbox);
router.get ('/api/v1/mail/:domain/mailboxes/:name/aliases', token, authorizeAdmin, routes.mail.getAliases);
router.put ('/api/v1/mail/:domain/mailboxes/:name/aliases', json, token, authorizeAdmin, routes.mail.setAliases);
router.get ('/api/v1/mail/:domain/lists', token, authorizeAdmin, routes.mail.getLists);
router.post('/api/v1/mail/:domain/lists', json, token, authorizeAdmin, routes.mail.addList);
router.get ('/api/v1/mail/:domain/lists/:name', token, authorizeAdmin, routes.mail.getList);
router.post('/api/v1/mail/:domain/lists/:name', json, token, authorizeAdmin, routes.mail.updateList);
router.del ('/api/v1/mail/:domain/lists/:name', token, authorizeAdmin, routes.mail.removeList);
router.get ('/api/v1/mail/:domain', token, authorizeAdmin, routes.mail.getDomain);
router.get ('/api/v1/mail/:domain/status', token, authorizeAdmin, routes.mail.getStatus);
router.post('/api/v1/mail/:domain/mail_from_validation', token, authorizeAdmin, routes.mail.setMailFromValidation);
router.post('/api/v1/mail/:domain/catch_all', token, authorizeAdmin, routes.mail.setCatchAllAddress);
router.post('/api/v1/mail/:domain/relay', token, authorizeAdmin, routes.mail.setMailRelay);
router.post('/api/v1/mail/:domain/enable', token, authorizeAdmin, routes.mail.setMailEnabled);
router.post('/api/v1/mail/:domain/dns', token, authorizeAdmin, routes.mail.setDnsRecords);
router.post('/api/v1/mail/:domain/send_test_mail', token, authorizeAdmin, routes.mail.sendTestMail);
router.get ('/api/v1/mail/:domain/mailboxes', token, authorizeAdmin, routes.mail.listMailboxes);
router.get ('/api/v1/mail/:domain/mailboxes/:name', token, authorizeAdmin, routes.mail.getMailbox);
router.post('/api/v1/mail/:domain/mailboxes', token, authorizeAdmin, routes.mail.addMailbox);
router.post('/api/v1/mail/:domain/mailboxes/:name', token, authorizeAdmin, routes.mail.updateMailbox);
router.del ('/api/v1/mail/:domain/mailboxes/:name', token, authorizeAdmin, routes.mail.removeMailbox);
router.get ('/api/v1/mail/:domain/mailboxes/:name/aliases', token, authorizeAdmin, routes.mail.getAliases);
router.put ('/api/v1/mail/:domain/mailboxes/:name/aliases', token, authorizeAdmin, routes.mail.setAliases);
// support routes
router.post('/api/v1/support/ticket', json, token, authorizeAdmin, routes.support.canCreateTicket, routes.support.createTicket);
router.get ('/api/v1/support/remote_support', token, authorizeAdmin, routes.support.getRemoteSupport);
router.post('/api/v1/support/remote_support', json, token, authorizeAdmin, routes.support.canEnableRemoteSupport, routes.support.enableRemoteSupport);
router.get ('/api/v1/mail/:domain/lists', token, authorizeAdmin, routes.mail.getLists);
router.post('/api/v1/mail/:domain/lists', token, authorizeAdmin, routes.mail.addList);
router.get ('/api/v1/mail/:domain/lists/:name', token, authorizeAdmin, routes.mail.getList);
router.post('/api/v1/mail/:domain/lists/:name', token, authorizeAdmin, routes.mail.updateList);
router.del ('/api/v1/mail/:domain/lists/:name', token, authorizeAdmin, routes.mail.removeList);
// support
router.post('/api/v1/support/ticket', token, authorizeAdmin, routes.support.canCreateTicket, routes.support.createTicket);
router.get ('/api/v1/support/remote_support', token, authorizeAdmin, routes.support.getRemoteSupport);
router.post('/api/v1/support/remote_support', token, authorizeAdmin, routes.support.canEnableRemoteSupport, routes.support.enableRemoteSupport);
// domain routes
router.post('/api/v1/domains', json, token, authorizeAdmin, routes.domains.add);
router.get ('/api/v1/domains', token, routes.domains.getAll);
router.get ('/api/v1/domains/:domain', token, authorizeAdmin, routes.domains.get); // this is manage scope because it returns non-restricted fields
router.put ('/api/v1/domains/:domain', json, token, authorizeAdmin, routes.domains.update);
router.del ('/api/v1/domains/:domain', token, authorizeAdmin, routes.domains.del);
router.get ('/api/v1/domains/:domain/dns_check', token, authorizeAdmin, routes.domains.checkDnsRecords);
router.post('/api/v1/domains', token, authorizeAdmin, routes.domains.add);
router.get ('/api/v1/domains', token, routes.domains.getAll);
router.get ('/api/v1/domains/:domain', token, authorizeAdmin, routes.domains.get); // this is manage scope because it returns non-restricted fields
router.put ('/api/v1/domains/:domain', token, authorizeAdmin, routes.domains.update);
router.del ('/api/v1/domains/:domain', token, authorizeAdmin, routes.domains.del);
router.get ('/api/v1/domains/:domain/dns_check', token, authorizeAdmin, routes.domains.checkDnsRecords);
// addon routes
router.get ('/api/v1/services', token, authorizeAdmin, routes.services.getAll);
router.get ('/api/v1/services/:service', token, authorizeAdmin, routes.services.get);
router.post('/api/v1/services/:service', json, token, authorizeAdmin, routes.services.configure);
router.get ('/api/v1/services/:service/logs', token, authorizeAdmin, routes.services.getLogs);
router.get ('/api/v1/services/:service/logstream', token, authorizeAdmin, routes.services.getLogStream);
router.post('/api/v1/services/:service/restart', json, token, authorizeAdmin, routes.services.restart);
router.get ('/api/v1/services', token, authorizeAdmin, routes.services.getAll);
router.get ('/api/v1/services/:service', token, authorizeAdmin, routes.services.get);
router.post('/api/v1/services/:service', token, authorizeAdmin, routes.services.configure);
router.get ('/api/v1/services/:service/logs', token, authorizeAdmin, routes.services.getLogs);
router.get ('/api/v1/services/:service/logstream', token, authorizeAdmin, routes.services.getLogStream);
router.post('/api/v1/services/:service/restart', token, authorizeAdmin, routes.services.restart);
// disable server socket "idle" timeout. we use the timeout middleware to handle timeouts on a route level
// we rely on nginx for timeouts on the TCP level (see client_header_timeout)

View File

@@ -50,9 +50,6 @@ exports = module.exports = {
getFooter: getFooter,
setFooter: setFooter,
getDirectoryConfig: getDirectoryConfig,
setDirectoryConfig: setDirectoryConfig,
getAppstoreListingConfig: getAppstoreListingConfig,
setAppstoreListingConfig: setAppstoreListingConfig,
@@ -89,7 +86,6 @@ exports = module.exports = {
SYSINFO_CONFIG_KEY: 'sysinfo_config',
APPSTORE_LISTING_CONFIG_KEY: 'appstore_listing_config',
SUPPORT_CONFIG_KEY: 'support_config',
DIRECTORY_CONFIG_KEY: 'directory_config',
// strings
APP_AUTOUPDATE_PATTERN_KEY: 'app_autoupdate_pattern',
@@ -135,8 +131,8 @@ var addons = require('./addons.js'),
let gDefaults = (function () {
var result = { };
result[exports.APP_AUTOUPDATE_PATTERN_KEY] = cron.DEFAULT_APP_AUTOUPDATE_PATTERN;
result[exports.BOX_AUTOUPDATE_PATTERN_KEY] = cron.DEFAULT_BOX_AUTOUPDATE_PATTERN;
result[exports.APP_AUTOUPDATE_PATTERN_KEY] = '00 30 1,3,5,23 * * *';
result[exports.BOX_AUTOUPDATE_PATTERN_KEY] = '00 00 1,3,5,23 * * *';
result[exports.TIME_ZONE_KEY] = 'America/Los_Angeles';
result[exports.CLOUDRON_NAME_KEY] = 'Cloudron';
result[exports.DYNAMIC_DNS_KEY] = false;
@@ -161,11 +157,6 @@ let gDefaults = (function () {
result[exports.SYSINFO_CONFIG_KEY] = {
provider: 'generic'
};
result[exports.DIRECTORY_CONFIG_KEY] = {
lockUserProfiles: false,
mandatory2FA: false
};
result[exports.ADMIN_DOMAIN_KEY] = '';
result[exports.ADMIN_FQDN_KEY] = '';
result[exports.API_SERVER_ORIGIN_KEY] = 'https://api.cloudron.io';
@@ -582,30 +573,6 @@ function setSysinfoConfig(sysinfoConfig, callback) {
});
}
function getDirectoryConfig(callback) {
assert.strictEqual(typeof callback, 'function');
settingsdb.get(exports.DIRECTORY_CONFIG_KEY, function (error, value) {
if (error && error.reason === BoxError.NOT_FOUND) return callback(null, gDefaults[exports.DIRECTORY_CONFIG_KEY]);
if (error) return callback(error);
callback(null, JSON.parse(value));
});
}
function setDirectoryConfig(directoryConfig, callback) {
assert.strictEqual(typeof directoryConfig, 'object');
assert.strictEqual(typeof callback, 'function');
settingsdb.set(exports.DIRECTORY_CONFIG_KEY, JSON.stringify(directoryConfig), function (error) {
if (error) return callback(error);
notifyChange(exports.DIRECTORY_CONFIG_KEY, directoryConfig);
callback(null);
});
}
function getAppstoreListingConfig(callback) {
assert.strictEqual(typeof callback, 'function');
@@ -729,7 +696,7 @@ function getAll(callback) {
result[exports.DEMO_KEY] = !!result[exports.DEMO_KEY];
// convert JSON objects
[exports.BACKUP_CONFIG_KEY, exports.DIRECTORY_CONFIG_KEY, exports.PLATFORM_CONFIG_KEY, exports.EXTERNAL_LDAP_KEY, exports.REGISTRY_CONFIG_KEY, exports.SYSINFO_CONFIG_KEY ].forEach(function (key) {
[exports.BACKUP_CONFIG_KEY, exports.PLATFORM_CONFIG_KEY, exports.EXTERNAL_LDAP_KEY, exports.REGISTRY_CONFIG_KEY, exports.SYSINFO_CONFIG_KEY ].forEach(function (key) {
result[key] = typeof result[key] === 'object' ? result[key] : safe.JSON.parse(result[key]);
});

View File

@@ -1,9 +1,6 @@
'use strict';
exports = module.exports = {
getBackupPath: getBackupPath,
checkPreconditions: checkPreconditions,
upload: upload,
download: download,
@@ -19,76 +16,25 @@ exports = module.exports = {
injectPrivateFields: injectPrivateFields
};
const PROVIDER_FILESYSTEM = 'filesystem';
const PROVIDER_SSHFS = 'sshfs';
const PROVIDER_CIFS = 'cifs';
const PROVIDER_NFS = 'nfs';
var assert = require('assert'),
BoxError = require('../boxerror.js'),
DataLayout = require('../datalayout.js'),
debug = require('debug')('box:storage/filesystem'),
df = require('@sindresorhus/df'),
EventEmitter = require('events'),
fs = require('fs'),
mkdirp = require('mkdirp'),
path = require('path'),
prettyBytes = require('pretty-bytes'),
readdirp = require('readdirp'),
safe = require('safetydance'),
shell = require('../shell.js');
// storage api
function getBackupPath(apiConfig) {
assert.strictEqual(typeof apiConfig, 'object');
if (apiConfig.provider === PROVIDER_SSHFS) return path.join(apiConfig.mountPoint, apiConfig.prefix);
if (apiConfig.provider === PROVIDER_CIFS) return path.join(apiConfig.mountPoint, apiConfig.prefix);
if (apiConfig.provider === PROVIDER_NFS) return path.join(apiConfig.mountPoint, apiConfig.prefix);
return apiConfig.backupFolder;
}
// the du call in the function below requires root
function checkPreconditions(apiConfig, dataLayout, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
assert.strictEqual(typeof callback, 'function');
let used = 0;
for (let localPath of dataLayout.localPaths()) {
debug(`checkPreconditions: getting disk usage of ${localPath}`);
let result = safe.child_process.execSync(`du -Dsb ${localPath}`, { encoding: 'utf8' });
if (!result) return callback(new BoxError(BoxError.FS_ERROR, safe.error));
used += parseInt(result, 10);
}
debug(`checkPreconditions: ${used} bytes`);
df.file(getBackupPath(apiConfig)).then(function (result) {
// Check filesystem is mounted so we don't write into the actual folder on disk
if (apiConfig.provider === PROVIDER_SSHFS || apiConfig.provider === PROVIDER_CIFS || apiConfig.provider === PROVIDER_NFS) {
if (result.mountpoint !== apiConfig.mountPoint) return callback(new BoxError(BoxError.FS_ERROR, `${apiConfig.mountPoint} is not mounted`));
} else if (apiConfig.provider === PROVIDER_FILESYSTEM && apiConfig.externalDisk) {
if (result.mountpoint === '/') return callback(new BoxError(BoxError.FS_ERROR, `${apiConfig.backupFolder} is not mounted`));
}
const needed = used + (1024 * 1024 * 1024); // check if there is atleast 1GB left afterwards
if (result.available <= needed) return callback(new BoxError(BoxError.FS_ERROR, `Not enough disk space for backup. Needed: ${prettyBytes(needed)} Available: ${prettyBytes(result.available)}`));
callback(null);
}).catch(function (error) {
callback(new BoxError(BoxError.FS_ERROR, error));
});
}
function upload(apiConfig, backupFilePath, sourceStream, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof backupFilePath, 'string');
assert.strictEqual(typeof sourceStream, 'object');
assert.strictEqual(typeof callback, 'function');
fs.mkdir(path.dirname(backupFilePath), { recursive: true }, function (error) {
mkdirp(path.dirname(backupFilePath), function (error) {
if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, error.message));
safe.fs.unlinkSync(backupFilePath); // remove any hardlink
@@ -109,9 +55,8 @@ function upload(apiConfig, backupFilePath, sourceStream, callback) {
// in test, upload() may or may not be called via sudo script
const BACKUP_UID = parseInt(process.env.SUDO_UID, 10) || process.getuid();
// sshfs and cifs handle ownership through the mount args
if (apiConfig.provider === PROVIDER_FILESYSTEM && !safe.fs.chownSync(backupFilePath, BACKUP_UID, BACKUP_UID)) return callback(new BoxError(BoxError.EXTERNAL_ERROR, 'Unable to chown:' + safe.error.message));
if (apiConfig.provider === PROVIDER_FILESYSTEM && !safe.fs.chownSync(path.dirname(backupFilePath), BACKUP_UID, BACKUP_UID)) return callback(new BoxError(BoxError.EXTERNAL_ERROR, 'Unable to chown:' + safe.error.message));
if (!safe.fs.chownSync(backupFilePath, BACKUP_UID, BACKUP_UID)) return callback(new BoxError(BoxError.EXTERNAL_ERROR, 'Unable to chown:' + safe.error.message));
if (!safe.fs.chownSync(path.dirname(backupFilePath), BACKUP_UID, BACKUP_UID)) return callback(new BoxError(BoxError.EXTERNAL_ERROR, 'Unable to chown:' + safe.error.message));
debug('upload %s: done.', backupFilePath);
@@ -170,17 +115,13 @@ function copy(apiConfig, oldFilePath, newFilePath) {
var events = new EventEmitter();
fs.mkdir(path.dirname(newFilePath), { recursive: true }, function (error) {
mkdirp(path.dirname(newFilePath), function (error) {
if (error) return events.emit('done', new BoxError(BoxError.EXTERNAL_ERROR, error.message));
events.emit('progress', `Copying ${oldFilePath} to ${newFilePath}`);
// sshfs and cifs do not allow preserving attributes
var cpOptions = apiConfig.provider === PROVIDER_FILESYSTEM ? '-a' : '-dR';
// this will hardlink backups saving space
cpOptions += apiConfig.noHardlinks ? '' : 'l';
var cpOptions = apiConfig.noHardlinks ? '-a' : '-al';
shell.spawn('copy', '/bin/cp', [ cpOptions, oldFilePath, newFilePath ], { }, function (error) {
if (error) return events.emit('done', new BoxError(BoxError.EXTERNAL_ERROR, error.message));
@@ -229,48 +170,25 @@ function testConfig(apiConfig, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof callback, 'function');
if (apiConfig.provider === PROVIDER_FILESYSTEM) {
if (!apiConfig.backupFolder || typeof apiConfig.backupFolder !== 'string') return callback(new BoxError(BoxError.BAD_FIELD, 'backupFolder must be non-empty string', { field: 'backupFolder' }));
if ('externalDisk' in apiConfig && typeof apiConfig.externalDisk !== 'boolean') return callback(new BoxError(BoxError.BAD_FIELD, 'externalDisk must be boolean', { field: 'externalDisk' }));
}
if (typeof apiConfig.backupFolder !== 'string') return callback(new BoxError(BoxError.BAD_FIELD, 'backupFolder must be string', { field: 'backupFolder' }));
if (apiConfig.provider === PROVIDER_SSHFS || apiConfig.provider === PROVIDER_CIFS || apiConfig.provider === PROVIDER_NFS) {
if (!apiConfig.mountPoint || typeof apiConfig.mountPoint !== 'string') return callback(new BoxError(BoxError.BAD_FIELD, 'mountPoint must be non-empty string', { field: 'mountPoint' }));
if (typeof apiConfig.prefix !== 'string') return callback(new BoxError(BoxError.BAD_FIELD, 'prefix must be a string', { field: 'prefix' }));
const mounts = safe.fs.readFileSync('/proc/mounts', 'utf8');
const mountInfo = mounts.split('\n').filter(function (l) { return l.indexOf(apiConfig.mountPoint) !== -1; })[0];
if (!mountInfo) return callback(new BoxError(BoxError.BAD_FIELD, `${apiConfig.mountPoint} is not mounted`, { field: 'mountPoint' }));
if (apiConfig.provider === PROVIDER_SSHFS && !mountInfo.split(' ').find(i => i === 'fuse.sshfs')) return callback(new BoxError(BoxError.BAD_FIELD, 'mountPoint must be a "fuse.sshfs" filesystem', { field: 'mountPoint' }));
if (apiConfig.provider === PROVIDER_CIFS && !mountInfo.split(' ').find(i => i === 'cifs')) return callback(new BoxError(BoxError.BAD_FIELD, 'mountPoint must be a "cifs" filesystem', { field: 'mountPoint' }));
if (apiConfig.provider === PROVIDER_NFS && !mountInfo.split(' ').find(i => i === 'nfs')) return callback(new BoxError(BoxError.BAD_FIELD, 'mountPoint must be a "nfs" filesystem', { field: 'mountPoint' }));
}
// common checks
const backupPath = getBackupPath(apiConfig);
const field = apiConfig.provider === PROVIDER_FILESYSTEM ? 'backupFolder' : 'prefix';
const stat = safe.fs.statSync(backupPath);
if (!stat) return callback(new BoxError(BoxError.BAD_FIELD, 'Directory does not exist or cannot be accessed: ' + safe.error.message), { field });
if (!stat.isDirectory()) return callback(new BoxError(BoxError.BAD_FIELD, 'Backup location is not a directory', { field }));
if (!safe.fs.mkdirSync(path.join(backupPath, 'snapshot')) && safe.error.code !== 'EEXIST') {
if (safe.error && safe.error.code === 'EACCES') return callback(new BoxError(BoxError.BAD_FIELD, `Access denied. Run "chown yellowtent:yellowtent ${backupPath}" on the server`, { field }));
return callback(new BoxError(BoxError.BAD_FIELD, safe.error.message, { field }));
}
if (!safe.fs.writeFileSync(path.join(backupPath, 'cloudron-testfile'), 'testcontent')) {
return callback(new BoxError(BoxError.BAD_FIELD, `Unable to create test file as 'yellowtent' user in ${backupPath}: ${safe.error.message}. Check dir/mount permissions`, { field }));
}
if (!safe.fs.unlinkSync(path.join(backupPath, 'cloudron-testfile'))) {
return callback(new BoxError(BoxError.BAD_FIELD, `Unable to remove test file as 'yellowtent' user in ${backupPath}: ${safe.error.message}. Check dir/mount permissions`, { field }));
}
if (!apiConfig.backupFolder) return callback(new BoxError(BoxError.BAD_FIELD, 'backupFolder is required', { field: 'backupFolder' }));
if ('noHardlinks' in apiConfig && typeof apiConfig.noHardlinks !== 'boolean') return callback(new BoxError(BoxError.BAD_FIELD, 'noHardlinks must be boolean', { field: 'noHardLinks' }));
callback(null);
if ('externalDisk' in apiConfig && typeof apiConfig.externalDisk !== 'boolean') return callback(new BoxError(BoxError.BAD_FIELD, 'externalDisk must be boolean', { field: 'externalDisk' }));
fs.stat(apiConfig.backupFolder, function (error, result) {
if (error) return callback(new BoxError(BoxError.BAD_FIELD, 'Directory does not exist or cannot be accessed: ' + error.message), { field: 'backupFolder' });
if (!result.isDirectory()) return callback(new BoxError(BoxError.BAD_FIELD, 'Backup location is not a directory', { field: 'backupFolder' }));
mkdirp(path.join(apiConfig.backupFolder, 'snapshot'), function (error) {
if (error && error.code === 'EACCES') return callback(new BoxError(BoxError.BAD_FIELD, `Access denied. Run "chown yellowtent:yellowtent ${apiConfig.backupFolder}" on the server`, { field: 'backupFolder' }));
if (error) return callback(new BoxError(BoxError.BAD_FIELD, error.message, { field: 'backupFolder' }));
callback(null);
});
});
}
function removePrivateFields(apiConfig) {

View File

@@ -1,9 +1,6 @@
'use strict';
exports = module.exports = {
getBackupPath: getBackupPath,
checkPreconditions: checkPreconditions,
upload: upload,
download: download,
copy: copy,
@@ -26,7 +23,6 @@ var assert = require('assert'),
async = require('async'),
BoxError = require('../boxerror.js'),
constants = require('../constants.js'),
DataLayout = require('../datalayout.js'),
debug = require('debug')('box:storage/gcs'),
EventEmitter = require('events'),
GCS = require('@google-cloud/storage').Storage,
@@ -61,20 +57,6 @@ function getBucket(apiConfig) {
}
// storage api
function getBackupPath(apiConfig) {
assert.strictEqual(typeof apiConfig, 'object');
return apiConfig.prefix;
}
function checkPreconditions(apiConfig, dataLayout, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
assert.strictEqual(typeof callback, 'function');
callback(null);
}
function upload(apiConfig, backupFilePath, sourceStream, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof backupFilePath, 'string');

View File

@@ -11,9 +11,6 @@
// for the other API calls we leave it to the backend to retry. this allows
// them to tune the concurrency based on failures/rate limits accordingly
exports = module.exports = {
getBackupPath: getBackupPath,
checkPreconditions: checkPreconditions,
upload: upload,
download: download,
@@ -32,7 +29,6 @@ exports = module.exports = {
var assert = require('assert'),
BoxError = require('../boxerror.js'),
DataLayout = require('../datalayout.js'),
EventEmitter = require('events');
function removePrivateFields(apiConfig) {
@@ -45,21 +41,6 @@ function injectPrivateFields(newConfig, currentConfig) {
// in-place injection of tokens and api keys which came in with constants.SECRET_PLACEHOLDER
}
function getBackupPath(apiConfig) {
assert.strictEqual(typeof apiConfig, 'object');
// Result: path at the backup storage
return '/';
}
function checkPreconditions(apiConfig, dataLayout, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
assert.strictEqual(typeof callback, 'function');
callback(null);
}
function upload(apiConfig, backupFilePath, sourceStream, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof backupFilePath, 'string');

View File

@@ -1,9 +1,6 @@
'use strict';
exports = module.exports = {
getBackupPath: getBackupPath,
checkPreconditions: checkPreconditions,
upload: upload,
download: download,
downloadDir: downloadDir,
@@ -21,23 +18,9 @@ exports = module.exports = {
var assert = require('assert'),
BoxError = require('../boxerror.js'),
DataLayout = require('../datalayout.js'),
debug = require('debug')('box:storage/noop'),
EventEmitter = require('events');
function getBackupPath(apiConfig) {
assert.strictEqual(typeof apiConfig, 'object');
return '';
}
function checkPreconditions(apiConfig, dataLayout, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
assert.strictEqual(typeof callback, 'function');
callback(null);
}
function upload(apiConfig, backupFilePath, sourceStream, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof backupFilePath, 'string');

View File

@@ -1,9 +1,6 @@
'use strict';
exports = module.exports = {
getBackupPath: getBackupPath,
checkPreconditions: checkPreconditions,
upload: upload,
download: download,
copy: copy,
@@ -28,7 +25,6 @@ var assert = require('assert'),
BoxError = require('../boxerror.js'),
chunk = require('lodash.chunk'),
constants = require('../constants.js'),
DataLayout = require('../datalayout.js'),
debug = require('debug')('box:storage/s3'),
EventEmitter = require('events'),
https = require('https'),
@@ -58,7 +54,7 @@ function getS3Config(apiConfig, callback) {
var credentials = {
signatureVersion: apiConfig.signatureVersion || 'v4',
s3ForcePathStyle: false, // Use vhost style instead of path style - https://forums.aws.amazon.com/ann.jspa?annID=6776
s3ForcePathStyle: true, // Force use path-style url (http://endpoint/bucket/path) instead of host-style (http://bucket.endpoint/path)
accessKeyId: apiConfig.accessKeyId,
secretAccessKey: apiConfig.secretAccessKey,
region: apiConfig.region || 'us-east-1',
@@ -74,33 +70,13 @@ function getS3Config(apiConfig, callback) {
if (apiConfig.endpoint) credentials.endpoint = apiConfig.endpoint;
if (apiConfig.s3ForcePathStyle === true) credentials.s3ForcePathStyle = true;
// s3 endpoint names come from the SDK
const isHttps = (credentials.endpoint && credentials.endpoint.startsWith('https://')) || apiConfig.provider === 's3';
if (isHttps) { // only set agent for https calls. otherwise, it crashes
if (apiConfig.acceptSelfSignedCerts || apiConfig.bucket.includes('.')) {
credentials.httpOptions.agent = new https.Agent({ rejectUnauthorized: false });
}
if (apiConfig.acceptSelfSignedCerts === true && credentials.endpoint && credentials.endpoint.startsWith('https://')) {
credentials.httpOptions.agent = new https.Agent({ rejectUnauthorized: false });
}
callback(null, credentials);
}
// storage api
function getBackupPath(apiConfig) {
assert.strictEqual(typeof apiConfig, 'object');
return apiConfig.prefix;
}
function checkPreconditions(apiConfig, dataLayout, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert(dataLayout instanceof DataLayout, 'dataLayout must be a DataLayout');
assert.strictEqual(typeof callback, 'function');
callback(null);
}
function upload(apiConfig, backupFilePath, sourceStream, callback) {
assert.strictEqual(typeof apiConfig, 'object');
assert.strictEqual(typeof backupFilePath, 'string');
@@ -440,16 +416,10 @@ function testConfig(apiConfig, callback) {
// the node module seems to incorrectly accept bucket name with '/'
if (apiConfig.bucket.includes('/')) return callback(new BoxError(BoxError.BAD_FIELD, 'bucket name cannot contain "/"', { field: 'bucket' }));
// names must be lowercase and start with a letter or number. can contain dashes
if (apiConfig.bucket.includes('_') || apiConfig.bucket.match(/[A-Z]/)) return callback(new BoxError(BoxError.BAD_FIELD, 'bucket name cannot contain "_" or capitals', { field: 'bucket' }));
if (typeof apiConfig.prefix !== 'string') return callback(new BoxError(BoxError.BAD_FIELD, 'prefix must be a string', { field: 'prefix' }));
if ('signatureVersion' in apiConfig && typeof apiConfig.signatureVersion !== 'string') return callback(new BoxError(BoxError.BAD_FIELD, 'signatureVersion must be a string', { field: 'signatureVersion' }));
if ('endpoint' in apiConfig && typeof apiConfig.endpoint !== 'string') return callback(new BoxError(BoxError.BAD_FIELD, 'endpoint must be a string', { field: 'endpoint' }));
if ('acceptSelfSignedCerts' in apiConfig && typeof apiConfig.acceptSelfSignedCerts !== 'boolean') return callback(new BoxError(BoxError.BAD_FIELD, 'acceptSelfSignedCerts must be a boolean', { field: 'acceptSelfSignedCerts' }));
if ('s3ForcePathStyle' in apiConfig && typeof apiConfig.s3ForcePathStyle !== 'boolean') return callback(new BoxError(BoxError.BAD_FIELD, 's3ForcePathStyle must be a boolean', { field: 's3ForcePathStyle' }));
// attempt to upload and delete a file with new credentials
getS3Config(apiConfig, function (error, credentials) {
if (error) return callback(error);

View File

@@ -12,7 +12,7 @@ let assert = require('assert'),
once = require('once'),
path = require('path'),
paths = require('./paths.js'),
safe = require('safetydance'),
settings = require('./settings.js'),
shell = require('./shell.js');
// the logic here is also used in the cloudron-support tool
@@ -24,7 +24,7 @@ function sshInfo() {
if (constants.TEST) {
filePath = path.join(paths.baseDir(), 'authorized_keys');
user = process.getuid();
} else if (safe.fs.existsSync('/home/ubuntu')) { // yellowtent user won't have access to anything deeper
} else if (settings.provider() === 'ec2' || settings.provider() === 'lightsail' || settings.provider() === 'ami') {
filePath = '/home/ubuntu/.ssh/authorized_keys';
user = 'ubuntu';
} else {

View File

@@ -29,6 +29,7 @@ describe('Apps', function () {
fallbackEmail: 'admin@me.com',
salt: 'morton',
createdAt: 'sometime back',
modifiedAt: 'now',
resetToken: hat(256),
displayName: '',
groupIds: [],
@@ -44,6 +45,7 @@ describe('Apps', function () {
fallbackEmail: 'safe@me.com',
salt: 'morton',
createdAt: 'sometime back',
modifiedAt: 'now',
resetToken: hat(256),
displayName: '',
groupIds: [],
@@ -59,6 +61,7 @@ describe('Apps', function () {
fallbackEmail: 'safe1@me.com',
salt: 'morton',
createdAt: 'sometime back',
modifiedAt: 'now',
resetToken: hat(256),
displayName: '',
groupIds: [ 'somegroup' ],
@@ -68,13 +71,11 @@ describe('Apps', function () {
var GROUP_0 = {
id: 'somegroup',
name: 'group0',
source: ''
name: 'group0'
};
var GROUP_1 = {
id: 'anothergroup',
name: 'group1',
source: 'ldap'
name: 'group1'
};
const DOMAIN_0 = {
@@ -179,8 +180,8 @@ describe('Apps', function () {
userdb.add.bind(null, ADMIN_0.id, ADMIN_0),
userdb.add.bind(null, USER_0.id, USER_0),
userdb.add.bind(null, USER_1.id, USER_1),
groupdb.add.bind(null, GROUP_0.id, GROUP_0.name, GROUP_0.source),
groupdb.add.bind(null, GROUP_1.id, GROUP_1.name, GROUP_1.source),
groupdb.add.bind(null, GROUP_0.id, GROUP_0.name),
groupdb.add.bind(null, GROUP_1.id, GROUP_1.name),
groups.addMember.bind(null, GROUP_0.id, USER_1.id),
appdb.add.bind(null, APP_0.id, APP_0.appStoreId, APP_0.manifest, APP_0.location, APP_0.domain, apps._translatePortBindings(APP_0.portBindings, APP_0.manifest), APP_0),
appdb.add.bind(null, APP_1.id, APP_1.appStoreId, APP_1.manifest, APP_1.location, APP_1.domain, apps._translatePortBindings(APP_1.portBindings, APP_1.manifest), APP_1),

View File

@@ -50,10 +50,58 @@ describe('Appstore', function () {
beforeEach(nock.cleanAll);
it('cannot send alive status without registering', function (done) {
appstore.sendAliveStatus(function (error) {
expect(error).to.be.ok();
expect(error.reason).to.equal(BoxError.LICENSE_ERROR); // missing token
done();
});
});
it('can set cloudron token', function (done) {
settingsdb.set(settings.CLOUDRON_TOKEN_KEY, APPSTORE_TOKEN, done);
});
it('can send alive status', function (done) {
var scope = nock(MOCK_API_SERVER_ORIGIN)
.post(`/api/v1/alive?accessToken=${APPSTORE_TOKEN}`, function (body) {
expect(body.version).to.be.a('string');
expect(body.adminFqdn).to.be.a('string');
expect(body.provider).to.be.a('string');
expect(body.backendSettings).to.be.an('object');
expect(body.backendSettings.backupConfig).to.be.an('object');
expect(body.backendSettings.backupConfig.provider).to.be.a('string');
expect(body.backendSettings.backupConfig.hardlinks).to.be.a('boolean');
expect(body.backendSettings.domainConfig).to.be.an('object');
expect(body.backendSettings.domainConfig.count).to.be.a('number');
expect(body.backendSettings.domainConfig.domains).to.be.an('array');
expect(body.backendSettings.mailConfig).to.be.an('object');
expect(body.backendSettings.mailConfig.outboundCount).to.be.a('number');
expect(body.backendSettings.mailConfig.inboundCount).to.be.a('number');
expect(body.backendSettings.mailConfig.catchAllCount).to.be.a('number');
expect(body.backendSettings.mailConfig.relayProviders).to.be.an('array');
expect(body.backendSettings.appAutoupdatePattern).to.be.a('string');
expect(body.backendSettings.boxAutoupdatePattern).to.be.a('string');
expect(body.backendSettings.sysinfoProvider).to.be.a('string');
expect(body.backendSettings.timeZone).to.be.a('string');
expect(body.machine).to.be.an('object');
expect(body.machine.cpus).to.be.an('array');
expect(body.machine.totalmem).to.be.an('number');
expect(body.events).to.be.an('object');
expect(body.events.lastLogin).to.be.an('number');
return true;
})
.reply(201, { cloudron: { id: CLOUDRON_ID }});
appstore.sendAliveStatus(function (error) {
expect(error).to.not.be.ok();
expect(scope.isDone()).to.be.ok();
done();
});
});
it('can purchase an app', function (done) {
var scope1 = nock(MOCK_API_SERVER_ORIGIN)
.post(`/api/v1/cloudronapps?accessToken=${APPSTORE_TOKEN}`, function () { return true; })

View File

@@ -73,6 +73,7 @@ var ADMIN = {
fallbackEmail: 'admin@me.com',
salt: 'morton',
createdAt: 'sometime back',
modifiedAt: 'now',
resetToken: '',
displayName: '',
role: 'owner',

View File

@@ -6,22 +6,20 @@
'use strict';
var appdb = require('../appdb.js'),
async = require('async'),
var async = require('async'),
backupdb = require('../backupdb.js'),
backups = require('../backups.js'),
BoxError = require('../boxerror.js'),
createTree = require('./common.js').createTree,
database = require('../database'),
DataLayout = require('../datalayout.js'),
domains = require('../domains.js'),
expect = require('expect.js'),
fs = require('fs'),
os = require('os'),
mkdirp = require('mkdirp'),
moment = require('moment'),
path = require('path'),
rimraf = require('rimraf'),
settingsdb = require('../settingsdb.js'),
settings = require('../settings.js'),
tasks = require('../tasks.js');
@@ -38,7 +36,7 @@ function createBackup(callback) {
if (p.errorMessage) return callback(new Error('backup failed:' + p));
if (!p.result) return callback(new Error('backup has no result:' + p));
backups.getByIdentifierAndStatePaged(backups.BACKUP_IDENTIFIER_BOX, backups.BACKUP_STATE_NORMAL, 1, 1, function (error, result) {
backups.getByStatePaged(backupdb.BACKUP_STATE_NORMAL, 1, 1, function (error, result) {
if (error) return callback(error);
if (result.length !== 1) return callback(new Error('result is not of length 1'));
@@ -72,123 +70,83 @@ function cleanupBackups(callback) {
}
describe('retention policy', function () {
it('keeps latest', function () {
let backup = { creationTime: moment().subtract(5, 's').toDate(), state: backups.BACKUP_STATE_NORMAL };
backups._applyBackupRetentionPolicy([backup], { keepWithinSecs: 1, keepLatest: true }, []);
expect(backup.keepReason).to.be('latest');
});
it('does not keep latest', function () {
let backup = { creationTime: moment().subtract(5, 's').toDate(), state: backups.BACKUP_STATE_NORMAL };
backups._applyBackupRetentionPolicy([backup], { keepWithinSecs: 1, keepLatest: false }, []);
expect(backup.keepReason).to.be(undefined);
it('always keeps if reason is set', function () {
let backup = { keepReason: 'somereason' };
backups._applyBackupRetentionPolicy([backup], { keepWithinSecs: 1 });
expect(backup.keepReason).to.be('somereason');
});
it('always keeps forever policy', function () {
let backup = { creationTime: new Date() };
backups._applyBackupRetentionPolicy([backup], { keepWithinSecs: -1, keepLatest: true }, []);
backups._applyBackupRetentionPolicy([backup], { keepWithinSecs: -1 });
expect(backup.keepReason).to.be('keepWithinSecs');
});
it('preserveSecs takes precedence', function () {
let backup = { creationTime: new Date(), preserveSecs: 3000 };
backups._applyBackupRetentionPolicy([backup], { keepWithinSecs: 1, keepLatest: true }, []);
backups._applyBackupRetentionPolicy([backup], { keepWithinSecs: 1 });
expect(backup.keepReason).to.be('preserveSecs');
});
it('1 daily', function () {
let b = [
{ id: '0', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().toDate() },
{ id: '1', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(1, 'h').toDate() },
{ id: '2', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(3, 'h').toDate() },
{ id: '3', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(20, 'h').toDate() },
{ id: '4', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(5, 'd').toDate() }
{ id: '1', creationTime: moment().toDate() },
{ id: '2', creationTime: moment().subtract(3, 'h').toDate() },
{ id: '3', creationTime: moment().subtract(20, 'h').toDate() },
{ id: '4', creationTime: moment().subtract(5, 'd').toDate() }
];
backups._applyBackupRetentionPolicy(b, { keepDaily: 1, keepLatest: true }, []);
backups._applyBackupRetentionPolicy(b, { keepDaily: 1 });
expect(b[0].keepReason).to.be('keepDaily');
expect(b[1].keepReason).to.be(undefined);
expect(b[2].keepReason).to.be(undefined);
expect(b[3].keepReason).to.be(undefined);
expect(b[3].keepReason).to.be(undefined);
});
it('2 daily, 1 weekly', function () {
let b = [
{ id: '0', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().toDate() },
{ id: '1', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(1, 'h').toDate() },
{ id: '2', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(3, 'h').toDate() },
{ id: '3', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(26, 'h').toDate() },
{ id: '4', state: backups.BACKUP_STATE_ERROR, creationTime: moment().subtract(32, 'h').toDate() },
{ id: '5', state: backups.BACKUP_STATE_CREATING, creationTime: moment().subtract(50, 'h').toDate() },
{ id: '6', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(5, 'd').toDate() }
{ id: '1', creationTime: moment().toDate() },
{ id: '2', creationTime: moment().subtract(3, 'h').toDate() },
{ id: '3', creationTime: moment().subtract(26, 'h').toDate() },
{ id: '4', creationTime: moment().subtract(5, 'd').toDate() }
];
backups._applyBackupRetentionPolicy(b, { keepDaily: 2, keepWeekly: 1, keepLatest: false }, []);
backups._applyBackupRetentionPolicy(b, { keepDaily: 2, keepWeekly: 1 });
expect(b[0].keepReason).to.be('keepDaily'); // today
expect(b[1].keepReason).to.be('keepWeekly'); // today
expect(b[2].keepReason).to.be(undefined);
expect(b[3].keepReason).to.be('keepDaily'); // yesterday
expect(b[4].discardReason).to.be('error'); // errored
expect(b[5].discardReason).to.be('creating-too-long'); // creating for too long
expect(b[6].keepReason).to.be(undefined); // outside retention policy
expect(b[1].keepReason).to.be('keepWeekly');
expect(b[2].keepReason).to.be('keepDaily'); // yesterday
expect(b[3].keepReason).to.be(undefined);
});
it('2 daily, 3 monthly, 1 yearly', function () {
it('2 daily, 6 monthly, 1 yearly', function () {
let b = [
{ id: '0', state: backups.BACKUP_STATE_CREATING, creationTime: moment().toDate() },
{ id: '1', state: backups.BACKUP_STATE_ERROR, creationTime: moment().toDate() },
{ id: '2', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().toDate() },
{ id: '3', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(1, 'h').toDate() },
{ id: '4', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(3, 'h').toDate() },
{ id: '5', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(26, 'h').toDate() },
{ id: '6', state: backups.BACKUP_STATE_CREATING, creationTime: moment().subtract(49, 'h').toDate() },
{ id: '7', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(51, 'd').toDate() },
{ id: '8', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(84, 'd').toDate() },
{ id: '9', state: backups.BACKUP_STATE_NORMAL, creationTime: moment().subtract(97, 'd').toDate() },
{ id: '1', creationTime: moment().toDate() },
{ id: '2', creationTime: moment().subtract(3, 'h').toDate() },
{ id: '3', creationTime: moment().subtract(26, 'h').toDate() },
{ id: '4', creationTime: moment().subtract(5, 'd').toDate() }
];
backups._applyBackupRetentionPolicy(b, { keepDaily: 2, keepMonthly: 3, keepYearly: 1, keepLatest: true }, []);
expect(b[0].keepReason).to.be('creating');
expect(b[1].discardReason).to.be('error'); // errored
expect(b[2].keepReason).to.be('keepDaily');
expect(b[3].keepReason).to.be('keepMonthly');
expect(b[4].keepReason).to.be('keepYearly');
expect(b[5].keepReason).to.be('keepDaily'); // yesterday
expect(b[6].discardReason).to.be('creating-too-long'); // errored
expect(b[7].keepReason).to.be('keepMonthly');
expect(b[8].keepReason).to.be('keepMonthly');
expect(b[9].keepReason).to.be(undefined);
backups._applyBackupRetentionPolicy(b, { keepDaily: 2, keepMonthly: 6, keepYearly: 1 });
expect(b[0].keepReason).to.be('keepDaily'); // today
expect(b[1].keepReason).to.be('keepMonthly');
expect(b[2].keepReason).to.be('keepDaily'); // yesterday
expect(b[3].keepReason).to.be('keepYearly');
});
});
describe('backups', function () {
const DOMAIN_0 = {
domain: 'example.com',
zoneName: 'example.com',
provider: 'manual',
config: { },
fallbackCertificate: null,
tlsConfig: { provider: 'fallback' }
};
const AUDIT_SOURCE = { ip: '1.2.3.4' };
const manifest = { version: '0.0.1', manifestVersion: 1, dockerImage: 'foo', healthCheckPath: '/', httpPort: 3, title: 'ok', addons: { } };
before(function (done) {
const BACKUP_DIR = path.join(os.tmpdir(), 'cloudron-backup-test');
async.series([
fs.mkdir.bind(null, BACKUP_DIR, { recursive: true }),
mkdirp.bind(null, BACKUP_DIR),
database.initialize,
database._clear,
settings.setAdmin.bind(null, DOMAIN_0.domain, 'my.' + DOMAIN_0.domain),
domains.add.bind(null, DOMAIN_0.domain, DOMAIN_0, AUDIT_SOURCE),
settingsdb.set.bind(null, settings.BACKUP_CONFIG_KEY, JSON.stringify({
settings.setBackupConfig.bind(null, {
provider: 'filesystem',
password: 'supersecret',
backupFolder: BACKUP_DIR,
retentionPolicy: { keepWithinSecs: 1 },
format: 'tgz'
}))
})
], done);
});
@@ -201,35 +159,29 @@ describe('backups', function () {
describe('cleanup', function () {
var BACKUP_0 = {
id: 'backup-box-0',
identifier: 'box',
encryptionVersion: null,
packageVersion: '1.0.0',
type: backups.BACKUP_TYPE_BOX,
state: backups.BACKUP_STATE_NORMAL,
type: backupdb.BACKUP_TYPE_BOX,
dependsOn: [ 'backup-app-00', 'backup-app-01' ],
manifest: null,
format: 'tgz'
};
var BACKUP_0_APP_0 = { // backup of installed app
var BACKUP_0_APP_0 = {
id: 'backup-app-00',
identifier: 'app0',
encryptionVersion: null,
packageVersion: '1.0.0',
type: backups.BACKUP_TYPE_APP,
state: backups.BACKUP_STATE_NORMAL,
type: backupdb.BACKUP_TYPE_APP,
dependsOn: [],
manifest: null,
format: 'tgz'
};
var BACKUP_0_APP_1 = { // this app is uninstalled
var BACKUP_0_APP_1 = {
id: 'backup-app-01',
identifier: 'app1',
encryptionVersion: null,
packageVersion: '1.0.0',
type: backups.BACKUP_TYPE_APP,
state: backups.BACKUP_STATE_NORMAL,
type: backupdb.BACKUP_TYPE_APP,
dependsOn: [],
manifest: null,
format: 'tgz'
@@ -239,9 +191,7 @@ describe('backups', function () {
id: 'backup-box-1',
encryptionVersion: null,
packageVersion: '1.0.0',
type: backups.BACKUP_TYPE_BOX,
state: backups.BACKUP_STATE_NORMAL,
identifier: 'box',
type: backupdb.BACKUP_TYPE_BOX,
dependsOn: [ 'backup-app-10', 'backup-app-11' ],
manifest: null,
format: 'tgz'
@@ -251,9 +201,7 @@ describe('backups', function () {
id: 'backup-app-10',
encryptionVersion: null,
packageVersion: '1.0.0',
type: backups.BACKUP_TYPE_APP,
state: backups.BACKUP_STATE_NORMAL,
identifier: 'app0',
type: backupdb.BACKUP_TYPE_APP,
dependsOn: [],
manifest: null,
format: 'tgz'
@@ -263,22 +211,12 @@ describe('backups', function () {
id: 'backup-app-11',
encryptionVersion: null,
packageVersion: '1.0.0',
type: backups.BACKUP_TYPE_APP,
state: backups.BACKUP_STATE_NORMAL,
identifier: 'app1',
type: backupdb.BACKUP_TYPE_APP,
dependsOn: [],
manifest: null,
format: 'tgz'
};
before(function (done) {
appdb.add('app0', 'appStoreId', manifest, 'location', DOMAIN_0.domain, [ ] /* portBindings */, { installationState: 'installed', runState: 'running', mailboxDomain: DOMAIN_0.domain }, done);
});
after(function (done) {
appdb.del('app0', done);
});
it('succeeds without backups', function (done) {
cleanupBackups(done);
});
@@ -295,12 +233,12 @@ describe('backups', function () {
cleanupBackups(function (error) {
expect(error).to.not.be.ok();
backupdb.getByTypePaged(backups.BACKUP_TYPE_BOX, 1, 1000, function (error, result) {
backupdb.getByTypePaged(backupdb.BACKUP_TYPE_BOX, 1, 1000, function (error, result) {
expect(error).to.not.be.ok();
expect(result.length).to.equal(1);
expect(result[0].id).to.equal(BACKUP_1.id);
// check that app backups are gone as well. only backup_1 will remain
// check that app backups are gone as well
backupdb.get(BACKUP_0_APP_0.id, function (error) {
expect(error).to.be.a(BoxError);
expect(error.reason).to.equal(BoxError.NOT_FOUND);
@@ -316,12 +254,12 @@ describe('backups', function () {
cleanupBackups(function (error) {
expect(error).to.not.be.ok();
backupdb.getByTypePaged(backups.BACKUP_TYPE_BOX, 1, 1000, function (error, result) {
backupdb.getByTypePaged(backupdb.BACKUP_TYPE_BOX, 1, 1000, function (error, result) {
expect(error).to.not.be.ok();
expect(result.length).to.equal(1);
expect(result[0].id).to.equal(BACKUP_1.id);
// check that app backups are also still there. backup_1 is still there
// check that app backups are also still there
backupdb.get(BACKUP_1_APP_0.id, function (error, result) {
expect(error).to.not.be.ok();
expect(result.id).to.equal(BACKUP_1_APP_0.id);
@@ -333,7 +271,6 @@ describe('backups', function () {
});
it('succeeds for app backups not referenced by a box backup', function (done) {
// add two dangling app backups not referenced by box backup. app1 is uninstalled. app0 is there
async.eachSeries([BACKUP_0_APP_0, BACKUP_0_APP_1], (b, done) => backupdb.add(b.id, b, done), function (error) {
expect(error).to.not.be.ok();
@@ -342,12 +279,9 @@ describe('backups', function () {
cleanupBackups(function (error) {
expect(error).to.not.be.ok();
backupdb.getByTypePaged(backups.BACKUP_TYPE_APP, 1, 1000, function (error, result) {
backupdb.getByTypePaged(backupdb.BACKUP_TYPE_APP, 1, 1000, function (error, result) {
expect(error).to.not.be.ok();
expect(result.length).to.equal(3);
expect(result[0].id).to.be(BACKUP_0_APP_0.id); // because app is installed, latest backup is preserved
expect(result[1].id).to.be(BACKUP_1_APP_0.id); // referenced by box
expect(result[2].id).to.be(BACKUP_1_APP_1.id); // referenced by box
expect(result.length).to.equal(2);
done();
});
@@ -435,7 +369,7 @@ describe('backups', function () {
});
it('succeeds to set backup config', function (done) {
fs.mkdirSync(backupConfig.backupFolder, { recursive: true });
mkdirp.sync(backupConfig.backupFolder);
settings.setBackupConfig(backupConfig, function (error) {
expect(error).to.be(null);

View File

@@ -28,7 +28,7 @@ scripts=("${SOURCE_DIR}/src/scripts/clearvolume.sh" \
for script in "${scripts[@]}"; do
if [[ $(sudo -n "${script}" --check 2>/dev/null) != "OK" ]]; then
echo ""
echo "${script} does not have sudo access. Try 'sudo -n ${script} --check'"
echo "${script} does not have sudo access."
echo "You have to add the lines below to /etc/sudoers.d/yellowtent"
echo ""
echo "Defaults!${script} env_keep=\"HOME BOX_ENV\""

View File

@@ -1,6 +1,7 @@
'use strict';
var fs = require('fs'),
mkdirp = require('mkdirp'),
path = require('path'),
rimraf = require('rimraf');
@@ -10,7 +11,7 @@ exports = module.exports = {
function createTree(root, obj) {
rimraf.sync(root);
fs.mkdirSync(root, { recursive: true });
mkdirp.sync(root);
function createSubTree(tree, curpath) {
for (var key in tree) {

View File

@@ -9,7 +9,6 @@ var appdb = require('../appdb.js'),
apps = require('../apps.js'),
async = require('async'),
backupdb = require('../backupdb.js'),
backups = require('../backups.js'),
BoxError = require('../boxerror.js'),
database = require('../database'),
domaindb = require('../domaindb'),
@@ -34,6 +33,7 @@ var USER_0 = {
fallbackEmail: 'safer@me.com',
salt: 'morton',
createdAt: 'sometime back',
modifiedAt: 'now',
resetToken: hat(256),
displayName: '',
twoFactorAuthenticationEnabled: false,
@@ -52,6 +52,7 @@ var USER_1 = {
fallbackEmail: 'safer2@me.com',
salt: 'tata',
createdAt: 'sometime back',
modifiedAt: 'now',
resetToken: '',
displayName: 'Herbert 1',
twoFactorAuthenticationEnabled: false,
@@ -70,6 +71,7 @@ var USER_2 = {
fallbackEmail: 'safer3@me.com',
salt: 'tata',
createdAt: 'sometime back',
modifiedAt: 'now',
resetToken: '',
displayName: 'Herbert 2',
twoFactorAuthenticationEnabled: false,
@@ -606,7 +608,7 @@ describe('database', function () {
});
it('can get all admins', function (done) {
userdb.getByRole('owner', function (error) {
userdb.getByRole('owner', function (error, all) {
expect(error).to.be.ok();
expect(error.reason).to.be(BoxError.NOT_FOUND);
done();
@@ -1324,9 +1326,7 @@ describe('database', function () {
id: 'backup-box',
encryptionVersion: 2,
packageVersion: '1.0.0',
type: backups.BACKUP_TYPE_BOX,
state: backups.BACKUP_STATE_NORMAL,
identifier: 'box',
type: backupdb.BACKUP_TYPE_BOX,
dependsOn: [ 'dep1' ],
manifest: null,
format: 'tgz'
@@ -1343,8 +1343,7 @@ describe('database', function () {
expect(error).to.be(null);
expect(result.encryptionVersion).to.be(2);
expect(result.packageVersion).to.be('1.0.0');
expect(result.type).to.be(backups.BACKUP_TYPE_BOX);
expect(result.state).to.be(backups.BACKUP_STATE_NORMAL);
expect(result.type).to.be(backupdb.BACKUP_TYPE_BOX);
expect(result.creationTime).to.be.a(Date);
expect(result.dependsOn).to.eql(['dep1']);
expect(result.manifest).to.eql(null);
@@ -1362,7 +1361,7 @@ describe('database', function () {
});
it('getByTypePaged succeeds', function (done) {
backupdb.getByTypePaged(backups.BACKUP_TYPE_BOX, 1, 5, function (error, results) {
backupdb.getByTypePaged(backupdb.BACKUP_TYPE_BOX, 1, 5, function (error, results) {
expect(error).to.be(null);
expect(results).to.be.an(Array);
expect(results.length).to.be(1);
@@ -1396,9 +1395,7 @@ describe('database', function () {
id: 'app_appid_123',
encryptionVersion: null,
packageVersion: '1.0.0',
type: backups.BACKUP_TYPE_APP,
state: backups.BACKUP_STATE_CREATING,
identifier: 'appid',
type: backupdb.BACKUP_TYPE_APP,
dependsOn: [ ],
manifest: { foo: 'bar' },
format: 'tgz'
@@ -1415,8 +1412,7 @@ describe('database', function () {
expect(error).to.be(null);
expect(result.encryptionVersion).to.be(null);
expect(result.packageVersion).to.be('1.0.0');
expect(result.type).to.be(backups.BACKUP_TYPE_APP);
expect(result.state).to.be(backups.BACKUP_STATE_CREATING);
expect(result.type).to.be(backupdb.BACKUP_TYPE_APP);
expect(result.creationTime).to.be.a(Date);
expect(result.dependsOn).to.eql([]);
expect(result.manifest).to.eql({ foo: 'bar' });
@@ -1424,8 +1420,8 @@ describe('database', function () {
});
});
it('getByIdentifierPaged succeeds', function (done) {
backupdb.getByIdentifierPaged('appid', 1, 5, function (error, results) {
it('getByAppIdPaged succeeds', function (done) {
backupdb.getByAppIdPaged(1, 5, 'appid', function (error, results) {
expect(error).to.be(null);
expect(results).to.be.an(Array);
expect(results.length).to.be(1);
@@ -1649,7 +1645,7 @@ describe('database', function () {
var GROUP_ID_1 = 'foundersid';
it('can create a group', function (done) {
groupdb.add(GROUP_ID_1, 'founders', 'ldap', function (error) {
groupdb.add(GROUP_ID_1, 'founders', function (error) {
expect(error).to.be(null);
done();
});
@@ -1834,7 +1830,7 @@ describe('database', function () {
});
it('list mailboxes succeeds', function (done) {
mailboxdb.listMailboxes(DOMAIN_0.domain, null /* search */, 1, 10, function (error, mailboxes) {
mailboxdb.listMailboxes(DOMAIN_0.domain, 1, 10, function (error, mailboxes) {
expect(error).to.be(null);
expect(mailboxes.length).to.be(2);
expect(mailboxes[0].name).to.be('girish');

View File

@@ -21,7 +21,7 @@ describe('Dockerproxy', function () {
dockerProxy.start(function (error) {
expect(error).to.not.be.ok();
exec(`${DOCKER} run -d cloudron/base:2.0.0 "bin/bash" "-c" "while true; do echo 'perpetual walrus'; sleep 1; done"`, function (error, stdout, stderr) {
exec(`${DOCKER} run -d cloudron/base:1.0.0 "bin/bash" "-c" "while true; do echo 'perpetual walrus'; sleep 1; done"`, function (error, stdout, stderr) {
expect(error).to.be(null);
expect(stderr).to.be.empty();
@@ -45,7 +45,7 @@ describe('Dockerproxy', function () {
// it('wait', function (done) {} );
it('can get info', function (done) {
exec(DOCKER + ' info', function (error, stdout/*, stderr*/) {
exec(DOCKER + ' info', function (error, stdout, stderr) {
expect(error).to.be(null);
expect(stdout).to.contain('Containers:');
// expect(stderr).to.be.empty(); // on some machines, i get 'No swap limit support'
@@ -54,7 +54,7 @@ describe('Dockerproxy', function () {
});
it('can create container', function (done) {
var cmd = `${DOCKER} run cloudron/base:2.0.0 "/bin/bash" "-c" "echo 'hello'"`;
var cmd = `${DOCKER} run cloudron/base:1.0.0 "/bin/bash" "-c" "echo 'hello'"`;
exec(cmd, function (error, stdout, stderr) {
expect(error).to.be(null);
expect(stdout).to.contain('hello');
@@ -64,7 +64,7 @@ describe('Dockerproxy', function () {
});
it('proxy overwrites the container network option', function (done) {
var cmd = `${DOCKER} run --network ifnotrewritethiswouldfail cloudron/base:2.0.0 "/bin/bash" "-c" "echo 'hello'"`;
var cmd = `${DOCKER} run --network ifnotrewritethiswouldfail cloudron/base:1.0.0 "/bin/bash" "-c" "echo 'hello'"`;
exec(cmd, function (error, stdout, stderr) {
expect(error).to.be(null);
expect(stdout).to.contain('hello');

View File

@@ -12,7 +12,6 @@ var async = require('async'),
expect = require('expect.js'),
externalldap = require('../externalldap.js'),
groupdb = require('../groupdb.js'),
groups = require('../groups.js'),
domains = require('../domains.js'),
ldap = require('ldapjs'),
mailboxdb = require('../mailboxdb.js'),
@@ -46,17 +45,12 @@ const DOMAIN_0 = {
const LDAP_SHARED_PASSWORD = 'validpassword';
const LDAP_PORT = 4321;
const LDAP_BASE_DN = 'ou=Users,dc=cloudron,dc=io';
const LDAP_GROUP_BASE_DN = 'ou=Groups,dc=cloudron,dc=io';
const LDAP_CONFIG = {
provider: 'testserver',
url: `ldap://localhost:${LDAP_PORT}`,
usernameField: 'customusernameprop',
baseDn: LDAP_BASE_DN,
filter: '(objectClass=inetOrgPerson)',
syncGroups: false,
groupBaseDn: LDAP_GROUP_BASE_DN,
groupFilter: '(objectClass=groupOfNames)',
groupnameField: 'customgroupnameprop',
autoCreate: false
};
@@ -140,12 +134,11 @@ function finalSend(results, req, res, next) {
}
let gLdapUsers = [];
let gLdapGroups = [];
function startLdapServer(callback) {
gLdapServer = ldap.createServer();
gLdapServer.search(LDAP_BASE_DN, function (req, res, next) {
gLdapServer.search(LDAP_CONFIG.baseDn, function (req, res, next) {
let results = [];
gLdapUsers.forEach(function (entry) {
@@ -170,32 +163,7 @@ function startLdapServer(callback) {
finalSend(results, req, res, next);
});
gLdapServer.search(LDAP_GROUP_BASE_DN, function (req, res, next) {
let results = [];
gLdapGroups.forEach(function (entry) {
var dn = ldap.parseDN(`cn=${entry.groupname},${LDAP_GROUP_BASE_DN}`);
var obj = {
dn: dn.toString(),
attributes: {
objectclass: [ 'groupOfNames' ],
cn: entry.groupname,
member: entry.member || []
}
};
obj.attributes[LDAP_CONFIG.groupnameField] = entry.groupname;
if ((req.dn.equals(dn) || req.dn.parentOf(dn)) && req.filter.matches(obj.attributes)) {
results.push(obj);
}
});
finalSend(results, req, res, next);
});
gLdapServer.bind(LDAP_BASE_DN, function (req, res, next) {
gLdapServer.bind(LDAP_CONFIG.baseDn, function (req, res, next) {
// extract the common name which might have different attribute names
var attributeName = Object.keys(req.dn.rdns[0].attrs)[0];
var commonName = req.dn.rdns[0].attrs[attributeName].value;
@@ -340,96 +308,6 @@ describe('External LDAP', function () {
done();
});
});
// now test with groups
it('enabling with groups fails with missing groupBaseDn', function (done) {
let conf = _.extend({}, LDAP_CONFIG);
conf.syncGroups = true;
delete conf.groupBaseDn;
enable(conf, function (error) {
expect(error).to.be.ok();
expect(error.reason).to.equal(BoxError.BAD_FIELD);
done();
});
});
it('enabling with groups fails with empty groupBaseDn', function (done) {
let conf = _.extend({}, LDAP_CONFIG);
conf.syncGroups = true;
conf.groupBaseDn = '';
enable(conf, function (error) {
expect(error).to.be.ok();
expect(error.reason).to.equal(BoxError.BAD_FIELD);
done();
});
});
it('enabling with groups fails with missing groupFilter', function (done) {
let conf = _.extend({}, LDAP_CONFIG);
conf.syncGroups = true;
delete conf.groupFilter;
enable(conf, function (error) {
expect(error).to.be.ok();
expect(error.reason).to.equal(BoxError.BAD_FIELD);
done();
});
});
it('enabling with groups fails with empty groupFilter', function (done) {
let conf = _.extend({}, LDAP_CONFIG);
conf.syncGroups = true;
conf.groupFilter = '';
enable(conf, function (error) {
expect(error).to.be.ok();
expect(error.reason).to.equal(BoxError.BAD_FIELD);
done();
});
});
it('enabling with groups fails with missing groupnameField', function (done) {
let conf = _.extend({}, LDAP_CONFIG);
conf.syncGroups = true;
delete conf.groupnameField;
enable(conf, function (error) {
expect(error).to.be.ok();
expect(error.reason).to.equal(BoxError.BAD_FIELD);
done();
});
});
it('enabling with groups fails with empty groupnameField', function (done) {
let conf = _.extend({}, LDAP_CONFIG);
conf.syncGroups = true;
conf.groupnameField = '';
enable(conf, function (error) {
expect(error).to.be.ok();
expect(error.reason).to.equal(BoxError.BAD_FIELD);
done();
});
});
it('enabling with groups succeeds', function (done) {
let conf = _.extend({}, LDAP_CONFIG);
conf.syncGroups = true;
enable(function (error) {
expect(error).to.equal(null);
done();
});
});
it('disabling succeeds', function (done) {
disable(function (error) {
expect(error).to.equal(null);
done();
});
});
});
describe('sync', function () {
@@ -507,148 +385,6 @@ describe('External LDAP', function () {
});
});
it('does not sync group if group sync is disabled', function (done) {
gLdapGroups.push({
groupname: 'extGroup1'
});
externalldap.sync(function progress() {}, function (error) {
expect(error).to.equal(null);
groups.getAll(function (error, result) {
expect(error).to.equal(null);
expect(result.length).to.equal(0);
done();
});
});
});
it('enable with groupSync', function (done) {
disable(function (error) {
expect(error).to.equal(null);
let conf = _.extend({}, LDAP_CONFIG);
conf.syncGroups = true;
enable(conf, done);
});
});
it('succeeds with groups enabled', function (done) {
gLdapGroups = [];
externalldap.sync(function progress() {}, function (error) {
expect(error).to.equal(null);
groups.getAll(function (error, result) {
expect(error).to.equal(null);
expect(result.length).to.equal(0);
done();
});
});
});
it('succeeds with groups enabled and new group', function (done) {
gLdapGroups.push({
groupname: 'extGroup1'
});
externalldap.sync(function progress() {}, function (error) {
expect(error).to.equal(null);
groups.getAll(function (error, result) {
expect(error).to.equal(null);
expect(result.length).to.equal(1);
done();
});
});
});
it('succeeds with groups enabled and second new group', function (done) {
gLdapGroups.push({
groupname: 'extGroup2'
});
externalldap.sync(function progress() {}, function (error) {
expect(error).to.equal(null);
groups.getAll(function (error, result) {
expect(error).to.equal(null);
expect(result.length).to.equal(2);
done();
});
});
});
it('does not create already existing group', function (done) {
gLdapGroups.push({
groupname: 'INTERNALgroup' // also tests lowercasing
});
groups.create('internalgroup', '', function (error) {
expect(error).to.equal(null);
externalldap.sync(function progress() {}, function (error) {
expect(error).to.equal(null);
groups.getAll(function (error, result) {
expect(error).to.equal(null);
expect(result.length).to.equal(3);
done();
});
});
});
});
it('adds users of groups', function (done) {
gLdapGroups.push({
groupname: 'nonEmptyGroup',
member: gLdapUsers.slice(-2).map(function (u) { return `cn=${u.username},${LDAP_CONFIG.baseDn}`; })
});
externalldap.sync(function progress() {}, function (error) {
expect(error).to.equal(null);
groups.getByName('nonemptygroup', function (error, result) {
expect(error).to.equal(null);
groups.getMembers(result.id, function (error, result) {
expect(error).to.equal(null);
expect(result.length).to.equal(2);
done();
});
});
});
});
it('adds new users of groups', function (done) {
gLdapGroups.push({
groupname: 'nonEmptyGroup',
member: gLdapUsers.map(function (u) { return `cn=${u.username},${LDAP_CONFIG.baseDn}`; })
});
externalldap.sync(function progress() {}, function (error) {
expect(error).to.equal(null);
groups.getByName('nonemptygroup', function (error, result) {
expect(error).to.equal(null);
groups.getMembers(result.id, function (error, result) {
expect(error).to.equal(null);
expect(result.length).to.equal(4);
done();
});
});
});
});
it('disable', disable);
});

View File

@@ -36,6 +36,7 @@ var USER_0 = {
role: 'user',
salt: 'morton',
createdAt: 'sometime back',
modifiedAt: 'now',
resetToken: hat(256),
displayName: '',
source: '',
@@ -51,6 +52,7 @@ var USER_1 = { // this user has not signed up yet
role: 'user',
salt: 'morton',
createdAt: 'sometime back',
modifiedAt: 'now',
resetToken: hat(256),
displayName: '',
source: '',
@@ -77,49 +79,42 @@ describe('Groups', function () {
after(cleanup);
it('cannot create group - too small', function (done) {
groups.create('', '', function (error) {
groups.create('', function (error) {
expect(error.reason).to.be(BoxError.BAD_FIELD);
done();
});
});
it('cannot create group - too big', function (done) {
groups.create(new Array(256).join('a'), '', function (error) {
groups.create(new Array(256).join('a'), function (error) {
expect(error.reason).to.be(BoxError.BAD_FIELD);
done();
});
});
it('cannot create group - bad name', function (done) {
groups.create('bad:name', '', function (error) {
groups.create('bad:name', function (error) {
expect(error.reason).to.be(BoxError.BAD_FIELD);
done();
});
});
it('cannot create group - reserved', function (done) {
groups.create('users', '', function (error) {
groups.create('users', function (error) {
expect(error.reason).to.be(BoxError.BAD_FIELD);
done();
});
});
it('cannot create group - invalid', function (done) {
groups.create('cloudron+admin', '', function (error) {
expect(error.reason).to.be(BoxError.BAD_FIELD);
done();
});
});
it('cannot create group - invalid source', function (done) {
groups.create('cloudron+admin', 'unknownsource', function (error) {
groups.create('cloudron+admin', function (error) {
expect(error.reason).to.be(BoxError.BAD_FIELD);
done();
});
});
it('can create valid group', function (done) {
groups.create(GROUP0_NAME, '', function (error, result) {
groups.create(GROUP0_NAME, function (error, result) {
expect(error).to.be(null);
group0Object = result;
done();
@@ -128,14 +123,14 @@ describe('Groups', function () {
it('cannot create existing group with mixed case', function (done) {
var name = GROUP0_NAME[0].toUpperCase() + GROUP0_NAME.substr(1);
groups.create(name, '', function (error) {
groups.create(name, function (error) {
expect(error.reason).to.be(BoxError.ALREADY_EXISTS);
done();
});
});
it('cannot add existing group', function (done) {
groups.create(GROUP0_NAME, 'ldap', function (error) {
groups.create(GROUP0_NAME, function (error) {
expect(error.reason).to.be(BoxError.ALREADY_EXISTS);
done();
});
@@ -183,7 +178,7 @@ describe('Group membership', function () {
async.series([
setup,
function (next) {
groups.create(GROUP0_NAME, '', function (error, result) {
groups.create(GROUP0_NAME, function (error, result) {
if (error) return next(error);
group0Object = result;
next();
@@ -300,7 +295,7 @@ describe('Group membership', function () {
});
it('can remove group with member', function (done) {
groups.create(GROUP0_NAME, '', function (error, result) {
groups.create(GROUP0_NAME, function (error, result) {
expect(error).to.eql(null);
group0Object = result;
@@ -321,14 +316,14 @@ describe('Set user groups', function () {
async.series([
setup,
function (next) {
groups.create(GROUP0_NAME, '', function (error, result) {
groups.create(GROUP0_NAME, function (error, result) {
if (error) return next(error);
group0Object = result;
next();
});
},
function (next) {
groups.create(GROUP1_NAME, '', function (error, result) {
groups.create(GROUP1_NAME, function (error, result) {
if (error) return next(error);
group1Object = result;
next();

View File

@@ -133,7 +133,7 @@ function setup(done) {
});
},
function (callback) {
groups.create(GROUP_NAME, '', function (error, result) {
groups.create(GROUP_NAME, function (error, result) {
if (error) return callback(error);
GROUP_ID = result.id;

View File

@@ -57,7 +57,7 @@ describe('Settings', function () {
it('can get default app_autoupdate_pattern', function (done) {
settings.getAppAutoupdatePattern(function (error, pattern) {
expect(error).to.be(null);
expect(pattern).to.be('00 15 1,3,5,23 * * *');
expect(pattern).to.be('00 30 1,3,5,23 * * *');
done();
});
});

View File

@@ -15,6 +15,7 @@ var BoxError = require('../boxerror.js'),
os = require('os'),
path = require('path'),
rimraf = require('rimraf'),
mkdirp = require('mkdirp'),
recursive_readdir = require('recursive-readdir'),
s3 = require('../storage/s3.js'),
gcs = require('../storage/gcs.js'),
@@ -307,19 +308,19 @@ describe('Storage', function () {
var GCSMockBasePath = path.join(os.tmpdir(), 'gcs-backup-test-buckets/');
before(function () {
var mockGCS = function(){
return {bucket: function(){
var mockGCS = function(cfg){
return {bucket: function(b){
var file = function(filename){
var ensurePathWritable = function (filename) {
filename = GCSMockBasePath + filename;
fs.mkdirSync(path.dirname(filename), { recursive: true });
mkdirp.sync(path.dirname(filename));
return filename;
};
return {
name: filename,
createReadStream: function(){
createReadStream: function(cfg){
return fs.createReadStream(ensurePathWritable(filename))
.on('error', function(e){
console.log('error createReadStream: '+filename);
@@ -328,7 +329,7 @@ describe('Storage', function () {
})
;
},
createWriteStream: function(){
createWriteStream: function(cfg){
return fs.createWriteStream(ensurePathWritable(filename));
},
delete: function(cb){

View File

@@ -47,10 +47,8 @@ describe('System', function () {
it('can get memory', function (done) {
system.getMemory(function (error, memory) {
expect(!error).to.be.ok();
expect(memory.memory).to.be.a('number');
expect(memory.swap).to.be.a('number');
expect(memory.memory).to.be.ok();
expect(memory.swap).to.be.ok();
done();
});
});

View File

@@ -303,7 +303,7 @@ describe('updatechecker - app - manual (email)', function () {
updatechecker.checkAppUpdates({ automatic: false }, function (error) {
expect(!error).to.be.ok();
expect(updatechecker.getUpdateInfo().apps).to.eql({ 'appid-0': { manifest: { version: '2.0.0', changelog: '* some changes' }, unstable: false } });
expect(updatechecker.getUpdateInfo().apps).to.eql({ 'appid-0': { manifest: { version: '2.0.0', changelog: '* some changes' } } });
expect(scope.isDone()).to.be.ok();
checkMails(1, done);
@@ -378,7 +378,7 @@ describe('updatechecker - app - automatic (no email)', function () {
updatechecker.checkAppUpdates({ automatic: false }, function (error) {
expect(!error).to.be.ok();
expect(updatechecker.getUpdateInfo().apps).to.eql({ 'appid-0': { manifest: { version: '2.0.0', changelog: 'c' }, unstable: false } });
expect(updatechecker.getUpdateInfo().apps).to.eql({ 'appid-0': { manifest: { version: '2.0.0', changelog: 'c' } } });
expect(scope.isDone()).to.be.ok();
checkMails(1, done);
@@ -443,7 +443,7 @@ describe('updatechecker - app - automatic free (email)', function () {
updatechecker.checkAppUpdates({ automatic: false }, function (error) {
expect(!error).to.be.ok();
expect(updatechecker.getUpdateInfo().apps).to.eql({ 'appid-0': { manifest: { version: '2.0.0', changelog: 'c' }, unstable: false } });
expect(updatechecker.getUpdateInfo().apps).to.eql({ 'appid-0': { manifest: { version: '2.0.0', changelog: 'c' } } });
expect(scope.isDone()).to.be.ok();
checkMails(1, done);

View File

@@ -8,11 +8,13 @@
var async = require('async'),
BoxError = require('../boxerror.js'),
database = require('../database.js'),
constants = require('../constants.js'),
expect = require('expect.js'),
fs = require('fs'),
groupdb = require('../groupdb.js'),
groups = require('../groups.js'),
domains = require('../domains.js'),
mail = require('../mail.js'),
mailboxdb = require('../mailboxdb.js'),
maildb = require('../maildb.js'),
mailer = require('../mailer.js'),
@@ -736,7 +738,7 @@ describe('User', function () {
auditSource = _.extend({}, AUDIT_SOURCE, { userId: owner.id });
groups.create(NON_ADMIN_GROUP, '', done);
groups.create(NON_ADMIN_GROUP, done);
});
});
@@ -912,12 +914,12 @@ describe('User', function () {
});
});
describe('sendPasswordResetByIdentifier', function () {
describe('resetPasswordByIdentifier', function () {
before(createOwner);
after(cleanupUsers);
it('fails due to unkown email', function (done) {
users.sendPasswordResetByIdentifier('unknown@mail.com', function (error) {
users.resetPasswordByIdentifier('unknown@mail.com', function (error) {
expect(error).to.be.an(BoxError);
expect(error.reason).to.eql(BoxError.NOT_FOUND);
done();
@@ -925,7 +927,7 @@ describe('User', function () {
});
it('fails due to unkown username', function (done) {
users.sendPasswordResetByIdentifier('unknown', function (error) {
users.resetPasswordByIdentifier('unknown', function (error) {
expect(error).to.be.an(BoxError);
expect(error.reason).to.eql(BoxError.NOT_FOUND);
done();
@@ -933,14 +935,14 @@ describe('User', function () {
});
it('succeeds with email', function (done) {
users.sendPasswordResetByIdentifier(EMAIL, function (error) {
users.resetPasswordByIdentifier(EMAIL, function (error) {
expect(error).to.not.be.ok();
checkMails(1, done);
});
});
it('succeeds with username', function (done) {
users.sendPasswordResetByIdentifier(USERNAME, function (error) {
users.resetPasswordByIdentifier(USERNAME, function (error) {
expect(error).to.not.be.ok();
checkMails(1, done);
});

View File

@@ -83,6 +83,7 @@ function checkAppUpdates(options, callback) {
async.eachSeries(result, function (app, iteratorDone) {
if (app.appStoreId === '') return iteratorDone(); // appStoreId can be '' for dev apps
if (app.runState === apps.RSTATE_STOPPED) return iteratorDone(); // stopped apps won't run migration scripts and shouldn't be updated
appstore.getAppUpdate(app, options, function (error, updateInfo) {
if (error) {
@@ -106,7 +107,7 @@ function checkAppUpdates(options, callback) {
return iteratorDone();
}
const canAutoupdateApp = apps.canAutoupdateApp(app, updateInfo);
const canAutoupdateApp = apps.canAutoupdateApp(app, updateInfo.manifest);
if (autoupdatesEnabled && canAutoupdateApp) return iteratorDone();
debug('Notifying of app update for %s from %s to %s', app.id, app.manifest.version, updateInfo.manifest.version);

View File

@@ -16,8 +16,8 @@ var apps = require('./apps.js'),
debug = require('debug')('box:updater'),
df = require('@sindresorhus/df'),
eventlog = require('./eventlog.js'),
fs = require('fs'),
locker = require('./locker.js'),
mkdirp = require('mkdirp'),
os = require('os'),
path = require('path'),
paths = require('./paths.js'),
@@ -121,7 +121,7 @@ function downloadAndVerifyRelease(updateInfo, callback) {
downloadUrl.bind(null, updateInfo.sourceTarballUrl, `${paths.UPDATE_DIR}/box.tar.gz`),
downloadUrl.bind(null, updateInfo.sourceTarballSigUrl, `${paths.UPDATE_DIR}/box.tar.gz.sig`),
gpgVerify.bind(null, `${paths.UPDATE_DIR}/box.tar.gz`, `${paths.UPDATE_DIR}/box.tar.gz.sig`),
fs.mkdir.bind(null, newBoxSource, { recursive: true }),
mkdirp.bind(null, newBoxSource),
extractTarball.bind(null, `${paths.UPDATE_DIR}/box.tar.gz`, newBoxSource)
], function (error) {
if (error) return callback(error);

View File

@@ -28,7 +28,7 @@ var assert = require('assert'),
debug = require('debug')('box:userdb'),
mysql = require('mysql');
var USERS_FIELDS = [ 'id', 'username', 'email', 'fallbackEmail', 'password', 'salt', 'createdAt', 'resetToken', 'displayName',
var USERS_FIELDS = [ 'id', 'username', 'email', 'fallbackEmail', 'password', 'salt', 'createdAt', 'modifiedAt', 'resetToken', 'displayName',
'twoFactorAuthenticationEnabled', 'twoFactorAuthenticationSecret', 'active', 'source', 'role', 'resetTokenCreationTime' ].join(',');
var APP_PASSWORD_FIELDS = [ 'id', 'name', 'userId', 'identifier', 'hashedPassword', 'creationTime' ].join(',');
@@ -166,14 +166,15 @@ function add(userId, user, callback) {
assert.strictEqual(typeof user.fallbackEmail, 'string');
assert.strictEqual(typeof user.salt, 'string');
assert.strictEqual(typeof user.createdAt, 'string');
assert.strictEqual(typeof user.modifiedAt, 'string');
assert.strictEqual(typeof user.resetToken, 'string');
assert.strictEqual(typeof user.displayName, 'string');
assert.strictEqual(typeof user.source, 'string');
assert.strictEqual(typeof user.role, 'string');
assert.strictEqual(typeof callback, 'function');
const query = 'INSERT INTO users (id, username, password, email, fallbackEmail, salt, createdAt, resetToken, displayName, source, role) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
const args = [ userId, user.username, user.password, user.email, user.fallbackEmail, user.salt, user.createdAt, user.resetToken, user.displayName, user.source, user.role ];
const query = 'INSERT INTO users (id, username, password, email, fallbackEmail, salt, createdAt, modifiedAt, resetToken, displayName, source, role) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
const args = [ userId, user.username, user.password, user.email, user.fallbackEmail, user.salt, user.createdAt, user.modifiedAt, user.resetToken, user.displayName, user.source, user.role ];
database.query(query, args, function (error) {
if (error && error.code === 'ER_DUP_ENTRY' && error.sqlMessage.indexOf('users_email') !== -1) return callback(new BoxError(BoxError.ALREADY_EXISTS, 'email already exists'));
@@ -303,7 +304,6 @@ function addAppPassword(id, appPassword, callback) {
const args = [ id, appPassword.userId, appPassword.identifier, appPassword.name, appPassword.hashedPassword ];
database.query(query, args, function (error) {
if (error && error.code === 'ER_DUP_ENTRY' && error.message.indexOf('appPasswords_name_userId_identifier') !== -1) return callback(new BoxError(BoxError.ALREADY_EXISTS, 'name/app combination already exists'));
if (error) return callback(new BoxError(BoxError.DATABASE_ERROR, error));
callback(null);

View File

@@ -16,8 +16,9 @@ exports = module.exports = {
getByResetToken: getByResetToken,
getByUsername: getByUsername,
getAdmins: getAdmins,
resetPasswordByIdentifier: resetPasswordByIdentifier,
setPassword: setPassword,
update: update,
update: updateUser,
createOwner: createOwner,
getOwner: getOwner,
createInvite: createInvite,
@@ -27,14 +28,6 @@ exports = module.exports = {
enableTwoFactorAuthentication: enableTwoFactorAuthentication,
disableTwoFactorAuthentication: disableTwoFactorAuthentication,
sendPasswordResetByIdentifier: sendPasswordResetByIdentifier,
setupAccount,
getAvatarUrlSync,
getAvatarFileSync,
setAvatar,
clearAvatar,
count: count,
AP_MAIL: 'mail',
@@ -61,17 +54,14 @@ let assert = require('assert'),
debug = require('debug')('box:user'),
eventlog = require('./eventlog.js'),
externalLdap = require('./externalldap.js'),
fs = require('fs'),
groups = require('./groups.js'),
hat = require('./hat.js'),
mailer = require('./mailer.js'),
path = require('path'),
paths = require('./paths.js'),
qrcode = require('qrcode'),
safe = require('safetydance'),
settings = require('./settings.js'),
speakeasy = require('speakeasy'),
tokens = require('./tokens.js'),
userdb = require('./userdb.js'),
uuid = require('uuid'),
validator = require('validator'),
@@ -192,6 +182,7 @@ function create(username, password, email, displayName, options, auditSource, ca
password: Buffer.from(derivedKey, 'binary').toString('hex'),
salt: salt.toString('hex'),
createdAt: now,
modifiedAt: now,
resetToken: '',
displayName: displayName,
source: source,
@@ -416,7 +407,7 @@ function getByUsername(username, callback) {
});
}
function update(user, data, auditSource, callback) {
function updateUser(user, data, auditSource, callback) {
assert.strictEqual(typeof user, 'object');
assert.strictEqual(typeof data, 'object');
assert(auditSource && typeof auditSource === 'object');
@@ -493,11 +484,13 @@ function getAdmins(callback) {
});
}
function sendPasswordResetByIdentifier(identifier, callback) {
function resetPasswordByIdentifier(identifier, callback) {
assert.strictEqual(typeof identifier, 'string');
assert.strictEqual(typeof callback, 'function');
const getter = identifier.indexOf('@') === -1 ? userdb.getByUsername : userdb.getByEmail;
var getter;
if (identifier.indexOf('@') === -1) getter = userdb.getByUsername;
else getter = userdb.getByEmail;
getter(identifier.toLowerCase(), function (error, result) {
if (error) return callback(error);
@@ -532,6 +525,7 @@ function setPassword(user, newPassword, callback) {
if (error) return callback(new BoxError(BoxError.CRYPTO_ERROR, error));
let data = {
modifiedAt: (new Date()).toISOString(),
password: Buffer.from(derivedKey, 'binary').toString('hex'),
resetToken: ''
};
@@ -575,12 +569,11 @@ function getOwner(callback) {
});
}
function inviteLink(user, directoryConfig) {
function inviteLink(user) {
let link = `${settings.adminOrigin()}/setupaccount.html?resetToken=${user.resetToken}&email=${encodeURIComponent(user.email)}`;
if (user.username) link += `&username=${encodeURIComponent(user.username)}`;
if (user.displayName) link += `&displayName=${encodeURIComponent(user.displayName)}`;
if (directoryConfig.lockUserProfiles) link += '&profileLocked=true';
return link;
}
@@ -593,16 +586,11 @@ function createInvite(user, callback) {
const resetToken = hat(256), resetTokenCreationTime = new Date();
settings.getDirectoryConfig(function (error, directoryConfig) {
userdb.update(user.id, { resetToken, resetTokenCreationTime }, function (error) {
if (error) return callback(error);
userdb.update(user.id, { resetToken, resetTokenCreationTime }, function (error) {
if (error) return callback(error);
user.resetToken = resetToken;
callback(null, { resetToken, inviteLink: inviteLink(user, directoryConfig) });
});
user.resetToken = resetToken;
callback(null, { resetToken, inviteLink: inviteLink(user) });
});
}
@@ -614,43 +602,9 @@ function sendInvite(user, options, callback) {
if (user.source) return callback(new BoxError(BoxError.CONFLICT, 'User is from an external directory'));
if (!user.resetToken) return callback(new BoxError(BoxError.CONFLICT, 'Must generate resetToken to send invitation'));
settings.getDirectoryConfig(function (error, directoryConfig) {
if (error) return callback(error);
mailer.sendInvite(user, options.invitor || null, inviteLink(user));
mailer.sendInvite(user, options.invitor || null, inviteLink(user, directoryConfig));
callback(null);
});
}
function setupAccount(user, data, auditSource, callback) {
assert.strictEqual(typeof user, 'object');
assert.strictEqual(typeof data, 'object');
assert(auditSource && typeof auditSource === 'object');
assert.strictEqual(typeof callback, 'function');
settings.getDirectoryConfig(function (error, directoryConfig) {
if (error) return callback(error);
const updateFunc = (done) => {
if (directoryConfig.lockUserProfiles) return done();
update(user, _.pick(data, 'username', 'displayName'), auditSource, done);
};
updateFunc(function (error) {
if (error) return callback(error);
setPassword(user, data.password, function (error) { // setPassword clears the resetToken
if (error) return callback(error);
tokens.add(tokens.ID_WEBADMIN, user.id, Date.now() + constants.DEFAULT_TOKEN_EXPIRATION, {}, function (error, result) {
if (error) return callback(error);
callback(null, result.accessToken);
});
});
});
});
callback(null);
}
function setTwoFactorAuthenticationSecret(userId, callback) {
@@ -799,38 +753,3 @@ function delAppPassword(id, callback) {
callback(null);
});
}
// Returns the on-disk path where a user's custom avatar image is stored.
// Purely a path computation; does not check whether the file exists.
// id: user id (string). Returns: absolute path under PROFILE_ICONS_DIR.
function getAvatarFileSync(id) {
    assert.strictEqual(typeof id, 'string');
    return path.join(paths.PROFILE_ICONS_DIR, id);
}
// Resolves the avatar URL for a user.
// If a custom avatar file exists on disk (see getAvatarFileSync), returns the
// Cloudron API URL serving it; otherwise falls back to a Gravatar URL derived
// from the MD5 hash of the user's email address (standard Gravatar scheme).
// NOTE(review): uses synchronous fs.existsSync and an inline require('crypto')
// — acceptable here since this is a rarely-called, non-hot code path.
function getAvatarUrlSync(user) {
    assert.strictEqual(typeof user, 'object');
    if (fs.existsSync(path.join(paths.PROFILE_ICONS_DIR, user.id))) return `${settings.adminOrigin()}/api/v1/profile/avatar/${user.id}`;
    const emailHash = require('crypto').createHash('md5').update(user.email).digest('hex');
    return `https://www.gravatar.com/avatar/${emailHash}.jpg`;
}
// Installs a new avatar for the given user by moving an uploaded temp file
// into PROFILE_ICONS_DIR/<id>, replacing any previous avatar.
// id: user id. filename: path of the uploaded image to move (consumed by rename).
// callback(error): BoxError.FS_ERROR on filesystem failure, otherwise no args.
// NOTE(review): fs.rename fails across filesystems (EXDEV) — assumes the
// upload temp dir is on the same volume as PROFILE_ICONS_DIR; verify caller.
function setAvatar(id, filename, callback) {
    assert.strictEqual(typeof id, 'string');
    assert.strictEqual(typeof filename, 'string');
    assert.strictEqual(typeof callback, 'function');
    fs.rename(filename, path.join(paths.PROFILE_ICONS_DIR, id), function (error) {
        if (error) return callback(new BoxError(BoxError.FS_ERROR, error.message));
        callback();
    });
}
// Removes a user's custom avatar, reverting them to the Gravatar fallback
// (see getAvatarUrlSync). Deletion is best-effort: safe.fs.unlinkSync swallows
// errors (e.g. avatar never set), so the callback always succeeds.
// callback(): invoked synchronously with no error argument.
function clearAvatar(id, callback) {
    assert.strictEqual(typeof id, 'string');
    assert.strictEqual(typeof callback, 'function');
    safe.fs.unlinkSync(path.join(paths.PROFILE_ICONS_DIR, id));
    callback();
}