diff --git a/setup/start.sh b/setup/start.sh
index 8c5b7bd36..54dafee16 100755
--- a/setup/start.sh
+++ b/setup/start.sh
@@ -58,7 +58,7 @@ mkdir -p "${PLATFORM_DATA_DIR}/collectd/collectd.conf.d"
 mkdir -p "${PLATFORM_DATA_DIR}/logrotate.d"
 mkdir -p "${PLATFORM_DATA_DIR}/acme"
 mkdir -p "${PLATFORM_DATA_DIR}/backup"
-mkdir -p "${PLATFORM_DATA_DIR}/logs/backup" "${PLATFORM_DATA_DIR}/logs/updater"
+mkdir -p "${PLATFORM_DATA_DIR}/logs/backup" "${PLATFORM_DATA_DIR}/logs/updater" "${PLATFORM_DATA_DIR}/logs/tasks"
 mkdir -p "${PLATFORM_DATA_DIR}/update"
 
 mkdir -p "${BOX_DATA_DIR}/appicons"
diff --git a/src/addons.js b/src/addons.js
index 0a98e2882..a5b83b45c 100644
--- a/src/addons.js
+++ b/src/addons.js
@@ -48,7 +48,6 @@ var accesscontrol = require('./accesscontrol.js'),
     hat = require('./hat.js'),
     infra = require('./infra_version.js'),
     mail = require('./mail.js'),
-    mailboxdb = require('./mailboxdb.js'),
     once = require('once'),
     os = require('os'),
     path = require('path'),
diff --git a/src/paths.js b/src/paths.js
index 4bd39a148..3db6fcfdd 100644
--- a/src/paths.js
+++ b/src/paths.js
@@ -33,6 +33,8 @@ exports = module.exports = {
     UPDATE_CHECKER_FILE: path.join(config.baseDir(), 'boxdata/updatechecker.json'),
 
     LOG_DIR: path.join(config.baseDir(), 'platformdata/logs'),
+    TASKS_LOG_DIR: path.join(config.baseDir(), 'platformdata/logs/tasks'),
+
     // this pattern is for the cloudron logs API route to work
     BACKUP_LOG_FILE: path.join(config.baseDir(), 'platformdata/logs/backup/app.log'),
     UPDATER_LOG_FILE: path.join(config.baseDir(), 'platformdata/logs/updater/app.log')
diff --git a/src/routes/tasks.js b/src/routes/tasks.js
index 880a18104..df387dc6c 100644
--- a/src/routes/tasks.js
+++ b/src/routes/tasks.js
@@ -3,7 +3,10 @@
 exports = module.exports = {
     get: get,
     stopTask: stopTask,
-    list: list
+    list: list,
+
+    getLogs: getLogs,
+    getLogStream: getLogStream
 };
 
 let assert = require('assert'),
@@ -55,3 +58,67 @@ function list(req, res, next) {
         next(new HttpSuccess(200, { tasks }));
     });
 }
+
+function getLogs(req, res, next) {
+    assert.strictEqual(typeof req.params.taskId, 'string');
+
+    var lines = req.query.lines ? parseInt(req.query.lines, 10) : 100;
+    if (isNaN(lines)) return next(new HttpError(400, 'lines must be a number'));
+
+    var options = {
+        lines: lines,
+        follow: false,
+        format: req.query.format
+    };
+
+    tasks.getLogs(req.params.taskId, options, function (error, logStream) {
+        if (error && error.reason === TaskError.NOT_FOUND) return next(new HttpError(404, 'No such task'));
+        if (error) return next(new HttpError(500, error));
+
+        res.writeHead(200, {
+            'Content-Type': 'application/x-logs',
+            'Content-Disposition': 'attachment; filename="log.txt"',
+            'Cache-Control': 'no-cache',
+            'X-Accel-Buffering': 'no' // disable nginx buffering
+        });
+        logStream.pipe(res);
+    });
+}
+
+// this route is for streaming logs over an EventSource (SSE) connection
+function getLogStream(req, res, next) {
+    assert.strictEqual(typeof req.params.taskId, 'string');
+
+    var lines = req.query.lines ? parseInt(req.query.lines, 10) : -10; // we ignore last-event-id
+    if (isNaN(lines)) return next(new HttpError(400, 'lines must be a valid number'));
+
+    function sse(id, data) { return 'id: ' + id + '\ndata: ' + data + '\n\n'; }
+
+    if (req.headers.accept !== 'text/event-stream') return next(new HttpError(400, 'This API call requires EventStream'));
+
+    var options = {
+        lines: lines,
+        follow: true
+    };
+
+    tasks.getLogs(req.params.taskId, options, function (error, logStream) {
+        if (error && error.reason === TaskError.NOT_FOUND) return next(new HttpError(404, 'No such task'));
+        if (error) return next(new HttpError(500, error));
+
+        res.writeHead(200, {
+            'Content-Type': 'text/event-stream',
+            'Cache-Control': 'no-cache',
+            'Connection': 'keep-alive',
+            'X-Accel-Buffering': 'no', // disable nginx buffering
+            'Access-Control-Allow-Origin': '*'
+        });
+        res.write('retry: 3000\n');
+        res.on('close', logStream.close); // stop tailing when the client goes away
+        logStream.on('data', function (data) {
+            var obj = JSON.parse(data);
+            res.write(sse(obj.realtimeTimestamp, JSON.stringify(obj))); // send timestamp as id; records have realtimeTimestamp, not monotonicTimestamp
+        });
+        logStream.on('end', res.end.bind(res));
+        logStream.on('error', res.end.bind(res, null));
+    });
+}
diff --git a/src/server.js b/src/server.js
index 7c8d2961f..e786d366f 100644
--- a/src/server.js
+++ b/src/server.js
@@ -138,6 +138,8 @@ function initializeExpressSync() {
     // tasks
     router.get ('/api/v1/tasks', settingsScope, routes.tasks.list);
     router.get ('/api/v1/tasks/:taskId', settingsScope, routes.tasks.get);
+    router.get ('/api/v1/tasks/:taskId/logs', cloudronScope, routes.tasks.getLogs);
+    router.get ('/api/v1/tasks/:taskId/logstream', cloudronScope, routes.tasks.getLogStream);
    router.post('/api/v1/tasks/:taskId/stop', settingsScope, routes.tasks.stopTask);
 
     // backups
diff --git a/src/tasks.js b/src/tasks.js
index 1a94fdc8b..eaf390e5d 100644
--- a/src/tasks.js
+++ b/src/tasks.js
@@ -5,6 +5,8 @@ exports = module.exports = {
     update: update,
     listPaged: listPaged,
 
+    getLogs: getLogs,
+
     startTask: startTask,
     stopTask: stopTask,
 
@@ -25,6 +27,8 @@ let assert = require('assert'),
     mailer = require('./mailer.js'),
     paths = require('./paths.js'),
     safe = require('safetydance'),
+    spawn = require('child_process').spawn,
+    split = require('split'),
     taskdb = require('./taskdb.js'),
     util = require('util'),
     _ = require('underscore');
@@ -34,7 +38,6 @@ const NOOP_CALLBACK = function (error) { if (error) debug(error); };
 const TASKS = { // indexed by task type
     backup: {
         lock: locker.OP_FULL_BACKUP,
-        logFile: paths.BACKUP_LOG_FILE,
         program: __dirname + '/tasks/backuptask.js',
         onFailure: mailer.backupFailed,
         startEventId: eventlog.ACTION_BACKUP_START,
@@ -42,7 +45,6 @@ const TASKS = { // indexed by task type
     },
     update: {
         lock: locker.OP_BOX_UPDATE,
-        logFile: paths.UPDATER_LOG_FILE,
         program: __dirname + '/tasks/updatertask.js',
         onFailure: NOOP_CALLBACK,
         startEventId: eventlog.ACTION_UPDATE,
@@ -116,16 +118,16 @@ function startTask(type, args, auditSource, callback) {
     let error = locker.lock(taskInfo.lock);
     if (error) return callback(new TaskError(TaskError.BAD_STATE, error.message));
 
-    let fd = safe.fs.openSync(taskInfo.logFile, 'a'); // will autoclose
-    if (!fd) {
-        debug(`startTask: unable to get log filedescriptor ${safe.error.message}`);
-        locker.unlock(taskInfo.lock);
-        return callback(new TaskError(TaskError.INTERNAL_ERROR, error.message));
-    }
-
     taskdb.add({ type: type, percent: 0, message: 'Starting', args: args }, function (error, taskId) {
         if (error) return callback(new TaskError(TaskError.INTERNAL_ERROR, error));
 
+        let fd = safe.fs.openSync(`${paths.TASKS_LOG_DIR}/${taskId}.log`, 'a'); // will autoclose
+        if (!fd) {
+            debug(`startTask: unable to get log filedescriptor ${safe.error.message}`);
+            locker.unlock(taskInfo.lock);
+            return callback(new TaskError(TaskError.INTERNAL_ERROR, safe.error.message)); // 'error' is null here; report the fs error instead
+        }
+
-        debug(`startTask - starting task ${type}. logs at ${taskInfo.logFile}. id ${taskId}`);
+        debug(`startTask - starting task ${type}. logs at ${paths.TASKS_LOG_DIR}/${taskId}.log. id ${taskId}`);
 
         eventlog.add(taskInfo.startEventId, auditSource, args);
@@ -184,3 +186,52 @@ function listPaged(type, page, perPage, callback) {
         callback(null, tasks);
     });
 }
+
+// Streams a task's log file via tail. options: lines = number of trailing lines to emit,
+// follow = keep tailing as the file grows, format = 'json' to emit journald-style records
+function getLogs(taskId, options, callback) {
+    assert.strictEqual(typeof taskId, 'string');
+    assert(options && typeof options === 'object');
+    assert.strictEqual(typeof callback, 'function');
+
+    debug(`Getting logs for ${taskId}`);
+
+    var lines = options.lines || 100,
+        format = options.format || 'json',
+        follow = !!options.follow;
+
+    assert.strictEqual(typeof lines, 'number');
+    assert.strictEqual(typeof format, 'string');
+
+    let cmd = '/usr/bin/tail';
+    var args = [ '--lines=' + lines ];
+
+    if (follow) args.push('--follow', '--retry', '--quiet'); // same as -F. --retry to make it work if file doesn't exist, --quiet to not output file headers, which are no logs
+    args.push(`${paths.TASKS_LOG_DIR}/${taskId}.log`);
+
+    var cp = spawn(cmd, args);
+
+    var transformStream = split(function mapper(line) {
+        if (format !== 'json') return line + '\n';
+
+        var data = line.split(' '); // log lines are '<iso timestamp> <message>'
+        var timestamp = (new Date(data[0])).getTime();
+        if (isNaN(timestamp)) timestamp = 0;
+        var message = line.slice(data[0].length+1);
+
+        // ignore faulty empty logs
+        if (!timestamp && !message) return;
+
+        return JSON.stringify({
+            realtimeTimestamp: timestamp * 1000, // microseconds, matching journald's realtimeTimestamp
+            message: message,
+            source: taskId
+        }) + '\n';
+    });
+
+    transformStream.close = cp.kill.bind(cp, 'SIGKILL'); // closing stream kills the child process
+
+    cp.stdout.pipe(transformStream);
+
+    callback(null, transformStream);
+}