665 lines
26 KiB
JavaScript
Executable File
665 lines
26 KiB
JavaScript
Executable File
#!/usr/bin/env node
|
|
|
|
'use strict';
|
|
|
|
const assert = require('assert'),
|
|
{ execSync, spawnSync } = require('child_process'),
|
|
fs = require('fs'),
|
|
os = require('os'),
|
|
path = require('path'),
|
|
program = require('commander'),
|
|
safe = require('safetydance'),
|
|
semver = require('semver'),
|
|
superagent = require('superagent'),
|
|
Table = require('easy-table'),
|
|
url = require('url'),
|
|
util = require('util'),
|
|
yesno = require('yesno'),
|
|
_ = require('underscore');
|
|
|
|
// Release environments. Each entry maps an environment tag to the published
// versions.json URL and the SSH host that stores the release artifacts
// (source tarballs and their GPG signatures).
const ENVIRONMENTS = {
    'dev': {
        tag: 'dev',
        url: 'https://releases.dev.cloudron.io/versions.json',
        releasesServer: 'releases.dev.cloudron.io'
    },
    'staging': {
        tag: 'staging',
        url: 'https://releases.staging.cloudron.io/versions.json',
        releasesServer: 'releases.staging.cloudron.io'
    },
    'prod': {
        tag: 'prod',
        url: 'https://releases.cloudron.io/versions.json',
        releasesServer: 'releases.cloudron.io'
    }
};
|
|
|
|
// Print the error message (if any) and terminate the process.
// Termination is deferred by 250ms because process.exit() does not wait for
// the async console API to flush remaining output — ugly but effective until
// a better way to flush the console is found.
function exit(error) {
    if (error) console.error(error.message);

    const exitCode = error ? 1 : 0;
    setTimeout(() => process.exit(exitCode), 250);
}
|
|
|
|
// Extract the changelog entries for `version` from the box/CHANGES file.
// The entries are the non-empty lines between the '[<version>]' header and
// the next '[' header; leading '-' / '*' list markers are stripped and the
// result is returned sorted.
function parseChangelog(version) {
    const lines = fs.readFileSync(__dirname + '/../box/CHANGES', 'utf8').split('\n');

    // strip prerelease/build suffix before matching the section header
    const header = '[' + version.replace(/[+-].*/, '') + ']';

    let i = 0;
    while (i < lines.length && lines[i] !== header) i++;

    const changelog = [];

    for (i = i + 1; i < lines.length; i++) {
        if (lines[i] === '') continue;
        if (lines[i][0] === '[') break; // reached the next version section

        let entry = lines[i].trim();

        // detect and remove list style - and * in changelog lines
        if (entry.indexOf('-') === 0) entry = entry.slice(1).trim();
        if (entry.indexOf('*') === 0) entry = entry.slice(1).trim();

        changelog.push(entry);
    }

    return changelog.sort();
}
|
|
|
|
// Validate the structure of a versions.json object.
// Returns null when valid, otherwise an Error describing the first problem.
//
// Each version entry must have: a parseable date string, an https
// sourceTarballUrl ending in .tar.gz, an optional string author, an optional
// changeLog array, and a 'next' pointer that is either null or an existing,
// not-smaller semver version.
function verifyVersionFormat(versionsJson) {
    if (!versionsJson || typeof versionsJson !== 'object') return new Error('versions must be valid object');

    // check all the keys
    var sortedVersions = Object.keys(versionsJson).sort(semver.compare);
    for (var i = 0; i < sortedVersions.length; i++) {
        var version = sortedVersions[i];
        var versionInfo = versionsJson[version];

        // Array.isArray replaces the deprecated (and since removed) util.isArray
        if ('changeLog' in versionInfo && !Array.isArray(versionInfo.changeLog)) return new Error('version ' + version + ' does not have proper changeLog');

        if (typeof versionInfo.date !== 'string' || ((new Date(versionInfo.date)).toString() === 'Invalid Date')) return new Error('invalid date or missing date');

        if (versionInfo.next !== null) {
            if (typeof versionInfo.next !== 'string') return new Error('version ' + version + ' does not have "string" next');
            if (!semver.valid(versionInfo.next)) return new Error('version ' + version + ' has non-semver next');
            if (!(versionInfo.next in versionsJson)) return new Error('version ' + version + ' points to non-existent version');
        }

        if (typeof versionInfo.sourceTarballUrl !== 'string') return new Error('version ' + version + ' does not have proper sourceTarballUrl');

        if ('author' in versionInfo && typeof versionInfo.author !== 'string') return new Error('author must be a string');

        var tarballUrl = url.parse(versionInfo.sourceTarballUrl);
        if (tarballUrl.protocol !== 'https:') return new Error('sourceTarballUrl must be https');
        // dots escaped: the old /.tar.gz$/ would accept e.g. 'xtarxgz'
        if (!/\.tar\.gz$/.test(tarballUrl.path)) return new Error('sourceTarballUrl must be tar.gz');

        var nextVersion = versionInfo.next;
        // despite having the 'next' field, the appstore code currently relies on all versions being sorted based on semver.compare (see boxversions.js)
        if (nextVersion && semver.gt(version, nextVersion)) return new Error('next version cannot be less than current @' + version);
    }

    return null;
}
|
|
|
|
// Return a copy of `releases` containing only the versions reachable by
// following the 'next' chain starting at `baseVersion` (defaults to
// '0.160.0', the chain's root). Versions not on the chain are dropped.
//
// Unlike the previous implementation this does not throw when the base
// version is absent (it simply returns an empty object), and it no longer
// depends on underscore's _.pick.
function stripUnreachable(releases, baseVersion) {
    const reachable = {};

    let curVersion = baseVersion || '0.160.0';

    while (curVersion && curVersion in releases) {
        reachable[curVersion] = releases[curVersion];
        curVersion = releases[curVersion].next;
    }

    return reachable;
}
|
|
|
|
// Serialize `releases` to /tmp/versions.json, create a detached GPG
// signature for it and rsync both files to the environment's releases
// server.
async function uploadVersionsJSON(env, releases) {
    assert.strictEqual(typeof env, 'object');
    assert.strictEqual(typeof releases, 'object');

    const stdio = [ null, process.stdout, process.stderr ];

    console.log('Computing GPG signature of versions.json...');
    await fs.promises.writeFile('/tmp/versions.json', JSON.stringify(releases, null, 4));
    await fs.promises.rm('/tmp/versions.json.sig', { force: true }); // stale signature would make gpg fail

    execSync('gpg --no-default-keyring --local-user 0EADB19CDDA23CD0FE71E3470A372F8703C493CC --output /tmp/versions.json.sig --detach-sig /tmp/versions.json', { stdio });

    console.log('Uploading versions.json');
    execSync(`rsync /tmp/versions.json ubuntu@${env.releasesServer}:/home/ubuntu/releases/`, { stdio });

    console.log('Uploading versions.json.sig');
    execSync(`rsync /tmp/versions.json.sig ubuntu@${env.releasesServer}:/home/ubuntu/releases/`, { stdio });

    console.log('versions.json and signature uploaded');
}
|
|
|
|
// Validate the release file structure and upload it when valid.
// Throws the validation Error otherwise.
async function verifyAndUpload(env, releases) {
    assert.strictEqual(typeof env, 'object');
    assert.strictEqual(typeof releases, 'object');

    const formatError = verifyVersionFormat(releases);
    if (formatError !== null) throw formatError;

    return await uploadVersionsJSON(env, releases);
}
|
|
|
|
// 'new' command: read a full versions.json from --file, validate it and
// upload it to the selected environment.
async function newRelease(options) {
    const env = ENVIRONMENTS[options.env];
    if (!env) return exit(new Error(`Unknown environment ${options.env}`));
    if (!options.file) return exit(new Error('--file is required'));

    const contents = safe.fs.readFileSync(options.file, 'utf8');
    if (!contents) return exit(safe.error);

    const parsed = safe.JSON.parse(contents);
    if (!parsed) return exit(new Error(`${options.file} has invalid json :${safe.error.message}`));

    await verifyAndUpload(env, parsed);
}
|
|
|
|
// 'edit' command: download the current versions.json, open it in $EDITOR
// and upload the result. The format is verified unless --no-verify is given.
async function edit(options) {
    const env = ENVIRONMENTS[options.env];
    if (!env) return exit(new Error(`Unknown environment ${options.env}`));

    const [error, result] = await safe(superagent.get(env.url));
    if (error || result.error) return exit(error || result.error);

    const oldContents = result.type === 'application/json' ? JSON.stringify(result.body, null, 4) : result.text;
    const tmpfile = path.join(os.tmpdir(), 'versions.json');
    await fs.promises.writeFile(tmpfile, oldContents);

    spawnSync(process.env.EDITOR || 'vim', [tmpfile], {stdio: 'inherit'});

    const newContents = safe.fs.readFileSync(tmpfile, 'utf8');
    if (!newContents || newContents.trim().length === 0 || newContents === oldContents) return exit(new Error('Unchanged'));

    const releases = safe.JSON.parse(newContents);
    // this command has no --file option; report the edited temp file instead
    // of the previously-used (always undefined) options.file
    if (!releases) return exit(new Error(tmpfile + ' has invalid json :' + safe.error.message));

    if (options.verify) {
        await verifyAndUpload(env, releases);
    } else {
        await uploadVersionsJSON(env, releases);
    }
}
|
|
|
|
// Implementation behind the 'create', 'amend' and 'revert' commands.
//
// create: sign and upload the --code tarball, then append a new version
//         entry to versions.json (the version number is parsed from the
//         tarball file name).
// amend:  re-write the last version entry, optionally with a new tarball.
// revert: drop the last version and null its predecessor's 'next' pointer.
async function createRelease(options) {
    const env = ENVIRONMENTS[options.env];
    if (!env) return exit(new Error(`Unknown environment ${options.env}`));

    if (env.tag === 'prod' && !options.amend) return exit(new Error('operation is not allowed in prod'));

    if (!options.revert && !options.amend) {
        if (!options.code) return exit(new Error('--code is required'));
    }

    if (options.code) {
        // exit() prints error.message, so it must receive an Error object
        // (previously a bare string was passed here, printing 'undefined')
        if (!fs.existsSync(options.code)) return exit(new Error('code must be a valid file'));

        // "gpgconf --reload gpg-agent" is handy to reset existing password in the agent. See https://dev.gnupg.org/T3485 for pinentry-mode (--pinentry-mode=loopback --batch --passphrase ${passphrase} works if we want to password protect
        console.log('Computing GPG signature...');
        safe.fs.unlinkSync(`${options.code}.sig`);
        execSync(`gpg --no-default-keyring --local-user 0EADB19CDDA23CD0FE71E3470A372F8703C493CC --output ${options.code}.sig --detach-sig ${options.code}`,
            { stdio: [ null, process.stdout, process.stderr ] } );

        console.log('Uploading source code tarball and signature...');
        const sourceTarballName = path.basename(options.code);
        execSync(`rsync ${options.code} ubuntu@${env.releasesServer}:/home/ubuntu/releases/${sourceTarballName}`, { stdio: [ null, process.stdout, process.stderr ] } );
        execSync(`rsync ${options.code}.sig ubuntu@${env.releasesServer}:/home/ubuntu/releases/${sourceTarballName}.sig`, { stdio: [ null, process.stdout, process.stderr ] } );

        // from here on options.code is the public URL, not the local path
        options.code = `https://${env.releasesServer}/${sourceTarballName}`;
    }

    const username = execSync('git config user.name').toString('utf8').trim();
    const email = execSync('git config user.email').toString('utf8').trim();

    const [error, response] = await safe(superagent.get(env.url));
    if (error || response.error) return exit(error || response.error);

    const releases = response.type === 'application/json' ? response.body : safe.JSON.parse(response.text);

    if (!releases) return exit(new Error('versions.json is not valid JSON'));

    const strippedReleases = stripUnreachable(releases);
    const lastReachableVersion = Object.keys(strippedReleases).sort(semver.rcompare)[0];

    const sortedVersions = Object.keys(releases).sort(semver.rcompare);
    const lastVersion = sortedVersions[0];

    if (options.revert) {
        const secondLastVersion = sortedVersions[1];

        releases[secondLastVersion].next = null;
        delete releases[lastVersion];

        console.log('Reverting %s', lastVersion);
        return await verifyAndUpload(env, releases);
    }

    const sourceTarballUrl = options.code || releases[lastReachableVersion].sourceTarballUrl;

    let newVersion;
    if (options.amend) {
        newVersion = lastVersion;
    } else {
        // tarball names look like box-d6d2ee7d19-937e8ce1ed-3.3.0.tar.gz
        newVersion = path.basename(sourceTarballUrl).split('-')[3].replace('.tar.gz', '');
    }
    // guard against options.version being commander's version function. any command using this code path needs to explicitly clear the version
    // this is the price to pay for using --version with commander
    assert(semver.valid(newVersion), 'invalid new version');

    releases[lastReachableVersion].next = newVersion;

    const changelog = options.changelog || parseChangelog(newVersion);
    if (changelog.length === 0) console.log('No changelog for version %s found.', newVersion);

    releases[newVersion] = {
        sourceTarballUrl: sourceTarballUrl,
        sourceTarballSigUrl: sourceTarballUrl + '.sig',
        changelog: changelog,
        date: (new Date()).toISOString(),
        author: username + ' <' + email + '>',
        next: null
    };

    await verifyAndUpload(env, releases);

    console.log('%s : %s', newVersion, JSON.stringify(releases[newVersion], null, 4));
}
|
|
|
|
// 'rerelease' command: republish the latest release under a new patch
// version. Downloads the latest tarball, patches its VERSION file, repacks
// it and hands it to createRelease().
async function rerelease(options) {
    const env = ENVIRONMENTS[options.env];
    if (!env) return exit(new Error(`Unknown environment ${options.env}`));

    const [error, response] = await safe(superagent.get(env.url));
    if (error) return exit(error);

    const releases = response.body;
    if (!releases) return exit(new Error('versions.json is not valid JSON'));

    const latestVersion = Object.keys(releases).sort(semver.rcompare)[0];
    const sourceTarballName = url.parse(releases[latestVersion].sourceTarballUrl).pathname.substr(1);
    const tmpFile = '/tmp/' + sourceTarballName;
    const newVersion = semver.inc(latestVersion, 'patch'); // const: never reassigned

    console.log(`This will rerelease ${latestVersion} as ${newVersion}`); // fixed 'wil' typo

    console.log('Fetching source code tarball...');
    execSync(`rsync ubuntu@${env.releasesServer}:/home/ubuntu/releases/${sourceTarballName} ${tmpFile}`, { stdio: [ null, process.stdout, process.stderr ] } );

    console.log('Extracting tarball...');
    const tmpdir = '/tmp/rerelease';
    execSync(`rm -rf ${tmpdir} && mkdir ${tmpdir} && tar zxf ${tmpFile} -C ${tmpdir}`, { stdio: [ null, process.stdout, process.stderr ] } );

    console.log('Patching VERSION...');
    fs.writeFileSync(`${tmpdir}/VERSION`, newVersion);

    console.log('Creating new release tarball...');
    const newReleaseTarball = '/tmp/' + sourceTarballName.replace(latestVersion, newVersion);
    execSync(`tar czf ${newReleaseTarball} .`, { cwd: tmpdir, stdio: [ null, process.stdout, process.stderr ] } );

    options.code = newReleaseTarball;
    options.changelog = [ 'Same as the old version' ];

    await createRelease(options);
}
|
|
|
|
// 'list' command: print the releases of an environment — raw JSON, release
// URLs, release filenames, or (by default) a table. Versions not reachable
// via the 'next' chain are shown struck through (~~version~~).
async function listRelease(options) {
    const env = ENVIRONMENTS[options.env];
    if (!env) return exit(new Error(`Unknown environment ${options.env}`));

    const raw = !!options.raw, releaseFilenames = !!options.releaseFilenames, releaseUrls = !!options.releaseUrls;

    const [error, response] = await safe(superagent.get(env.url));
    if (error || response.error) return exit(error || response.error);

    const releases = response.body;
    if (raw) {
        console.log(JSON.stringify(releases, null, 4));
        return exit();
    }

    if (releaseUrls) {
        for (const version of Object.keys(releases)) {
            const release = releases[version];
            console.log(release.sourceTarballUrl);
            if (release.sourceTarballSigUrl) console.log(release.sourceTarballSigUrl);
        }
        return exit();
    }

    if (releaseFilenames) {
        for (const version of Object.keys(releases)) {
            const release = releases[version];
            console.log(new URL(release.sourceTarballUrl).pathname.slice(1));
            if (release.sourceTarballSigUrl) console.log(new URL(release.sourceTarballSigUrl).pathname.slice(1));
        }
        return exit();
    }

    if (response.type !== 'application/json') {
        return exit(new Error('Release file is not valid JSON!'));
    }

    if (Object.keys(releases).length === 0) {
        console.log('No releases');
        return exit();
    }

    const strippedReleases = stripUnreachable(releases);

    const t = new Table();

    for (const release in releases) {
        t.cell('Release', release in strippedReleases ? release : `~~${release}~~`);
        t.cell('Date', releases[release].date);
        // author is optional in versions.json; don't crash when it is missing
        t.cell('Author', (releases[release].author || '').split(' ')[0]);
        t.cell('Next', releases[release].next);

        // tarball names look like box-<sourcehash>-<webadminhash>-<version>.tar.gz;
        // guard against non-matching urls instead of crashing on v[1]
        const v = releases[release].sourceTarballUrl.match(/\/box-([^-]*)-?(.*)\.tar\.gz/);
        t.cell('Source', v ? v[1].slice(0, 7) : '-');
        t.cell('Webadmin', v ? (v[2].slice(0, 7) || '-') : '-');
        t.newRow();
    }

    console.log(`Selected environment: ${env.tag}\n`);
    console.log(t.toString());
}
|
|
|
|
// 'sync' command: rebuild the destination environment's versions.json from
// its parent environment (prod -> staging, staging -> dev). Each release's
// sourceTarballUrl is rewritten to point at a tarball that already exists on
// the destination server; the sync aborts if any tarball is missing there.
async function sync(options) {
    const destEnv = ENVIRONMENTS[options.env];
    if (!destEnv) return exit(new Error(`Unknown environment ${options.env}`));

    let sourceEnv;
    if (destEnv.tag === 'staging') sourceEnv = ENVIRONMENTS['prod'];
    else if (destEnv.tag === 'dev') sourceEnv = ENVIRONMENTS['staging'];
    else throw new Error('Unable to determine source environment to sync from');

    console.log(`Syncing ${sourceEnv.tag} to ${destEnv.tag}`);

    const [getVersionsError, response] = await safe(superagent.get(sourceEnv.url));
    if (getVersionsError) throw new Error(`Error getting versions.json: ${getVersionsError.message}`);

    const sourceReleases = response.body;
    const destReleases = {};

    // list the tarballs already present on the destination server
    const output = execSync(`ssh ubuntu@${destEnv.releasesServer} "find /home/ubuntu/releases -type f -name '*.tar.gz' -printf '%f\n'"`, { encoding: 'utf8' });
    const destSourceTarballs = output.trim().split('\n');

    for (const version in sourceReleases) {
        const sourceRelease = sourceReleases[version];

        // find a suitable sourceTarballUrl on the destination (last match wins)
        let matchingTarballUrl = null;
        for (const tarball of destSourceTarballs) {
            if (sourceRelease.sourceTarballUrl.indexOf(tarball) !== -1) {
                matchingTarballUrl = `https://${destEnv.releasesServer}/${tarball}`;
            }
        }

        if (!matchingTarballUrl) {
            console.log('Unable to find a suitable source tarball on %s for release %s.', destEnv.tag, version);
            console.log('Required source tarball is %s', sourceRelease.sourceTarballUrl.slice(sourceRelease.sourceTarballUrl.lastIndexOf('/') + 1));
            return exit(new Error('Bad stuff happenned'));
        }

        destReleases[version] = {
            sourceTarballUrl: matchingTarballUrl,
            sourceTarballSigUrl: matchingTarballUrl + '.sig',
            changelog: sourceRelease.changelog,
            date: sourceRelease.date,
            author: sourceRelease.author,
            next: sourceRelease.next
        };
    }

    await uploadVersionsJSON(destEnv, destReleases);
}
|
|
|
|
// 'cleanup' command: list the tarballs/signatures on the releases server
// that are no longer referenced by versions.json and, after interactive
// confirmation, delete them.
async function cleanup(options) {
    const env = ENVIRONMENTS[options.env];
    if (!env) return exit(new Error(`Unknown environment ${options.env}`));

    console.log('Cleanup %s', env.tag);

    const [error, response] = await safe(superagent.get(env.url));
    if (error) return exit(error);

    const releases = response.body;
    const releaseAssets = [];

    for (const release in releases) {
        releaseAssets.push(new URL(releases[release].sourceTarballUrl).pathname.slice(1));
        if (releases[release].sourceTarballSigUrl) releaseAssets.push(new URL(releases[release].sourceTarballSigUrl).pathname.slice(1));
    }

    console.log();
    console.log('Used release assets:');
    console.log(releaseAssets.join('\n'));
    console.log();

    const output = execSync(`ssh ubuntu@${env.releasesServer} "find /home/ubuntu/releases -type f -name '*.tar.gz*' -printf '%f\n'"`, { encoding: 'utf8' });
    const existingAssets = output.trim().split('\n');

    // Set lookup replaces the O(n^2) indexOf-inside-a-loop scan
    const usedAssets = new Set(releaseAssets);
    const unusedAssets = existingAssets.filter(asset => !usedAssets.has(asset));

    if (unusedAssets.length === 0) {
        console.log();
        console.log('No unused release assets.');
        return exit();
    }

    console.log();
    console.log('NOT used release assets:');
    console.log(unusedAssets.join('\n'));
    console.log();

    const ok = await yesno({ question: 'Really delete those unused release assets? [y/N]', defaultValue: null });
    if (!ok) return exit();

    const escapedAssets = unusedAssets.map(asset => `'/home/ubuntu/releases/${asset}'`).join(' ');
    execSync(`ssh ubuntu@${env.releasesServer} "rm ${escapedAssets}"`, { stdio: [ null, process.stdout, process.stderr ] } );

    console.log('Done.');
}
|
|
|
|
// Promote the latest release of fromEnv to toEnv: copy the source tarball
// and its signature between the release servers, then append a new version
// entry to toEnv's versions.json. stageVersion, when given, overrides the
// version number being staged.
async function stage(fromEnv, toEnv, stageVersion) {
    const username = execSync('git config user.name').toString('utf8').trim();
    const email = execSync('git config user.email').toString('utf8').trim();

    console.log(`Staging from ${fromEnv.tag} -> ${toEnv.tag}`);

    let [error, response] = await safe(superagent.get(fromEnv.url));
    if (error) return exit(error);

    const fromReleases = response.body;
    if (!fromReleases) return exit(new Error('versions.json is not valid JSON'));

    [error, response] = await safe(superagent.get(toEnv.url));
    if (error) return exit(error);

    const toReleases = response.body;
    if (!toReleases) return exit(new Error('versions.json is not valid JSON'));

    const latestFromVersion = Object.keys(fromReleases).sort(semver.rcompare)[0];
    const nextVersion = stageVersion || latestFromVersion; // dev and staging are assumed to be 'synced'

    const strippedToReleases = stripUnreachable(toReleases);
    const latestToVersion = Object.keys(strippedToReleases).sort(semver.rcompare)[0];

    console.log('Releasing version %s to %s (from %s)', nextVersion , toEnv.tag, latestToVersion);

    // check if we even have a new version to stage
    if (latestFromVersion === latestToVersion) return exit(new Error(`No new version on ${fromEnv.tag} to stage`));

    // check if we have a changelog
    const changelog = parseChangelog(nextVersion);
    if (changelog.length === 0) return exit(new Error('No changelog found for version ' + nextVersion));

    const strippedFromReleases = stripUnreachable(fromReleases);
    const latestReachableFromVersion = Object.keys(strippedFromReleases).sort(semver.rcompare)[0];

    const sourceTarballName = url.parse(fromReleases[latestReachableFromVersion].sourceTarballUrl).pathname.substr(1);
    const tarballTmpFile = '/tmp/' + sourceTarballName;
    const sigTmpFile = `/tmp/${sourceTarballName}.sig`;

    console.log('Copying source code tarball %s to %s', sourceTarballName, toEnv.tag);

    console.log('Fetching source code tarball...');
    execSync(`rsync ubuntu@${fromEnv.releasesServer}:/home/ubuntu/releases/${sourceTarballName} ${tarballTmpFile}`, { stdio: [ null, process.stdout, process.stderr ] } );

    console.log('Uploading source code tarball...');
    execSync(`rsync ${tarballTmpFile} ubuntu@${toEnv.releasesServer}:/home/ubuntu/releases/${sourceTarballName}`, { stdio: [ null, process.stdout, process.stderr ] } );

    console.log('Fetching signature...');
    execSync(`rsync ubuntu@${fromEnv.releasesServer}:/home/ubuntu/releases/${sourceTarballName}.sig ${sigTmpFile}`, { stdio: [ null, process.stdout, process.stderr ] } );

    console.log('Uploading signature...');
    execSync(`rsync ${sigTmpFile} ubuntu@${toEnv.releasesServer}:/home/ubuntu/releases/${sourceTarballName}.sig`, { stdio: [ null, process.stdout, process.stderr ] } );

    const release = toReleases; // mutated in place and re-uploaded below
    release[latestToVersion].next = nextVersion;
    release[nextVersion] = {
        changelog: changelog,
        date: (new Date()).toISOString(),
        sourceTarballUrl: `https://${toEnv.releasesServer}/${sourceTarballName}`,
        sourceTarballSigUrl: `https://${toEnv.releasesServer}/${sourceTarballName}.sig`,
        author: username + ' <' + email + '>',
        next: null
    };

    // remove both temp files; previously only the signature was cleaned up
    // because the tarball path variable had been reassigned
    safe.fs.unlinkSync(tarballTmpFile);
    safe.fs.unlinkSync(sigTmpFile);

    await verifyAndUpload(toEnv, release);
    console.log('%s : %s', nextVersion, JSON.stringify(release[nextVersion], null, 4));
}
|
|
|
|
// 'e2e' command: push a test release through the pipeline for end-to-end
// testing — sync staging and dev, cut a dev release from --code, stage it
// to staging and re-release it there.
async function e2e(options) {
    if (!options.code) return exit(new Error('--code tarball is required'));

    const confirmed = await yesno({ question: 'Are you aware that this script does not fix the next pointer automatically?', defaultValue: null });
    if (!confirmed) return exit(new Error('doing nothing'));

    await sync({ env: 'staging' });
    await sync({ env: 'dev' });
    await createRelease({ code: options.code, env: 'dev' });
    await stage(ENVIRONMENTS['dev'], ENVIRONMENTS['staging'], null);
    await rerelease({ env: 'staging' });
}
|
|
|
|
// CLI definition. Every environment-aware subcommand defaults to 'dev'.

program.command('amend')
    .option('--env <dev/staging/prod>', 'Environment (dev/staging/prod)', 'dev')
    .option('--code <tarball>', 'Source code url')
    .option('--changelog <changelog>', 'Changelog')
    .description('Amend last release. Use with care')
    .action(async function (options) {
        options.amend = true;
        await createRelease(options);
    });

program.command('create')
    .option('--env <dev/staging/prod>', 'Environment (dev/staging/prod)', 'dev')
    .option('--code <tarball>', 'Source code url')
    .option('--changelog <changelog>', 'Changelog')
    .description('Create a new release')
    .action(createRelease);

program.command('edit')
    .option('--env <dev/staging/prod>', 'Environment (dev/staging/prod)', 'dev')
    // a commander negated flag defaults to true on its own; the previous
    // explicit 'false' default made options.verify always false, so the
    // edited release file was never verified
    .option('--no-verify', 'Disable verify of release file')
    .description('Edit and upload versions.json')
    .action(edit);

program.command('list')
    .option('--raw', 'Show raw json')
    .option('--release-filenames', 'Show release filenames')
    .option('--release-urls', 'Show release URLs')
    .option('--env <dev/staging/prod>', 'Environment (dev/staging/prod)', 'dev')
    .description('List the releases file')
    .action(listRelease);

program.command('new')
    .option('--env <dev/staging/prod>', 'Environment (dev/staging/prod)', 'dev')
    .option('--file <file>', 'Upload file as versions.json')
    .description('Upload a new versions.json')
    .action(newRelease);

program.command('publish')
    .description('Publish latest staging version to production')
    .option('--version <version>', 'Version to publish', null)
    .action(async function (options) {
        const ok = await yesno({
            question: 'Are you aware that this script does not fix the next pointer automatically?',
            defaultValue: null
        });
        if (!ok) return exit(new Error('doing nothing'));

        await stage(ENVIRONMENTS['staging'], ENVIRONMENTS['prod'], options.version);
    });

program.command('rerelease')
    .option('--env <dev/staging/prod>', 'Environment (dev/staging/prod)', 'dev')
    .option('--version <version>', 'Create the specified version', null)
    .description('Make a new release, same as the last release')
    .action(async function (options) {
        await rerelease(options);
    });

program.command('revert')
    .option('--env <dev/staging/prod>', 'Environment (dev/staging/prod)', 'dev')
    .description('Revert the last release. Use with care')
    .action(async function (options) {
        options.revert = true;
        await createRelease(options);
    });

program.command('stage')
    .description('Stage latest dev version to staging')
    .option('--version <version>', 'Version to publish', null)
    .action(async function (options) {
        await stage(ENVIRONMENTS['dev'], ENVIRONMENTS['staging'], options.version);
    });

program.command('e2e')
    .option('--code <tarball>', 'Source code url')
    // only one description: the earlier duplicate ('Stage latest dev version
    // to staging') was copy-paste leftover and was silently overwritten anyway
    .description('Put a test release directly for e2e on staging')
    .action(e2e);

program.command('sync')
    .option('--env <dev/staging>', 'Environment (dev/staging)', 'dev')
    .description('Sync the specified env with the parent env (prod -> staging or staging -> dev)')
    .action(sync);

program.command('cleanup')
    .option('--env <dev/staging>', 'Environment (dev/staging)', 'dev')
    .description('Cleanup the release tarballs from the specified env')
    .action(cleanup);

program.parse(process.argv);
|
|
|