diff --git a/CHANGELOG.md b/CHANGELOG.md index af4a9b5..640926d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,33 @@ # Changelog -## [Latest](https://github.com/cortex-lab/matlab-ci/commits/master) [2.2.0] +## [Latest](https://github.com/cortex-lab/matlab-ci/commits/master) [3.0.0] + +## Added + + - any number of tasks may be added for a job, which are then executed in series + - now serves a Webpage that shows the log in realtime + - added a jobs endpoint to see which jobs are on the pile + - stderr is piped to log file + - flake8 errors are neatly captured in GitHub status description + - param to skip checks when only ignored files changed + - param to skip draft PR event checks + + ## Modified + + - renamed MATLAB-CI to labCI + - records endpoint can return pending jobs + - tests badge endpoint returns 'error' on errored tests instead of 'unknown' + - job waits for coverage calculation and updating of records before finishing + - On successful completion of tests the duration is appended to the description + +## [2.2.1] + +## Modified + + - fix error where github event incorrectly rejected + - fix bug incorrect log name when endpoint called with branch name + +## [2.2.0] ## Added - nyc dependency for manual coverage of matlab-ci diff --git a/README.md b/README.md index 19025f2..f1ae0f1 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ -# MATLAB-ci +# LabCI [![Build Status](https://travis-ci.com/cortex-lab/matlab-ci.svg?branch=master)](https://travis-ci.com/cortex-lab/matlab-ci) -[![Coverage](https://img.shields.io/badge/coverage-81.07-green)](https://img.shields.io/badge/coverage-72.35-yellowgreen) +[![Coverage](https://img.shields.io/badge/coverage-92.13-brightgreen)](https://img.shields.io/badge/coverage-72.35-yellowgreen) -A small set of modules written in Node.js for running automated tests of MATLAB code in response to GitHub events. Also submits code coverage to the Coveralls API. 
+A small set of modules written in Node.js for running automated tests of MATLAB and Python code in response to GitHub events. Also submits code coverage to the Coveralls API. Currently unsupported: * Running tests on forked repositories @@ -26,11 +26,8 @@ Create a shell/batch script for preparing your environment, and one for running Add these to the settings.json file in config: ``` { - "setup_function": "./prep_env.BAT", - "test_function": "./run_tests.BAT", "listen_port": 3000, "timeout": 480000, - "program": "python", "strict_coverage": false, "events": { "push": { @@ -40,15 +37,23 @@ Add these to the settings.json file in config: "pull_request": { "checks": ["continuous-integration", "coverage"], "actions": ["opened", "synchronize", "reopened"], - "ref_ignore": ["documentation", "gh-pages"] + "ref_ignore": ["documentation", "gh-pages"], + "files_ignore": [".*\\.yml", ".*\\.md", "LICEN[SC]E"] } } + "routines": { + "*": ["prep_env.BAT", "run_tests.BAT"] + } } ``` Some extra optional settings: - `shell` - optional shell to use when calling scripts (see `child_process.execFile` options). - `events:event:ref_include` - same as `ref_ignore`, but a pass list instead of block list. +- `events:event:files_ignore` - list of files whose changes can be ignored. If only ignored files +are changed checks are skipped. +- `events:pull_request:ignore_drafts` - if true draft pull request actions are skipped (NB: Be +sure to add 'ready_for_review' to the actions list when ignoring drafts). - `kill_children` - if present and true, `tree-kill` is used to kill the child processes, required if shell/batch script forks test process (e.g. a batch script calls python). - `repos` - an array of submodules or map of modules to their corresponding paths. 
diff --git a/config/config.js b/config/config.js index 10d3c8b..25379e3 100644 --- a/config/config.js +++ b/config/config.js @@ -2,78 +2,78 @@ const userSettings = require('./settings.json') || {}; // User settings const path = require('path'); env = process.env.NODE_ENV || 'production'; const appdata = process.env.APPDATA || process.env.HOME; -const dataPath = process.env.APPDATA? path.join(appdata, 'CI') : path.join(appdata, '.ci'); +const dataPath = process.env.APPDATA ? path.join(appdata, 'CI') : path.join(appdata, '.ci'); const fixtureDir = path.resolve(__dirname, '..', 'test', 'fixtures'); const dbFilename = '.db.json'; let settings; // Defaults for when there's no user file; will almost certainly fail -defaults = { - setup_function: null, - test_function: null, +const defaults = { + max_description_len: 140, // GitHub status API has a description char limit listen_port: 3000, - timeout: 8*60000, - program: "python", + timeout: 8 * 60000, strict_coverage: false, events: { push: { checks: null, - ref_ignore: ["documentation", "gh-pages"] + ref_ignore: ['documentation', 'gh-pages'] }, pull_request: { - checks: ["continuous-integration", "coverage"], - actions: ["opened", "synchronize", "reopen"], - ref_ignore: ["documentation", "gh-pages"] + checks: ['continuous-integration', 'coverage'], + actions: ['opened', 'synchronize', 'reopen'], + ref_ignore: ['documentation', 'gh-pages'] } }, dataPath: dataPath, dbFile: path.join(dataPath, dbFilename) -} +}; // Settings for the tests -testing = { +const testing = { listen_port: 3000, timeout: 60000, - setup_function: 'prep_env.BAT', - test_function: "run_tests.BAT", events: { push: { - checks: "continuous-integration", - ref_ignore: "documentation" + checks: 'continuous-integration', + ref_ignore: 'documentation' }, pull_request: { - checks: ["coverage", "continuous-integration"], - actions: ["opened", "synchronize"], - ref_ignore: ["documentation", "gh-pages"] + checks: ['coverage', 'continuous-integration'], + 
actions: ['opened', 'synchronize'], + ref_ignore: ['documentation', 'gh-pages'] } }, + routines: { + '*': ['prep_env.BAT', 'run_tests.BAT'] + }, dataPath: fixtureDir, dbFile: path.join(fixtureDir, dbFilename) // cache of test results -} +}; // Pick the settings to return if (env.startsWith('test')) { - settings = testing; + settings = testing; } else if (userSettings) { - settings = userSettings; - if (!('dbFile' in settings)) { - settings.dbFile = path.join(dataPath, dbFilename) - } - if (!('dataPath' in settings)) { - settings.dataPath = dataPath; - } + settings = userSettings; } else { - settings = defaults; + settings = defaults; +} + +// Ensure defaults for absent fields +for (let field in defaults) { + if (!(field in settings)) settings[field] = defaults[field]; } // Check ENV set up correctly required = ['GITHUB_PRIVATE_KEY', 'GITHUB_APP_IDENTIFIER', 'GITHUB_WEBHOOK_SECRET', - 'WEBHOOK_PROXY_URL', 'REPO_PATH', 'REPO_NAME', 'REPO_OWNER', 'TUNNEL_HOST', - 'TUNNEL_SUBDOMAIN']; -missing = required.filter(o => { return !process.env[o] }); + 'WEBHOOK_PROXY_URL', 'REPO_PATH', 'REPO_NAME', 'REPO_OWNER', 'TUNNEL_HOST', + 'TUNNEL_SUBDOMAIN']; +missing = required.filter(o => { + return !process.env[o]; +}); if (missing.length > 0) { - errMsg = `Env not set correctly; the following variables not found: \n${missing.join(', ')}` - throw ReferenceError(errMsg) + errMsg = `Env not set correctly; the following variables not found: \n${missing.join(', ')}`; + throw ReferenceError(errMsg); } -module.exports = { settings } +module.exports = { settings }; diff --git a/config/settings.json b/config/settings.json index d48b970..070aea7 100644 --- a/config/settings.json +++ b/config/settings.json @@ -1,9 +1,6 @@ { - "setup_function": "prep_env.BAT", - "test_function": "run_tests.BAT", "listen_port": 3000, "timeout": 480000, - "program": "python", "strict_coverage": false, "events": { "push": { @@ -15,5 +12,8 @@ "actions": ["opened", "synchronize", "reopened"], "ref_ignore": 
["documentation", "gh-pages"] } + }, + "routines": { + "*": ["prep_env.BAT", "run_tests.BAT"] } } diff --git a/coverage.js b/coverage.js index 213db71..2fe1e34 100644 --- a/coverage.js +++ b/coverage.js @@ -21,10 +21,9 @@ const fs = require('fs'), xml2js = require('xml2js'), crypto = require('crypto'), - assert = require('assert').strict, parser = new xml2js.Parser(), path = require('path'); -var timestamp, cb; +var timestamp; var token = process.env.COVERALLS_TOKEN; @@ -33,14 +32,14 @@ var token = process.env.COVERALLS_TOKEN; * Loads file containing source code, returns a hash and line count * @param {String} path - Path to the source code file. * @returns {Object} key `Hash` contains MD5 digest string of file; `count` contains number of lines in source file - * @todo Make asynchronous */ function md5(path) { - var hash = crypto.createHash('md5'); // Creating hash object - var buf = fs.readFileSync(path, 'utf-8'); // Read in file - var count = buf.split(/\r\n|\r|\n/).length; // Count the number of lines - hash.update(buf, 'utf-8'); // Update hash - return {hash: hash.digest('hex'), count: count}; + const hash = crypto.createHash('md5'); // Creating hash object + const buf = fs.readFileSync(path, 'utf-8'); // Read in file + const count = buf.split(/\r\n|\r|\n/).length; // Count the number of lines + hash.update(buf, 'utf-8'); // Update hash + + return {hash: hash.digest('hex'), count: count}; } @@ -50,42 +49,41 @@ function md5(path) { * @param {Array} classList - An array of class objects from the loaded XML file. * @param {String} srcPath - The root path of the code repository. * @param {String} sha - The commit SHA for this coverage test. - * @param {function} callback - The callback function to run when complete. 
Takes object containing array of source - * code files and their code coverage * @returns {Object} * @todo Generalize path default - * @fixme Doesn't work with python's coverage */ -function formatCoverage(classList, srcPath, sha) { - var job = {}; - var sourceFiles = []; - var digest; - srcPath = typeof srcPath != "undefined" ? srcPath : process.env.HOMEPATH; // default to home dir - // For each class, create file object containing array of lines covered and add to sourceFile array - classList.forEach( async c => { - let file = {}; // Initialize file object - let fullPath = c.$.filename.startsWith(srcPath)? c.$.filename : path.join(srcPath, c.$.filename); - digest = md5(fullPath); // Create digest and line count for file - let lines = new Array(digest.count).fill(null); // Initialize line array the size of source code file - c.lines[0].line.forEach(ln => { - let n = Number(ln.$.number); - if (n <= digest.count) {lines[n] = Number(ln.$.hits) } - }); - // create source file object - file.name = c.$.filename; - file.source_digest = digest.hash; - file.coverage = lines; // file.coverage[0] == line 1 - sourceFiles.push(file); - }); +async function formatCoverage(classList, srcPath, sha) { + var job = {}; + var sourceFiles = []; + var digest; + srcPath = typeof srcPath != 'undefined' ? srcPath : process.env.REPO_PATH; // default to home dir + // For each class, create file object containing array of lines covered and add to sourceFile array + await Promise.all(classList.map(async c => { + let file = {}; // Initialize file object + let fullPath = c.$.filename.startsWith(srcPath) ? 
c.$.filename : path.join(srcPath, c.$.filename); + digest = md5(fullPath); // Create digest and line count for file + let lines = new Array(digest.count).fill(null); // Initialize line array the size of source code file + c.lines[0].line.forEach(ln => { + let n = Number(ln.$.number); + if (n <= digest.count) { + lines[n] = Number(ln.$.hits); + } + }); + // create source file object + file.name = c.$.filename; + file.source_digest = digest.hash; + file.coverage = lines; // file.coverage[0] == line 1 + sourceFiles.push(file); + })); - job.repo_token = token; // env secret token? - job.service_name = `coverage/${process.env.USERDOMAIN}`; - // The associated pull request ID of the build. Used for updating the status and/or commenting. - job.service_pull_request = ''; - job.source_files = sourceFiles; - job.commit_sha = sha; - job.run_at = timestamp; // "2013-02-18 00:52:48 -0800" - cb(job); + job.repo_token = token; // env secret token + job.service_name = `coverage/${process.env.USERDOMAIN}`; + // The associated pull request ID of the build. Used for updating the status and/or commenting. 
+ job.service_pull_request = ''; + job.source_files = sourceFiles; + job.commit_sha = sha; + job.run_at = timestamp; // "2013-02-18 00:52:48 -0800" + return job; } /** @@ -95,44 +93,42 @@ function formatCoverage(classList, srcPath, sha) { * @param {String} sha - The commit SHA for this coverage test * @param {String} repo - The repo to which the commit belongs * @param {Array} submodules - A list of submodules for separating coverage into - * @param {function} callback - The callback function to run when complete * @see {@link https://github.com/cobertura/cobertura/wiki|Cobertura Wiki} */ -function coverage(path, repo, sha, submodules, callback) { - cb = callback; // @fixme Making callback global feels hacky - fs.readFile(path, function(err, data) { // Read in XML file - if (err) {throw err} // @fixme deal with file not found errors - parser.parseString(data, function (err, result) { // Parse XML - // Extract root code path - const rootPath = (result.coverage.sources[0].source[0] || process.env.REPO_PATH).replace(/[\/|\\]+$/, '') - assert(rootPath.endsWith(process.env.REPO_NAME), 'Incorrect source code repository') - timestamp = new Date(result.coverage.$.timestamp*1000); // Convert UNIX timestamp to Date object - let classes = []; // Initialize classes array +function coverage(path, repo, sha, submodules) { + return fs.promises.readFile(path) // Read in XML file + .then(parser.parseStringPromise) // Parse XML + .then(result => { + // Extract root code path + const rootPath = (result.coverage.sources[0].source[0] || process.env.REPO_PATH) + .replace(/[\/|\\]+$/, ''); + timestamp = new Date(result.coverage.$.timestamp * 1000); // Convert UNIX timestamp to Date object + let classes = []; // Initialize classes array - const packages = result.coverage.packages[0].package; - packages.forEach(pkg => { classes.push(pkg.classes[0].class) }); // Get all classes - classes = classes.reduce((acc, val) => acc.concat(val), []); // Flatten + const packages = 
result.coverage.packages[0].package; + packages.forEach(pkg => { classes.push(pkg.classes[0].class); }); // Get all classes + classes = classes.reduce((acc, val) => acc.concat(val), []); // Flatten - // The submodules - const byModule = {'main' : []}; - submodules.forEach((x) => { byModule[x] = []; }); // initialize submodules + // The submodules + const byModule = {'main': []}; + submodules.forEach((x) => { byModule[x] = []; }); // initialize submodules - // Sort into piles - byModule['main'] = classes.filter(function (e) { - if (e.$.filename.search(/(tests\\|_.*test|docs\\)/i) !== -1) {return false;} // Filter out tests and docs - if (!Array.isArray(e.lines[0].line)) {return false;} // Filter out files with no functional lines - for (let submodule of submodules) { - if (e.$.filename.startsWith(submodule)) { - byModule[submodule].push(e); return false; - } - } - return true; + // Sort into piles + byModule['main'] = classes.filter(function (e) { + if (e.$.filename.search(/(tests\\|_.*test|docs\\)/i) !== -1) return false; // Filter out tests and docs + if (!Array.isArray(e.lines[0].line)) return false; // Filter out files with no functional lines + for (let submodule of submodules) { + if (e.$.filename.startsWith(submodule)) { + byModule[submodule].push(e); + return false; + } + } + return true; + }); + // Select module + let modules = byModule[repo] || byModule['main']; + return formatCoverage(modules, rootPath, sha); }); - // Select module - let modules = byModule[repo] || byModule['main']; - formatCoverage(modules, rootPath, callback); - }); - }); } diff --git a/lib.js b/lib.js index 1f7b5fc..da74930 100644 --- a/lib.js +++ b/lib.js @@ -3,8 +3,9 @@ */ const fs = require('fs'); const path = require('path'); +const cp = require('child_process'); -const createDebug = require('debug'); +const createDebug = require('debug'); const localtunnel = require('localtunnel'); const kill = require('tree-kill'); const shell = require('shelljs'); @@ -13,21 +14,22 @@ const 
config = require('./config/config').settings; const Coverage = require('./coverage'); const queue = new (require('./queue.js'))(); // The queue object for our app to use + /** * Return a shortened version of an int or string id * @param {any} v - ID to shorten. * @param {int} len - Maximum number of chars. * @returns {String} v as a short string. */ -function shortID(v, len=7) { - if (Array.isArray(v)) { return v.map(v => shortID(v, len)); } - if (Number.isInteger(v)) { v = v.toString(); } - if (typeof v === 'string' || v instanceof String) { v = v.substr(0, len); } - return v; // If not string, array or number, leave unchanged +function shortID(v, len = 7) { + if (Array.isArray(v)) return v.map(v => shortID(v, len)); + if (Number.isInteger(v)) v = v.toString(); + if (typeof v === 'string' || v instanceof String) v = v.substr(0, len); + return v; // If not string, array or number, leave unchanged } // Attach shortID function to logger formatter -createDebug.formatters.g = shortID +createDebug.formatters.g = shortID; const log = createDebug('ci'); const _log = log.extend('lib'); @@ -38,8 +40,8 @@ const _log = log.extend('lib'); * @returns {boolean} true if id is a valid SHA */ function isSHA(id) { - const regex = /^[0-9a-f]{7,40}$/i; - return (typeof id === 'string' || id instanceof String) && id.match(regex) !== null + const regex = /^[0-9a-f]{7,40}$/i; + return (typeof id === 'string' || id instanceof String) && id.match(regex) !== null; } @@ -49,11 +51,11 @@ function isSHA(id) { * @returns {String} A full path */ function fullpath(p) { - if (p[0] === '~') { - return path.join(process.env.HOME, p.slice(1)); - } else { - return path.resolve(p); - } + if (p[0] === '~') { + return path.join(process.env.HOME, p.slice(1)); + } else { + return path.resolve(p); + } } @@ -62,7 +64,10 @@ function fullpath(p) { * @param {Object} x - Input to ensure as array. * @returns {Array} x as an array. */ -function ensureArray(x) { return (Array.isArray(x))? 
x : [x]; } +function ensureArray(x) { + return (Array.isArray(x)) ? x : [x]; +} + /** * Will match one and only one of the string 'true','1', or 'on' regardless of capitalization and @@ -70,7 +75,25 @@ function ensureArray(x) { return (Array.isArray(x))? x : [x]; } * @param {string} s - String to test * @returns {boolean} s as bool */ -function strToBool(s) { return /^\s*(true|1|on)\s*$/i.test(s); } +function strToBool(s) { + return /^\s*(true|1|on)\s*$/i.test(s); +} + + +/** + * Get the routine for a given context from the settings JSON. + * @param {String} context - The context. + * @returns {Array} The test routine, i.e. an array of functions/scripts to call + */ +function context2routine(context) { + const opts = ('routines' in config) ? config['routines'] : null; + if (!opts) return null; + let routine = ('*' in opts) ? opts['*'] : []; + if (context in opts) { + routine += ensureArray(opts[context]); + } + return routine; +} /** @@ -78,18 +101,18 @@ function strToBool(s) { return /^\s*(true|1|on)\s*$/i.test(s); } * @param {string, array} id - Commit SHA. */ function loadTestRecords(id) { - // FIXME Catch JSON parse error - _log('Loading test records from %s for id %g', config.dbFile, id); - if (!id) { throw new TypeError('invalid id'); } - if(!fs.existsSync(config.dbFile)) { - console.log('Records file not found'); - return [] - } - let obj = JSON.parse(fs.readFileSync(config.dbFile, 'utf8')); - obj = ensureArray(obj); - let records = obj.filter(o => id.includes(o.commit)); - // If single arg return as object, otherwise keep as array - return (!Array.isArray(id) && records.length === 1 ? 
records[0] : records) + // FIXME Catch JSON parse error + _log('Loading test records from %s for id %g', config.dbFile, id); + if (!id) throw new TypeError('invalid id'); + if (!fs.existsSync(config.dbFile)) { + console.log('Records file not found'); + return []; + } + let obj = JSON.parse(fs.readFileSync(config.dbFile, 'utf8')); + obj = ensureArray(obj); + let records = obj.filter(o => id.includes(o.commit)); + // If single arg return as object, otherwise keep as array + return (!Array.isArray(id) && records.length === 1 ? records[0] : records); } @@ -98,37 +121,38 @@ function loadTestRecords(id) { * @param {Object, Array} r - The record(s) to save. Must contain an id field. */ async function saveTestRecords(r) { - var obj; // the test records - const byID = (a, b) => b.commit.localeCompare(a.commit); - r = ensureArray(r).sort(byID); - if (!r.every(x => isSHA(x.commit))) { - throw new APIError('"commit" not in record(s)'); - } - try { - let data = await fs.promises.readFile(config.dbFile, 'utf8') - obj = ensureArray(JSON.parse(data)); - let ids = r.map(x => x.commit); - let records = obj.filter(o => ids.indexOf(o.commit) >= 0); - // Update existing records - for (let old of records) { - let o = r.filter(x => x.commit === old.commit ); - if (o.length > 0) { - Object.assign(old, o.pop()); - } - } - let updated = records.map(x => x.commit); - r = r.filter(x => updated.indexOf(x.commit) === -1); - } catch (err) { - if (err && err.code === 'ENOENT') { - console.log(`Records file not found at ${config.dbFile}`); - obj = []; - } else { - throw err; - } - } - // Add new records - obj = obj.concat(r); - await fs.promises.writeFile(config.dbFile, JSON.stringify(obj)); + var obj; // the test records + const byID = (a, b) => b.commit.localeCompare(a.commit); + r = ensureArray(r).sort(byID); + if (!r.every(x => isSHA(x.commit))) { + throw new APIError('"commit" not in record(s)'); + } + try { + let data = await fs.promises.readFile(config.dbFile, 'utf8'); + obj = 
ensureArray(JSON.parse(data)); + let ids = r.map(x => x.commit); + let records = obj.filter(o => ids.indexOf(o.commit) >= 0); + // Update existing records + for (let old of records) { + let o = r.filter(x => x.commit === old.commit); + if (o.length > 0) { + Object.assign(old, o.pop()); + } + } + let updated = records.map(x => x.commit); + r = r.filter(x => updated.indexOf(x.commit) === -1); + } catch (err) { + if (err && err.code === 'ENOENT') { + console.log(`Records file not found at ${config.dbFile}`); + console.log('Creating records file...'); + obj = []; + } else { + throw err; + } + } + // Add new records + obj = obj.concat(r); + await fs.promises.writeFile(config.dbFile, JSON.stringify(obj)); } @@ -137,26 +161,37 @@ async function saveTestRecords(r) { * @param {Object} job - Job object which is being processed. * @returns {boolean} - true if record was found */ -function updateJobFromRecord(job) { +async function updateJobFromRecord(job) { let log = _log.extend('updateJobFromRecord'); log('Loading test records for head commit %g', job.data['sha']); let rec = loadTestRecords(job.data['sha']); // Load test result from json log - if (rec.length === 0) { - log('No record found, return false'); - return false; - } // No record found + if (rec.length === 0) { // No record found + log('No record found, return false'); + return false; + } rec = Array.isArray(rec) ? rec.pop() : rec; // in case of duplicates, take last job.data['status'] = rec['status']; job.data['description'] = rec['description']; - job.data['coverage'] = ('coverage' in rec)? 
rec['coverage'] : null; - if (!job.data['coverage']) { - log('Coverage missing, computing from XML'); - computeCoverage(job); // Attempt to load from XML + // Append the duration in minutes if available + if (rec['status'] === 'success' && job.created) { + let diff = (new Date().getTime() - job.created.getTime()) / 1000; + let duration = ` (took ${Math.round(diff / 60)} min)`; + // Truncate description if necessary + let strSize = (config.max_description_len - duration.length); + if (job.data['description'].length > strSize) { + job.data['description'] = job.data['description'].slice(0, strSize - 3) + '...'; + } + job.data['description'] += duration; + } + job.data['coverage'] = ('coverage' in rec) ? rec['coverage'] : null; + if (!job.data['coverage'] && rec['status'] !== 'error') { + log('Coverage missing, computing from XML'); + await computeCoverage(job); // Attempt to load from XML FIXME deal with failure } else if ((job.data.context || '').startsWith('coverage')) { - log('Comparing coverage to base commit'); - compareCoverage(job); // If this test was to ascertain coverage, call comparison function + log('Comparing coverage to base commit'); + compareCoverage(job); // If this test was to ascertain coverage, call comparison function } - return true; + return true; } @@ -165,43 +200,59 @@ function updateJobFromRecord(job) { * @param {Object} func - Function to be curried. */ function partial(func) { - return function curried(...args) { - if (args.length >= func.length) { - return func.apply(this, args); - } else { - return function(...args2) { - return curried.apply(this, args.concat(args2)); - } - } - }; + return function curried(...args) { + if (args.length >= func.length) { + return func.apply(this, args); + } else { + return function (...args2) { + return curried.apply(this, args.concat(args2)); + }; + } + }; +} + + +/** + * Append URL parameters to a URL. + * @param {String} url - The URL to append parameters to. 
+ * @param {String} args - One or more URL parameters to append, e.g. 'param=value' + */ +function addParam(url, ...args) { + if (url.indexOf('&') === -1 && !url.endsWith('/')) { + url += '/'; + } + for (param of args) { + url += (/\?/g.test(url) ? '&' : '?') + param; + } + return url; } /** * Check if job already has record, if so, update from record and finish, otherwise call tests function. * @param {Object} job - Job object which is being processed. - * @param {Function} func - The tests function to run, e.g. `runTests`. + * @param {Function} func - The tests function to run, e.g. `buildRoutine`. */ -function shortCircuit(job, func=null) { - // job.data contains the custom data passed when the job was created - // job.id contains id of this job. - let log = _log.extend('shortCircuit'); - log('Checking whether to load from saved for %s @ %g', - (job.data.context || '').split('/').pop(), job.data.sha); - - // To avoid running our tests twice, set the force flag to false for any other jobs in pile that - // have the same commit ID - let sha = job.data.sha; - let others = queue.pile.filter(o => (o.data.sha === sha) && (o.id !== job.id)); - for (let other of others) { other.data.force = false } - // If lazy, load records to check whether we already have the results saved - if (job.data.force === false) { // NB: Strict equality; force by default - _log('Updating job data directly from record for job #%g', job.id); - if (updateJobFromRecord(job)) { return job.done(); } // No need to run tests; skip to complete routine - } - - // Go ahead and prepare to run tests - if (func) { return func(job); } +async function shortCircuit(job, func = null) { + // job.data contains the custom data passed when the job was created + // job.id contains id of this job. 
+ let log = _log.extend('shortCircuit'); + log('Checking whether to load from saved for %s @ %g', + (job.data.context || '').split('/').pop(), job.data.sha); + + // To avoid running our tests twice, set the force flag to false for any other jobs in pile that + // have the same commit ID + let sha = job.data.sha; + let others = queue.pile.filter(o => (o.data.sha === sha) && (o.id !== job.id)); + for (let other of others) other.data.force = false; + // If lazy, load records to check whether we already have the results saved + if (job.data.force === false) { // NB: Strict equality; force by default + _log('Updating job data directly from record for job #%g', job.id); + if (await updateJobFromRecord(job)) return job.done(); // No need to run tests; skip to complete routine + } + + // Go ahead and prepare to run tests + if (func) return func(job); } @@ -210,17 +261,22 @@ function shortCircuit(job, func=null) { * @returns (Class) - A localtunnel instance */ const openTunnel = async () => { - let args = { - port: config.listen_port, - subdomain: process.env.TUNNEL_SUBDOMAIN, - host: process.env.TUNNEL_HOST - }; - const tunnel = await localtunnel(args); - console.log(`Tunnel open on: ${tunnel.url}`); - tunnel.on('close', () => {console.log('Reconnecting'); openTunnel(); }); - tunnel.on('error', (e) => { console.error(e) }); - return tunnel; -} + let args = { + port: config.listen_port, + subdomain: process.env.TUNNEL_SUBDOMAIN, + host: process.env.TUNNEL_HOST + }; + const tunnel = await localtunnel(args); + console.log(`Tunnel open on: ${tunnel.url}`); + tunnel.on('close', () => { + console.log('Reconnecting'); + openTunnel(); + }); + tunnel.on('error', (e) => { + console.error(e); + }); + return tunnel; +}; /** @@ -229,12 +285,12 @@ const openTunnel = async () => { * @returns {Array} A list of submodule names, or null if none were found */ function listSubmodules(repoPath) { - if (!shell.which('git')) { throw new Error('Git not found on path'); } - shell.pushd(repoPath); 
- let listModules = 'git config --file .gitmodules --get-regexp path'; - const modules = shell.exec(listModules) - shell.popd(); - return (!modules.code && modules.stdout !== '')? modules.match(/(?<=submodule.)[\w-]+/g) : []; + if (!shell.which('git')) throw new Error('Git not found on path'); + shell.pushd(repoPath); + let listModules = 'git config --file .gitmodules --get-regexp path'; + const modules = shell.exec(listModules); + shell.popd(); + return (!modules.code && modules.stdout !== '') ? modules.match(/(?<=submodule.)[\w-]+/g) : []; } @@ -247,15 +303,15 @@ function listSubmodules(repoPath) { * @returns {String} The repository path if found */ function getRepoPath(name) { - if (!config.repos) { return process.env['REPO_PATH']; } // Legacy, to remove - if (config.repos[name]) { return config.repos[name]; } // Found path, return - const modules = listSubmodules(process.env['REPO_PATH']); - let repoPath = process.env['REPO_PATH']; - if (modules && modules.includes(name)) { - // If the repo is a submodule, modify path - repoPath += (path.sep + name); - } - return repoPath; // No modules matched, return default + if (!config.repos) return process.env['REPO_PATH']; // Legacy, to remove + if (config.repos[name]) return config.repos[name]; // Found path, return + const modules = listSubmodules(process.env['REPO_PATH']); + let repoPath = process.env['REPO_PATH']; + if (modules && modules.includes(name)) { + // If the repo is a submodule, modify path + repoPath += (path.sep + name); + } + return repoPath; // No modules matched, return default } @@ -265,15 +321,181 @@ function getRepoPath(name) { * @param {boolean} kill_children - If true all child processes are killed. * @returns {number} - A timeout object. */ -function startJobTimer(job, kill_children=false) { - const timeout = config.timeout || 8*60000; // How long to wait for the tests to run - return setTimeout(() => { - console.log('Max test time exceeded'); - log(kill_children? 
'Killing all processes' : 'Ending test process'); - let pid = job.data.process.pid; - job.data.process.kill(); - if (kill_children) { kill(pid); } - }, timeout); +function startJobTimer(job, kill_children = false) { + const timeout = config.timeout || 8 * 60000; // How long to wait for the tests to run + return setTimeout(() => { + console.log('Max test time exceeded'); + log(kill_children ? 'Killing all processes' : 'Ending test process'); + let pid = job._child.pid; + job._child.kill(); + if (kill_children) { + kill(pid); + } + }, timeout); +} + + +/** + * Build task pipeline. Takes a list of scripts/functions and builds a promise chain. + * @param {Object} job - The path of the repository + * @returns {Promise} - The job routine + */ +async function buildRoutine(job) { + const debug = log.extend('pipeline'); + const data = job.data; + // Get task list from job data, or from context if missing + const tasks = data.routine ? ensureArray(data.routine) : context2routine(data.context); + // Throw an error if there is no routine defined for this job + if (!tasks) throw new Error(`No routine defined for context ${data.context}`); + + debug('Building routine for job #%g', job.id); + // variables shared between functions + const repoPath = getRepoPath(data.repo); + const sha = data['sha']; + const logDir = path.join(config.dataPath, 'reports', sha); + const logName = path.join(logDir, `std_output-${shortID(sha)}.log`); + await fs.promises.mkdir(logDir, { recursive: true }); + const logDump = fs.createWriteStream(logName, {flags: 'w'}); + logDump.on('close', () => debug('Closing log file')); + const ops = config.shell ? {'shell': config.shell} : {}; + + const init = () => debug('Executing pipeline for job #%g', job.id); + const routine = tasks.reduce(applyTask, Promise.resolve().then(init)); + return routine + .then(updateJob) + .catch(handleError) + .finally(() => logDump.close()); + + /** + * Build task pipeline. 
Should recursively call functions to produce chain of spawn callbacks. + * Must return promises. + * @param {Promise} pipeline - The promise chain to add to + * @param {String} task - The script + * @param {Number} idx - The current index in the pipeline + * @param {Array} taskList - An array of functions or scripts to execute consecutively + * @returns {Promise} - The job routine with `task` added to it. + */ + function applyTask(pipeline, task, idx, taskList) { + return pipeline.then(() => { + debug('Starting task "%s" (%i/%i)', task, idx + 1, taskList.length); + const timer = startJobTimer(job, config.kill_children === true); + task = fullpath(task); // Ensure absolute path + return new Promise(function (resolve, reject) { + // Spawn a process to execute our task + const child = cp.spawn(task, [sha, repoPath, config.dataPath], ops); + let stdout = '', stderr = ''; + // Pipe output to log file + child.stdout.pipe(logDump, {end: false}); + child.stderr.pipe(logDump, {end: false}); + // Keep output around for reporting errors + child.stdout.on('data', chunk => { + stdout += chunk; + }); + child.stderr.on('data', chunk => { + stderr += chunk; + }); + // error emitted called when spawn itself fails, or process could not be killed + child.on('error', err => { + debug('clearing job timer'); + clearTimeout(timer); + reject(err); + }) + .on('exit', () => { + debug('clearing job timer'); + clearTimeout(timer); + }) + .on('close', (code, signal) => { + const callback = (code === 0) ? resolve : reject; + const proc = { + code: code, + signal: signal, + stdout: stdout, + stderr: stderr, + process: child + }; + callback(proc); + }); + job.child = child; // Assign the child process to the job + }); + }); + } + + /** + * Handle any errors raised during the job routine. If any process exits with a non-zero code + * this handler will divine the error, update the record and trigger the relevant job callbacks. 
+ * @param {Object} errored - The stdout, stderr, ChildProcess, exit code and signal, + * or a childProcess Error object. + */ + function handleError(errored) { + let message; // Error message to pass to job callbacks and to save into records + // The script that threw the error + const file = (errored instanceof Error) ? errored.path : errored.process.spawnfile; + + // Check if the error is a spawn error, this is thrown when spawn itself fails, i.e. due to + // missing shell script + if (errored instanceof Error) { + if (errored.code === 'ENOENT') { + // Note the missing file (not necessarily the task script that's missing) + message = file ? `File "${file}" not found` : 'No such file or directory'; + } else { + message = `${errored.code} - Failed to spawn ${file}`; + } + // Check if the process was killed (we'll assume by the test timeout callback) + } else if (errored.process.killed || errored.signal === 'SIGTERM') { + message = `Tests stalled after ~${(config.timeout / 60000).toFixed(0)} min`; + } else { // Error raised by process; dig through stdout for reason + debug('error from test function %s', file); + // Isolate error from log + // For MATLAB return the line that begins with 'Error' + let fn = (str) => { + return str.startsWith('Error in \''); + }; + message = errored.stderr.split(/\r?\n/).filter(fn).join(';'); + // For Python, cat from the lost line that doesn't begin with whitespace + if (!message && errored.stderr.includes('Traceback ')) { + let errArr = errored.stderr.split(/\r?\n/); + let idx = errArr.reverse().findIndex(v => { + return v.match('^\\S'); + }); + message = errored.stderr.split(/\r?\n/).slice(-idx - 1).join(';'); + } + // Check for flake8 errors, capture first (NB: flake8 sends output to stdout, not stderr) + if (!message && errored.stdout.match(/:\d+:\d+: [EWF]\d{3}/)) { + let errArr = errored.stdout.split(/\r?\n/); + let err = errArr.filter(v => { + return v.match(/[EWF]\d{3}/); + }); + message = `${err.length} flake8 
error${err.length === 1 ? '' : 's'}... ${err[0]}`; + } + // Otherwise simply use the full stderr (will be truncated) + if (!message) message = errored.stderr; + } + // Save error into records for future reference. + let report = { + 'commit': sha, + 'results': message, + 'status': 'error', + 'description': 'Error running ' + (file || 'test routine') + }; + saveTestRecords(report).then(() => { + debug('updated test records'); + }); + job.done(new Error(message)); // Propagate + } + + /** + * Update the job and mark complete. Called when job routine completes without error. + * @param {Object} proc - The stdout, stderr, ChildProcess, exit code and signal + */ + async function updateJob(proc) { + debug('Job routine complete'); + // Attempt to update the job data from the JSON records, throw error if this fails + if (!await updateJobFromRecord(job)) { + job.done(new Error('Failed to return test result')); + } else { + job.done(); // All good + } + } } @@ -282,39 +504,43 @@ function startJobTimer(job, kill_children=false) { * @param {Object} job - Job object which has finished being processed. 
*/ function computeCoverage(job) { - if (typeof job.data.coverage !== 'undefined' && job.data.coverage) { - console.log('Coverage already computed for job #' + job.id) - return; - } - console.log('Updating coverage for job #' + job.id) - const xmlPath = path.join(config.dataPath, 'reports', job.data.sha, 'CoverageResults.xml') - const modules = listSubmodules(process.env.REPO_PATH); - Coverage(xmlPath, job.data.repo, job.data.sha, modules, obj => { - // Digest and save percentage coverage - let misses = 0, hits = 0; - for (let file of obj.source_files) { - misses += file.coverage.filter(x => x === 0).length; - hits += file.coverage.filter(x => x > 0).length; + if (typeof job.data.coverage !== 'undefined' && job.data.coverage) { + console.log('Coverage already computed for job #' + job.id); + return; } - const coverage = hits / (hits + misses) * 100 // As percentage - job.data.coverage = coverage; // Add to job - // Load data and save TODO Move to saveTestRecord(s) function in lib - let records = JSON.parse(fs.readFileSync(config.dbFile, 'utf8')); - records = ensureArray(records); // Ensure array - for (let o of records) { if (o.commit === job.data.sha) { o.coverage = coverage; break; }} - // Save object - fs.writeFile(config.dbFile, JSON.stringify(records), function(err) { - if (err) { - job.status = 'error' - job.description = 'Failed to compute coverage from XML' - console.log(err); - return; - } - // If this test was to ascertain coverage, call comparison function - let toCompare = (job.data.context || '').startsWith('coverage') && job.data.base; - if (toCompare) { compareCoverage(job); } + console.log('Updating coverage for job #' + job.id); + const xmlPath = path.join(config.dataPath, 'reports', job.data.sha, 'CoverageResults.xml'); + const modules = listSubmodules(process.env.REPO_PATH); + return Coverage(xmlPath, job.data.repo, job.data.sha, modules).then(obj => { + // Digest and save percentage coverage + let misses = 0, hits = 0; + for (let file of 
obj.source_files) { + misses += file.coverage.filter(x => x === 0).length; + hits += file.coverage.filter(x => x > 0).length; + } + const coverage = hits / (hits + misses) * 100; // As percentage + job.data.coverage = coverage; // Add to job + // Load data and save TODO Move to saveTestRecord(s) function in lib + let records = JSON.parse(fs.readFileSync(config.dbFile, 'utf8')); + records = ensureArray(records); // Ensure array + for (let o of records) { + if (o.commit === job.data.sha) { + o.coverage = coverage; + break; + } + } + // Save object + return fs.promises.writeFile(config.dbFile, JSON.stringify(records)).then(() => { + console.log('Coverage saved into records'); + // If this test was to ascertain coverage, call comparison function + let toCompare = (job.data.context || '').startsWith('coverage') && job.data.base; + if (toCompare) return compareCoverage(job); + }); + }).catch(err => { + job.status = 'error'; + job.description = 'Failed to compute coverage from XML'; // Add error msg + console.error(err); }); - }); } @@ -324,72 +550,73 @@ function computeCoverage(job) { * @todo Add support for forked PRs */ function compareCoverage(job) { - let log = _log.extend('compareCoverage'); - if (!(job.data.sha && job.data.base)) { - throw new ReferenceError('No sha (head) or base commit in job data'); - } - log('Comparing coverage for %g -> %g', job.data.sha, job.data.base); - var records; - if (!job.data.coverage) { - log('No coverage in job data; loading from records'); - records = loadTestRecords([job.data.sha, job.data.base]); - // Filter duplicates just in case - records = records.filter((set => o => !set.has(o.commit) && set.add(o.commit))(new Set)); - } else { - let curr = JSON.parse(JSON.stringify( job.data )); // Make a copy - curr.commit = curr.sha; // rename field - records = [curr, loadTestRecords(job.data.base)]; - } - log('The following records were found: %O', records); - const hasCoverage = records.every(o => (o.coverage > 0)); - - // Check if any 
errored or failed to update coverage - if (records.filter(o => o.status === 'error').length > 0) { - log('One or more have error status; cannot compare coverage'); - job.data.status = 'failure'; - job.data.description = 'Failed to determine coverage as tests incomplete due to errors'; - - // Both records present and they have coverage - } else if (records.length === 2 && hasCoverage) { - log('Calculating coverage difference'); - // Ensure first record is for head commit - if (records[0].commit === job.data.base) { records.reverse() } - // Calculate coverage change - let delta = records[0].coverage - records[1].coverage; - let passed = config.strict_coverage? delta > 0 : delta >= 0; - job.data.status = (passed ? 'success' : 'failure'); - if (delta === 0) { - job.data.description = `Coverage remains at ${Math.round(records[1].coverage * 100) / 100}%`; + let log = _log.extend('compareCoverage'); + if (!(job.data.sha && job.data.base)) { + throw new ReferenceError('No sha (head) or base commit in job data'); + } + log('Comparing coverage for %g -> %g', job.data.sha, job.data.base); + var records; + if (!job.data.coverage) { + log('No coverage in job data; loading from records'); + records = loadTestRecords([job.data.sha, job.data.base]); + // Filter duplicates just in case + records = records.filter((set => o => !set.has(o.commit) && set.add(o.commit))(new Set)); } else { - job.data.description = `Coverage ${passed ? 
'increased' : 'decreased'} ` + - `from ${Math.round(records[1].coverage * 100) / 100}% ` + - `to ${Math.round(records[0].coverage * 100) / 100}%`; + let curr = JSON.parse(JSON.stringify(job.data)); // Make a copy + curr.commit = curr.sha; // rename field + records = [curr, loadTestRecords(job.data.base)]; + } + // log('The following records were found: %O', records); + const hasCoverage = records.every(o => (o.coverage > 0)); + + // Check if any errored or failed to update coverage + if (records.filter(o => o.status === 'error').length > 0) { + log('One or more have error status; cannot compare coverage'); + job.data.status = 'failure'; + job.data.description = 'Failed to determine coverage as tests incomplete due to errors'; + + // Both records present and they have coverage + } else if (records.length === 2 && hasCoverage) { + log('Calculating coverage difference'); + // Ensure first record is for head commit + if (records[0].commit === job.data.base) records.reverse(); + // Calculate coverage change + let delta = records[0].coverage - records[1].coverage; + let passed = config.strict_coverage ? delta > 0 : delta >= 0; + job.data.status = (passed ? 'success' : 'failure'); + if (delta === 0) { + job.data.description = `Coverage remains at ${Math.round(records[1].coverage * 100) / 100}%`; + } else { + job.data.description = `Coverage ${passed ? 'increased' : 'decreased'} `; + let previous = Math.round(records[1].coverage * 100) / 100; + let current = Math.round(records[0].coverage * 100) / 100; + job.data.description += (current === previous? 
'slightly' : `from ${previous}% to ${current}%`); + } + + } else { // We need to add a new job for incomplete coverage + log('Missing record for base commit; adding new jobs'); + // TODO This could be refactored for efficiency + // Ensure we have coverage for base branch + queue.add({ + skipPost: true, // don't post, to be left for next job + force: false, // should skip if coverage already saved + sha: job.data.base, + owner: process.env.REPO_OWNER, + repo: job.data.repo + }); + // Ensure we have coverage for head commit and post result + queue.add({ + skipPost: false, // don't post, to be left for next job + force: false, // should skip if coverage already saved + sha: job.data.sha, + base: job.data.base, + owner: process.env.REPO_OWNER, + repo: job.data.repo, + context: job.data.context // conserve context + }); + // Skip our current job as we're waiting for base coverage + job.data.skipPost = true; } - - } else { // We need to add a new job for incomplete coverage - log('Missing record for base commit; adding new jobs'); - // TODO This could be refactored for efficiency - // Ensure we have coverage for base branch - queue.add({ - skipPost: true, // don't post, to be left for next job - force: false, // should skip if coverage already saved - sha: job.data.base, - owner: process.env.REPO_OWNER, - repo: job.data.repo, - }); - // Ensure we have coverage for head commit and post result - queue.add({ - skipPost: false, // don't post, to be left for next job - force: false, // should skip if coverage already saved - sha: job.data.sha, - base: job.data.base, - owner: process.env.REPO_OWNER, - repo: job.data.repo, - context: job.data.context // conserve context - }); - // Skip our current job as we're waiting for base coverage - job.data.skipPost = true; - } } @@ -397,66 +624,93 @@ function compareCoverage(job) { * Get the coverage results and build status data for the shields.io coverage badge API. 
* If test results don't exist, a new job is added to the queue and the message is set to 'pending' * @param {Object} data - An object with the keys 'sha', 'repo', 'owner' and 'context'. - * 'context' must be 'coverage' or 'status'. + * 'context' must be 'coverage', 'build', or 'tests'. */ function getBadgeData(data) { - let id = data.sha; - if (!id) { - throw new ReferenceError('Invalid "sha" field in input data') - } - var report = {'schemaVersion': 1, 'label': data.context === 'status'? 'build' : 'coverage'}; - // Try to load coverage record - let record = data.force? [] : loadTestRecords(id); - // If no record found - if (record.length === 0) { - report['message'] = 'pending'; - report['color'] = 'orange'; - // Check test isn't already on the pile - let onPile = false; - for (let job of queue.pile) { if (job.data.sha === id) { onPile = true; break; } } - if (!onPile) { // Add test to queue - data['skipPost'] = true - queue.add(data); - } - } else { - record = Array.isArray(record) ? record.pop() : record; // in case of duplicates, take last - switch (data.context) { - case 'status': - if (record['status'] === 'error') { - report['message'] = 'unknown'; - report['color'] = 'orange'; - } else { - report['message'] = (record['status'] === 'success' ? 'passing' : 'failing'); - report['color'] = (record['status'] === 'success' ? 'brightgreen' : 'red'); + let id = data.sha; + if (!id) throw new ReferenceError('Invalid "sha" field in input data'); + const report = {'schemaVersion': 1, 'label': data.context}; + // Try to load coverage record + let record = data.force ? [] : loadTestRecords(id); + // If no record found + if (record.length === 0) { + report['message'] = data.context === 'tests'? 
'in progress' : 'pending'; + report['color'] = 'orange'; + // Check test isn't already on the pile + let onPile = false; + for (let job of queue.pile) { + if (job.data.sha === id) { + onPile = true; + break; } - break; - case 'coverage': - if (record['status'] === 'error' || !record['coverage']) { - report['message'] = 'unknown'; - report['color'] = 'orange'; - } else { - report['message'] = Math.round(record['coverage'] * 100) / 100 + '%'; - report['color'] = (record['coverage'] > 75 ? 'brightgreen' : 'red'); - } - break; - default: - if (!data['context']) { - throw new ReferenceError('Context required for badge request') - } else { - throw new TypeError('Unsupported context badge request') - } - } - } - return report; + } + if (!onPile) { // Add test to queue + data['skipPost'] = true; + queue.add(data); + } + } else { + record = Array.isArray(record) ? record.pop() : record; // in case of duplicates, take last + switch (data.context) { + case 'build': + if (record['status'] === 'error') { + report['message'] = 'errored'; + report['color'] = 'red'; + } else { + report['message'] = (record['status'] === 'success' ? 'passing' : 'failing'); + report['color'] = (record['status'] === 'success' ? 'brightgreen' : 'red'); + } + break; + case 'tests': + if (record['status'] === 'error') { + report['message'] = 'errored'; + report['color'] = 'red'; + } else { + if (record['statistics']) { + let pass = record['statistics']['passed']; + let fail = record['statistics']['failed'] + record['statistics']['errored']; + let skip = record['statistics']['skipped']; + report['message'] = `${pass} passed`; + if (fail > 0) { + report['message'] += `, ${fail} failed`; + } + if (skip > 0) { + report['message'] += `, ${skip} skipped`; + } + } else { + report['message'] = (record['status'] === 'success' ? 'passed' : 'failed'); + } + report['color'] = (record['status'] === 'success' ? 
'brightgreen' : 'red'); + } + + break; + case 'coverage': + if (record['status'] === 'error' || !record['coverage']) { + report['message'] = 'unknown'; + report['color'] = 'orange'; + } else { + report['message'] = Math.round(record['coverage'] * 100) / 100 + '%'; + report['color'] = (record['coverage'] > 75 ? 'brightgreen' : 'red'); + } + break; + default: + if (!data['context']) { + throw new ReferenceError('Context required for badge request'); + } else { + throw new TypeError('Unsupported context badge request'); + } + } + } + return report; } class APIError extends Error { - //... + //... } module.exports = { - ensureArray, loadTestRecords, compareCoverage, computeCoverage, getBadgeData, log, shortID, - openTunnel, APIError, queue, partial, startJobTimer, updateJobFromRecord, shortCircuit, isSHA, - fullpath, strToBool, saveTestRecords, listSubmodules, getRepoPath -} + ensureArray, loadTestRecords, compareCoverage, computeCoverage, getBadgeData, log, shortID, + openTunnel, APIError, queue, partial, startJobTimer, updateJobFromRecord, shortCircuit, isSHA, + fullpath, strToBool, saveTestRecords, listSubmodules, getRepoPath, addParam, context2routine, + buildRoutine +}; diff --git a/main.js b/main.js index 15fa8ba..b40766a 100644 --- a/main.js +++ b/main.js @@ -4,19 +4,17 @@ * @todo save auxiliary configuration into a separate config file * @todo add abort option for when new commits added * @todo rename context to description and use context to track posts - * @todo fix intentions */ -const { openTunnel, queue, shortCircuit} = require('./lib'); -const { srv, handler, eventCallback, runTests, prepareEnv} = require('./serve'); -const config = require("./config/config").settings; +const {openTunnel, queue, shortCircuit, buildRoutine} = require('./lib'); +const {srv, handler, eventCallback} = require('./serve'); +const config = require('./config/config').settings; /** * Build queue processing pipeline. 
The shortCircuit call checks whether the results may be loaded from file, * bypassing the test function. */ -const run = (job) => { prepareEnv(job, runTests); }; -queue.process((job) => { shortCircuit(job, run); }); +queue.process((job) => { shortCircuit(job, buildRoutine); }); // NB: Only the supported events make it this far (i.e. push and pull requests) handler.on('*', evt => eventCallback(evt)); @@ -32,32 +30,25 @@ handler.on('*', evt => eventCallback(evt)); queue.on('error', _ => {}); -// Log handler errors -handler.on('error', function (err) { - console.error('Error:', err.message); -}) - // Log any unhandled errors process.on('unhandledRejection', (reason, p) => { - console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); - console.log(reason.stack) + console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); + console.log(reason.stack); }); ///////////////////// START TUNNEL ///////////////////// openTunnel().then( - () => { - // Start the server on same port as tunnel - var server = srv.listen(config.listen_port, function () { - let host = server.address().address; - let port = server.address().port; - - console.log("Handler listening at http://%s:%s", host, port); - }); - }, - (e) => { - throw e; - } -) + () => { + // Start the server on same port as tunnel + var server = srv.listen(config.listen_port, function () { + let host = server.address().address; + let port = server.address().port; + + console.log('Handler listening at http://%s:%s', host, port); + }); + }, + (e) => { throw e; } +); diff --git a/package-lock.json b/package-lock.json index a22fbe2..4b72bc2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { - "name": "matlab-ci", - "version": "2.0.0", + "name": "LabCI", + "version": "3.0.0", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 85edd49..dbc7fee 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { - "name": "matlab-ci", - 
"version": "2.0.0", + "name": "LabCI", + "version": "3.0.0", "description": "A small set of modules written in Node.js for running automated tests of MATLAB and Python code in response to GitHub events. Also submits code coverage to the Coveralls API.", "main": "main.js", "scripts": { diff --git a/public/format.js b/public/format.js new file mode 100644 index 0000000..1dbfb24 --- /dev/null +++ b/public/format.js @@ -0,0 +1,135 @@ +/** + * A map of class ids and the regular expressions that capture the text to style + */ +const regExps = { + errorStack: /^Traceback.*\r?\n(?:^\s+.*\r?\n)+/gm, // Error stack + error: /^\w*(Error|Exception).*\r?\n/gm, // Error statement + warning: /\w*Warning:.*\r?\n(?:^\s+.*\r?\n)/gm, // Warning + logDebug: /.*DEBUG.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.debug + logInfo: /.*INFO.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.info + logWarn: /.*WARNING.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.warning + logError: /.*ERROR.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.error + logCritical: /.*CRITICAL.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.critical + flake8: /^[a-zA-Z\/\\._]+:\d+:\d+: [EWF]\d{3} .*$/gm // flake8 error +}; +const cursor = ''; +let timer = null; +let lastModified = null; +const id = window.location.pathname.split('/').pop(); +const heading = 'Job log for commit ' + shortID(id); +document.addEventListener('DOMContentLoaded', function() { + document.querySelector('h1').innerText = heading; +}, false); + +/** + * Given some text and a class name, return the text wrapped in a span of that class. + */ +function toSpan(text, className) { + return '' + text + ''; +} + +function escapeHTML(str){ + return new Option(str).innerHTML; +} + +/** + * Return a shortened version of an int or string id + * @param {any} v - ID to shorten. + * @param {int} len - Maximum number of chars. + * @returns {String} v as a short string. 
+ */ +function shortID(v, len=7) { + if (Array.isArray(v)) { return v.map(v => shortID(v, len)); } + if (Number.isInteger(v)) { v = v.toString(); } + if (typeof v === 'string' || v instanceof String) { v = v.substr(0, len); } + return v; // If not string, array or number, leave unchanged +} + +/** + * Fetch the raw log text from remote. + */ +async function updateLog() { + const contentDiv = document.querySelector('pre'); + const queryString = window.location.search; + const urlParams = new URLSearchParams(queryString); + + const url = '/logs/raw/' + id; + // If the console is empty, add some loading text + if (!contentDiv.innerHTML) { + contentDiv.innerHTML = 'Loading log....' + cursor; + } + + // Fetch the remote log text + console.debug('Reloading log'); + let options = {}; + if (lastModified) { + options['headers'] = { 'If-Modified-Since': lastModified }; + } + if (urlParams.has('type')) { + options['query'] = {'type': urlParams.get('type')}; + } + + let response = await fetch(url, options); + if (response.status === 304) { + console.debug('Log unchanged'); + return; + } else if (response.status !== 200) { + console.error('Failed to return the log file'); + // If never loaded, change console text + if (!lastModified) { + contentDiv.innerHTML = toSpan('ERROR: Failed to load log', 'error'); + } + return; + } + lastModified = response.headers.get('Last-Modified'); + const jobStatus = response.headers.get('X-CI-JobStatus'); + let log = await (response).text(); + log = escapeHTML(log); + + // Apply the regex for styling/highlighting the text + // http://ascii-table.com/ansi-escape-sequences-vt-100.php + // https://github.com/F1LT3R/parse-ansi + if (urlParams.get('formatting') !== 'off') { + log = log.replace(/\x1b?\[\d+m/gm, ''); // Remove ANSI color codes + for (let style in regExps) { + log = log.replace(regExps[style], x => toSpan(x, style)); + } + } + + // If not static, add a little blinking cursor to indicate activity + const isRunning = ['queued', 
'running'].includes(jobStatus); + if (isRunning) { log += cursor; } + + // Check if you're at the bottom + const elem = document.getElementById('console'); + const atBottom = elem.scrollHeight - elem.scrollTop === elem.clientHeight; + + // Update console text + contentDiv.innerHTML = log; + + // If you were at the bottom, update scroll position + if (atBottom) { + console.debug('Setting scroll height') + elem.scrollTop = elem.scrollHeight; + } + + // Set title, etc. + const header = document.querySelector('h1'); + header.innerText = `${heading} | ${jobStatus.toUpperCase()}`; + document.title = `Job ${jobStatus} for commit ${shortID(id)}`; + document.getElementById('date').innerText = new Date(lastModified).toLocaleString(); + + if (!timer && (urlParams.has('refresh') || isRunning)) { + console.debug('Setting reload timer'); + const timeout = (urlParams.get('refresh') || 2) * 1000; // default 2 sec + const minTimeout = 500; // ms + timer = setInterval(updateLog, Math.max(timeout, minTimeout)); + } else if (response.ok && jobStatus === 'finished' && timer) { + console.debug('Clearing reload timer'); + clearInterval(timer); + timer = null; + } + +} + +document.addEventListener('DOMContentLoaded', updateLog, false); diff --git a/public/highlight.css b/public/highlight.css new file mode 100644 index 0000000..c951620 --- /dev/null +++ b/public/highlight.css @@ -0,0 +1,24 @@ +.logDebug { + color: green; +} + +.logInfo { + color: cyan; +} + +.logWarn, .warning { + color: orange; +} + +.logError { + color: red; +} + +.logCritical { + color: purple; +} + +.error, .flake8 { + color: red; + font-weight: bold; +} diff --git a/public/log.html b/public/log.html new file mode 100644 index 0000000..f4942ef --- /dev/null +++ b/public/log.html @@ -0,0 +1,23 @@ + + + + + CI Log + + + + + +

+

+ + + diff --git a/public/style-clean.css b/public/style-clean.css new file mode 100644 index 0000000..626505f --- /dev/null +++ b/public/style-clean.css @@ -0,0 +1,83 @@ +#console { + position: relative; + margin: auto; + width: 80%; + display: flex; + background-color: burlywood; /* #af926b */ + height: 80vh; + overflow: auto; + padding: 1rem; + color: black; + font: .8rem Inconsolata, monospace; + scrollbar-width: thin; +} + +p#date { + position : absolute; + top : -5px; + right : 10px; + color: grey; +} + +h1 { + text-align:center; +} + +footer { + text-align: center; + padding: 1px; + background-color: bisque; +} + +a { + color: black; +} + +::selection { + background: #ffffff; + color: #000000; +} + +.blinking-cursor { + color: #2E3D48; + -webkit-animation: 1s blink step-end infinite; + -moz-animation: 1s blink step-end infinite; + -o-animation: 1s blink step-end infinite; + animation: 1s blink step-end infinite; +} + +@keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + } +} + +@-moz-keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + } +} + +@-webkit-keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + } +} + +@-o-keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + } +} diff --git a/public/style.css b/public/style.css new file mode 100644 index 0000000..d8031b2 --- /dev/null +++ b/public/style.css @@ -0,0 +1,109 @@ +#console { + position: relative; + margin: auto; + width: 80%; + display: flex; + border: 5px solid beige; + background-color: black; + height: 80vh; + overflow: auto; + padding: 1rem; + color: white; + font: .8rem Inconsolata, monospace; + text-shadow: currentcolor 0 0 5px; + scrollbar-width: thin; +} + +p#date { + position : absolute; + top : -5px; + right : 10px; + color: grey; + text-shadow: none; +} + +h1 { + text-align:center; +} + +footer { + text-align: center; + padding: 1px; + background-color: bisque; +} + +a { + 
color: black; +} + +body::after { + content: ""; + position: fixed; + top: 0; + left: 0; + width: 100vw; + height: 100vh; + background: repeating-linear-gradient( + 0deg, + rgba(0, 0, 0, 0.15), + rgba(0, 0, 0, 0.15) 1px, + transparent 1px, + transparent 2px + ); + pointer-events: none; +} + +::selection { + background: #ffffff; + color: #000000; + text-shadow: none; +} + +.blinking-cursor { + color: #2E3D48; + text-shadow: 0 0 1px #C8C8C8; + -webkit-animation: 1s blink step-end infinite; + -moz-animation: 1s blink step-end infinite; + -o-animation: 1s blink step-end infinite; + animation: 1s blink step-end infinite; +} + +@keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + text-shadow: none; + } +} + +@-moz-keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + text-shadow: none; + } +} + +@-webkit-keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + text-shadow: none; + } +} + +@-o-keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + text-shadow: none; + } +} diff --git a/queue.js b/queue.js index 2bf66c4..b62943a 100644 --- a/queue.js +++ b/queue.js @@ -1,5 +1,4 @@ -var EventEmitter = require('events').EventEmitter -const assert = require('assert') +var EventEmitter = require('events').EventEmitter; /** * Queue module allows one to add tasks to a queue which are processed sequentially as FILO. @@ -20,119 +19,142 @@ const assert = require('assert') /** Class representing a Queue API. */ class Queue extends EventEmitter { - pile = []; - /** - * Create queue to add jobs to. - * @param {string} path - Path to saved queue object (TODO). - * @property {Array} pile - Array of queued job objects. - * @property (Function) _process - Handle to job process function. 
- * @event module:Queue~finish - * @event module:Queue~error - * @event module:Queue~complete - * @listens module:Queue~event:finish - * @see {@link Job} - */ + pile = []; - constructor(timeout, path) { - super(); - // Initialize properties - this.path = typeof path == 'undefined' ? './queue.json' : path; //TODO Implement - this.on('finish', function () { // Each time a job finishes... - this.pile.shift(); // take off pile - this.next()}); // start next job - } + /** + * Create queue to add jobs to. + * @param {string} path - Path to saved queue object (TODO). + * @property {Array} pile - Array of queued job objects. + * @property (Function) _process - Handle to job process function. + * @event module:Queue~finish + * @event module:Queue~error + * @event module:Queue~complete + * @listens module:Queue~event:finish + * @see {@link Job} + */ - /** - * Create new job and add to queue. - * @param {Object} data - Data object to be stored in {@link Job}. - */ - add(data) { - // generate 16 digit job id - let d = Date.now().toString() - let r = Math.floor(Math.random() * 1000).toString() - let id = Number((r + d).padEnd(16, '0')) - this.pile.push(new Job(id, data)); // add to bottom of pile - console.log('Job added (' + this.pile.length + ' on pile)') - this.next(); // Start next job if idle - } + constructor(timeout, path) { + super(); + // Initialize properties + this.path = typeof path == 'undefined' ? './queue.json' : path; //TODO Implement + this.on('finish', function () { // Each time a job finishes... + this.pile.shift(); // take off pile + this.next(); + }); // start next job + } - /** - * Process next job if any are on pile. - */ - next() { - if (this.pile.length > 0 && this.pile[0].running === false) { - console.log('Starting next job') - this._process(this.pile[0]) + /** + * Create new job and add to queue. + * @param {Object} data - Data object to be stored in {@link Job}. + * @return {Job} - The newly created job. 
+ */ + add(data) { + // generate 16 digit job id + let d = Date.now().toString(); + let r = Math.floor(Math.random() * 1000).toString(); + let id = Number((r + d).padEnd(16, '0')); + const job = new Job(id, data); // create job + this.pile.push(job); // add to bottom of pile + console.log('Job added (' + this.pile.length + ' on pile)'); + this.next(); // Start next job if idle + return job; } - } - /** - * Create callback to be triggered when process function completes. - * @param {Object} job - {@link Job} object. - * @returns {function} 'done' callback to be called by process function - */ - createDoneCallback(job) { - const obj = this; - return function( err ) { - job.isRunning = false; // set false (will emit 'end') - if( err ) { obj.emit('error', err, job); } - else { obj.emit('complete', job) } - obj.emit('finish', err, job); - } + /** + * Process next job if any are on pile. + */ + next() { + if (this.pile.length > 0 && this.pile[0].running === false) { + console.log('Starting next job'); + this._process(this.pile[0]); + } + } - } + /** + * Create callback to be triggered when process function completes. + * @param {Object} job - {@link Job} object. + * @returns {function} 'done' callback to be called by process function + */ + createDoneCallback(job) { + const obj = this; + return function (err) { + job.isRunning = false; // set false (will emit 'end') + if (err) obj.emit('error', err, job); + else obj.emit('complete', job); + obj.emit('finish', err, job); + }; - /** - * Create callback to be triggered when process function completes. - * @param {Function} func - Function to call with job and done callback when. - * @todo make done callback part of job obj? - */ - process(func) { - this._process = async (job) => { - job.done = this.createDoneCallback(job); - job.isRunning = true; - setImmediate(func, job, job.done); - console.log('Job running') - }; - } + } + + /** + * Create callback to be triggered when process function completes. 
+ * @param {Function} func - Function to call with job and done callback when. + * @todo make done callback part of job obj? + */ + process(func) { + this._process = async (job) => { + job.done = this.createDoneCallback(job); + job.isRunning = true; + setImmediate(func, job, job.done); + console.log('Job running'); + }; + } } /** Class representing a job in the Queue. */ class Job extends EventEmitter { - id; - data; - running; - /** - * Create a job object with associated data. - * @param {number} id - Job ID (unique in current Queue pile). - * @param {Object} data - Data to hold in object, may be used by Queue process function. - * @property {boolean} running - Indicates whether job is currently being processed. - * @event module:Job~end - */ - constructor(id, data) { - super(); - //console.log('Job ' + id + ' constructor called') - // Initialize properties - this.id = id; - this.data = data; - this.running = false; - } + id; + data; + running; + created; + _child; + + /** + * Create a job object with associated data. + * @param {number} id - Job ID (unique in current Queue pile). + * @param {Object} data - Data to hold in object, may be used by Queue process function. + * @property {boolean} running - Indicates whether job is currently being processed. + * @event module:Job~end + */ + constructor(id, data) { + super(); + //console.log('Job ' + id + ' constructor called') + // Initialize properties + this.id = id; + this.data = data; + this.running = false; + this.created = new Date(); + } + + /** + * Set running attribute. If setting to false from true, emit 'end' event. + * @param {boolean} bool - Value to set running. + * @todo rename to be consistent with property + */ + set isRunning(bool) { + if (bool === false && this.running === true) { + this.running = false; + this.emit('end'); + } else { + if (bool === true) { + this.running = true; + } + } + } - /** - * Set running attribute. If setting to false from true, emit 'end' event. 
- * @param {boolean} bool - Value to set running. - * @todo rename to be consistent with property - */ - set isRunning(bool) { - if (bool === false && this.running === true) { - this.running = false; - this.emit('end'); - } else { - if (bool === true) { - this.running = true; - } + /** + * Set child attribute. Checks that the job is currently running and that any previous child + * process is not currently running. + * @param {ChildProcess} process - Value to set running. + */ + set child(process) { + if (!this.running) { + throw new Error('Cannot add child process while Job not running'); + } else if (this._child && this._child.exitCode === null) { + throw new Error('Job can only be associated with one running process'); + } + this._child = process; } - } } diff --git a/serve.js b/serve.js index c6bc1fe..497ee4d 100644 --- a/serve.js +++ b/serve.js @@ -4,12 +4,10 @@ */ const fs = require('fs'); const path = require('path'); -const cp = require('child_process'); const express = require('express'); const srv = express(); -const shell = require('shelljs'); -const app = require("@octokit/auth-app"); +const app = require('@octokit/auth-app'); const { request } = require('@octokit/request'); const config = require('./config/config').settings; @@ -26,20 +24,24 @@ const secret = process.env['GITHUB_WEBHOOK_SECRET']; // Currently this app is only set up to process push and pull request events so we will have the // handler reject any others. We will also check that only these are set up in the config. 
const supportedEvents = ['push', 'pull_request']; // events the ci can handle -const maxN = 140; // The maximum n chars of the status description const ENDPOINT = 'logs'; // The URL endpoint for fetching status check details +// An optional static directory for serving css files +const STATIC = 'public'; // Check all config events are supported const events = Object.keys(config.events); -if (events.some(evt => { return !supportedEvents.includes(evt); })) { - let errStr = 'One or more events in config not supported. ' + - `The following events are supported: ${supportedEvents.join(', ')}`; - throw new ReferenceError(errStr) +if (events.some(evt => { + return !supportedEvents.includes(evt); +})) { + let errStr = 'One or more events in config not supported. ' + + `The following events are supported: ${supportedEvents.join(', ')}`; + throw new ReferenceError(errStr); } // Create handler to verify posts signed with webhook secret. Content type must be application/json const createHandler = require('github-webhook-handler'); -const handler = createHandler({ path: '/github', secret: secret, events: supportedEvents}); +const handler = createHandler({path: '/github', secret: secret, events: supportedEvents}); + /** * Fetch and assign the installation access token. 
Should be called each time a POST is made to @@ -49,25 +51,25 @@ const handler = createHandler({ path: '/github', secret: secret, events: support async function setAccessToken() { let debug = log.extend('auth'); // Return if token still valid - if (new Date(token.expiresAt) > new Date()) { return; } + if (new Date(token.expiresAt) > new Date()) return; // Create app instance for authenticating our GitHub app const auth = app.createAppAuth({ - appId: process.env['GITHUB_APP_IDENTIFIER'], - privateKey: fs.readFileSync(process.env['GITHUB_PRIVATE_KEY']), - webhooks: { secret } + appId: process.env['GITHUB_APP_IDENTIFIER'], + privateKey: fs.readFileSync(process.env['GITHUB_PRIVATE_KEY']), + webhooks: {secret} }); if (token.tokenType !== 'installation') { debug('Fetching install ID'); // Retrieve JSON Web Token (JWT) to authenticate as app - token = await auth({type: "app"}); + token = await auth({type: 'app'}); // Get installation ID - const {data: {id}} = await request("GET /repos/:owner/:repo/installation", { + const {data: {id}} = await request('GET /repos/:owner/:repo/installation', { owner: process.env['REPO_OWNER'], repo: process.env['REPO_NAME'], headers: { authorization: `bearer ${token.token}`, - accept: "application/vnd.github.machine-man-preview+json" + accept: 'application/vnd.github.machine-man-preview+json' } }); token.installationId = id; @@ -90,7 +92,7 @@ async function setAccessToken() { * Failed spoof attempts may end up here but most likely it will be unsupported webhook events. 
*/ handler.on('error', function (err) { - console.log('Error:', err.message); + console.log('Error:', err.message); }); /** @@ -102,16 +104,16 @@ handler.on('error', function (err) { * @todo split auth and handler middleware */ srv.post('/github', async (req, res, next) => { - console.log('Post received') - let id = req.header('x-github-hook-installation-target-id'); - if (id != process.env.GITHUB_APP_IDENTIFIER) { next(); return; } // Not for us; move on - if (req.header('X-GitHub-Event') in supportedEvents) { - await setAccessToken(); - handler(req, res, () => res.end('ok')); - } else { - log('GitHub Event "%s" not supported', req.header('X-GitHub-Event')); - res.sendStatus(400); - } + console.log('Post received'); + let id = req.header('x-github-hook-installation-target-id'); + if (id != process.env.GITHUB_APP_IDENTIFIER) { next(); return; } // Not for us; move on + if (supportedEvents.includes(req.header('X-GitHub-Event'))) { + await setAccessToken(); + handler(req, res, () => res.end('ok')); + } else { + log('GitHub Event "%s" not supported', req.header('X-GitHub-Event')); + res.sendStatus(400); + } }); @@ -124,266 +126,230 @@ srv.post('/github', async (req, res, next) => { * @param {string} [module] - (Sub)module name. REPO_NAME by default. * @return {Promise} - Resolved to full commit SHA. */ -function fetchCommit(id, isBranch=null, module) { - isBranch = isBranch === null ? !lib.isSHA(id) : isBranch - const data = { - owner: process.env['REPO_OWNER'], - repo: module || process.env.REPO_NAME, - id: id - }; - let endpoint = `GET /repos/:owner/:repo/${isBranch ? 'branches': 'commits'}/:id`; - return request(endpoint, data).then(response => { - return isBranch ? response.data.commit.sha : response.data.sha; - }); +function fetchCommit(id, isBranch = null, module) { + isBranch = isBranch === null ? 
!lib.isSHA(id) : isBranch; + const data = { + owner: process.env['REPO_OWNER'], + repo: module || process.env.REPO_NAME, + id: id + }; + let endpoint = `GET /repos/:owner/:repo/${isBranch ? 'branches' : 'commits'}/:id`; + return request(endpoint, data).then(response => { + return isBranch ? response.data.commit.sha : response.data.sha; + }); } /** * Parse the short SHA or branch name and redirect to static reports directory. */ srv.get(`/coverage/:id`, (req, res) => { - let id = lib.shortID(req.params.id); - let isSHA = (req.query.branch || !lib.isSHA(req.params.id)) === false; - console.log('Request for test coverage for ' + (isSHA? `commit ${id}` : `branch ${req.params.id}`)); - fetchCommit(req.params.id, !isSHA, req.query.module) - .then(id => { - log('Commit ID found: %s', id); - res.redirect(301, `/${ENDPOINT}/coverage/${id}`); - }) - .catch(err => { - log('%s', err.message); - res.statusCode = 404; - res.send(`Coverage for ${isSHA? 'commit' : 'branch'} ${req.params.id} not found`); - }); -}) + let id = lib.shortID(req.params.id); + let isSHA = (req.query.branch || !lib.isSHA(req.params.id)) === false; + console.log('Request for test coverage for ' + (isSHA ? `commit ${id}` : `branch ${req.params.id}`)); + fetchCommit(req.params.id, !isSHA, req.query.module) + .then(id => { + log('Commit ID found: %s', id); + res.redirect(301, `/${ENDPOINT}/coverage/${id}`); + }) + .catch(err => { + log('%s', err.message); + res.statusCode = 404; + res.send(`Coverage for ${isSHA ? 'commit' : 'branch'} ${req.params.id} not found`); + }); +}); /** * Serve the reports tree as a static resource; allows users to inspect the HTML coverage reports. * We will add a link to the reports in the check details. */ -srv.use(`/${ENDPOINT}/coverage`, express.static(path.join(config.dataPath, 'reports'))) +srv.use(`/${ENDPOINT}/coverage`, express.static(path.join(config.dataPath, 'reports'))); + +/** + * Serve the css and javascript for the log Webpage. 
+ */ +srv.use(`/static`, express.static(STATIC)); /** * Serve the test records for requested commit id. Returns JSON data for the commit. + * If no record exists and a job is queued the job data is sent, otherwise a 404. */ srv.get(`/${ENDPOINT}/records/:id`, function (req, res) { - let id = lib.shortID(req.params.id); - let isSHA = (req.query.branch || !lib.isSHA(req.params.id)) === false; - console.log('Request for test records for ' + (isSHA? `commit ${id}` : `branch ${req.params.id}`)); - fetchCommit(req.params.id, !isSHA, req.query.module) - .then(id => { - log('Commit ID found: %s', id); - let record = lib.loadTestRecords(id); - if (record) { - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(record)); - } else { + let id = lib.shortID(req.params.id); + let isSHA = (req.query.branch || !lib.isSHA(req.params.id)) === false; + console.log('Request for test records for ' + (isSHA ? `commit ${id}` : `branch ${req.params.id}`)); + fetchCommit(req.params.id, !isSHA, req.query.module) + .then(id => { + log('Commit ID found: %s', id); + let record = lib.loadTestRecords(id); + if (record.length !== 0) { + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify(record)); + } else { + // Check if on pile + for (let job of queue.pile) { + if (job.data.sha === id) { + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify(job.data)); + return; + } + } + // Not on pile, return 404 + res.statusCode = 404; + res.send(`Record for ${isSHA ? 'commit' : 'branch'} ${id} not found.`); + } + }) + .catch(err => { + if (err.status === 404) { + res.statusCode = 404; + res.send(`${isSHA ? 'Commit' : 'Branch'} ${req.params.id} not found.`); + } else { + log('%s', err.message || err.name); + res.statusCode = 500; + res.send('Failed to read test records JSON'); + } + }); +}); + +/** + * Serve the test results for requested commit id. This endpoint parses and validates the id. 
+ * If it corresponds to a valid commit SHA, the user is redirected to the log endpoint. + */ +srv.get(`/${ENDPOINT}/:id`, function (req, res) { + let id = lib.shortID(req.params.id); + let log_only = (req.query.type || '').startsWith('log'); + let isSHA = (req.query.branch || !lib.isSHA(req.params.id)) === false; + console.log( + `Request for test ${log_only ? 'log' : 'stdout'} for ` + + (isSHA ? `commit ${id}` : `branch ${req.params.id}`) + ); + fetchCommit(req.params.id, !isSHA, req.query.module) + .then(id => res.redirect(301, '/log/' + id)) + .catch(err => { + log('%s', err.message); res.statusCode = 404; - res.send(`${isSHA? 'Commit' : 'Branch'} ${id} not recognized.`); - } - }) - .catch(err => { - log('%s', err.message); - res.statusCode = 404; - res.send(`Record for ${isSHA? 'commit' : 'branch'} ${req.params.id} not found`); - }); + res.send(`Record for ${isSHA ? 'commit' : 'branch'} ${req.params.id} not found`); + }); }); + /** * Serve the test results for requested commit id. This will be the result of a user clicking on * the 'details' link next to the continuous integration check. The result should be an HTML * formatted copy of the stdout for the job's process. */ -srv.get(`/${ENDPOINT}/:id`, function (req, res) { - let id = lib.shortID(req.params.id); - let log_only = (req.query.type || '').startsWith('log') - let isSHA = (req.query.branch || !lib.isSHA(req.params.id)) === false; - console.log( - `Request for test ${log_only ? 'log' : 'stdout'} for ` + - (isSHA? `commit ${id}` : `branch ${req.params.id}`) - ); - fetchCommit(req.params.id, !isSHA, req.query.module) - .then(id => { - let filename = log_only? `test_output.log` : `std_output-${lib.shortID(req.params.id)}.log`; - let logFile = path.join(config.dataPath, 'reports', id, filename); - fs.readFile(logFile, 'utf8', (err, data) => { - if (err) { - log('%s', err.message); - res.statusCode = 404; - res.send(`Record for ${isSHA? 
'commit' : 'branch'} ${id} not found`); - } else { - res.statusCode = 200; - // Wrap in HTML tags so that the formatting is a little nicer. - let preText = '
';
-               let postText = '
'; - res.send(preText + data + postText); - } - }); - }) - .catch(err => { - log('%s', err.message); - res.statusCode = 404; - res.send(`Record for ${isSHA? 'commit' : 'branch'} ${req.params.id} not found`); - }); +srv.get(`/log/:id`, function (req, res) { + try { // Send static HTML page template + res.sendFile(path.join(__dirname, STATIC, 'log.html')); + } catch (err) { + log('%s', err.message); + res.statusCode = 404; + res.send(`Record for commit ${req.params.id} not found`); + } }); -///////////////////// SHIELDS API EVENTS ///////////////////// - /** - * Serve the coverage results and build status for the shields.io coverage badge API. Attempts to - * load the test results from file and if none exist, adds a new job to the queue. + * Serve the log file for requested commit id. This endpoint is fetched by the format.js script + * client side. Returns the raw text log along with a header to indicate whether the job is + * active. If the log hasn't changed since the last request, a 304 is returned instead. */ -srv.get('/:badge/:repo/:branch', async (req, res) => { - const data = { - owner: process.env['REPO_OWNER'], - repo: req.params.repo, - branch: req.params.branch, - } - // Find head commit of branch - return request('GET /repos/:owner/:repo/git/refs/heads/:branch', data) - .then(response => { - data['context'] = req.params.badge; - data['sha'] = response.data.object.sha; - data['force'] = req.query.force === '' || lib.strToBool(req.query.force); - console.log(`Request for ${data.branch} ${data.context}`) - const report = lib.getBadgeData(data); // TODO If pending return 201, else 200 - // Send report - res.setHeader('Content-Type', 'application/json'); - res.end(JSON.stringify(report));}) - .catch(err => { // Specified repo or branch not found - console.error(`${data.owner}/${data.repo}/${data.branch} not found`) - res.sendStatus((err.status === 404) ? 
404 : 500)
-    });
+srv.get(`/${ENDPOINT}/raw/:id`, function (req, res) {
+    let id = lib.shortID(req.params.id);
+    let log_only = (req.query.type || '').startsWith('log');
+    // let default_context = '';
+    // for (let x of config.events) {
+    //     if (x.checks) {
+    //         default_context = '_' + (Array.isArray(x.checks)? x.checks.pop(): x.checks);
+    //         break;
+    //     }
+    // }
+    let filename = log_only ? `test_output.log` : `std_output-${id}.log`;
+    let jobStatus = 'finished';
+    for (let job of queue.pile) {
+        if (job.data.sha === req.params.id) {
+            jobStatus = job.running === true ? 'running' : 'queued';
+            break;
+        }
+    }
+
+    if (jobStatus === 'queued') {
+        res.statusCode = 200;
+        res.header('X-CI-JobStatus', jobStatus);
+        res.send('Job waiting to start...');
+        return;
+    }
+
+    const options = {
+        root: path.join(config.dataPath, 'reports', req.params.id),
+        headers: {
+            'X-CI-JobStatus': jobStatus
+        }
+    };
+
+    res.sendFile(filename, options, function (err) {
+        if (err) {
+            console.error('Failed to send log: ', err);
+            res.statusCode = 404;
+            res.send(`${req.params.id} not found`);
+        } else {
+            log('Sent:', filename);
+        }
+    });
+
 });
 
-///////////////////// QUEUE EVENTS /////////////////////
+/**
+ * Serve a list of currently queued jobs.
+ */
+srv.get('/jobs', function (req, res) {
+    const data = {total: queue.pile.length, pile: queue.pile};
+    const replacer = (key, value) => {
+        return (key[0] === '_') ? 
undefined : value; + }; + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify(data, replacer)); +}); -function runTests(job) { - const debug = log.extend('runTests'); - debug('starting job timer'); - const timer = lib.startJobTimer(job, config.kill_children === true); - - // Go ahead with tests - const sha = job.data['sha']; - const repoPath = lib.getRepoPath(job.data.repo); - const logName = path.join(config.dataPath, 'reports', sha, `std_output-${lib.shortID(sha)}.log`); - let fcn = lib.fullpath(config.test_function); - debug('starting test child process %s', fcn); - let ops = config.shell? {'shell': config.shell} : {}; - const runTests = cp.execFile(fcn, [sha, repoPath, config.dataPath], ops, (error, stdout, stderr) => { - debug('clearing job timer'); - clearTimeout(timer); - delete job.data.process; - if (error) { // Send error status - let message; - if (error.killed || error.signal === 'SIGTERM') { - message = `Tests stalled after ~${(config.timeout / 60000).toFixed(0)} min`; - } else { - debug('error from test function: %o', error) - // Isolate error from log - // For MATLAB return the line that begins with 'Error' - let fn = (str) => { return str.startsWith('Error in \'') }; - message = stderr.split(/\r?\n/).filter(fn).join(';'); - // For Python, cat from the lost line that doesn't begin with whitespace - if (!message) { - let errArr = stderr.split(/\r?\n/); - let idx = errArr.reverse().findIndex(v => {return v.match('^\\S')}); - message = stderr.split(/\r?\n/).slice(-idx-1).join(';'); - } - if (!message) { message = error.code; } - } - // Save error into records for future reference. 
NB: This is currently not done for prepEnv errors - let report = { - 'commit': sha, - 'results': message, - 'status': 'error', - 'description': 'Error running ' + (config.test_function || 'test function') - }; - lib.saveTestRecords(report).then(() => { debug('updated test records'); }); - job.done(new Error(message)); // Propagate - } else { - if (!lib.updateJobFromRecord(job)) { - job.done(new Error('Failed to return test result')); - } else { - job.done(); - } - } - }); - job.data.process = runTests; - - // Write output to file - runTests.stdout.pipe(process.stdout); // Pipe to display - let logDump = fs.createWriteStream(logName, { flags: 'a' }); - runTests.stdout.pipe(logDump); - runTests.on('exit', () => { logDump.close(); }); - return runTests; -} -function prepareEnv(job, callback) { - log('Preparing environment for job #%g', job.id) - const repoPath = lib.getRepoPath(job.data.repo); - switch (config.setup_function) { - case undefined: - // run some basic git commands - checkout(repoPath, job.data.sha); - return callback(job); - case null: // No prep required - return callback(job); - default: - const sha = job.data['sha']; - const logDir = path.join(config.dataPath, 'reports', sha); - const logName = path.join(logDir, `std_output-${lib.shortID(sha)}.log`); - log('Calling %s with args %o', config.setup_function, [sha, repoPath, logName]); - let fcn = lib.fullpath(config.setup_function); - let ops = config.shell? {'shell': config.shell} : {}; - const prepEnv = cp.execFile(fcn, [sha, repoPath, logDir], ops, (err, stdout, stderr) => { - if (err) { - let errmsg = (err.code === 'ENOENT')? 
`File "${fcn}" not found` : err.code; - console.error('Checkout failed: ' + (stderr || errmsg)); - job.done(new Error(`Failed to prepare env: ${stderr || errmsg}`)); // Propagate error - return; - } - callback(job); - }); - prepEnv.stdout.pipe(process.stdout); - fs.mkdir(path.join(logDir), { recursive: true }, (err) => { - if (err) throw err; - let logDump = fs.createWriteStream(logName, { flags: 'w' }); - prepEnv.stdout.pipe(logDump); - prepEnv.on('exit', () => { logDump.close(); }); - }); - return prepEnv; - } -} +///////////////////// SHIELDS API EVENTS ///////////////////// /** - * Checkout Git repository. - * @param {String} repoPath - The path of the repository - * @param {String} ref - A commit SHA or branch name - * @todo Add error handling + * Serve the coverage results and build status for the shields.io coverage badge API. Attempts to + * load the test results from file and if none exist, adds a new job to the queue. */ -function checkout(repoPath, ref) { - if (!shell.which('git')) { throw new Error('Git not found on path'); } - let verify = (cmd) => { if (!cmd) { - shell.popd(); - throw new Error('Failed to checkout: ' + cmd.stderr); - } }; - if (!shell.pushd(repoPath)) { - shell.mkdir(path.resolve(repoPath + path.sep + '..')); - shell.pushd(repoPath); - verify(shell.exec(`git clone https://github.com/${env.process['REPO_OWNER']}/${env.process['REPO_NAME']}.git`)); - verify(shell.exec(`git checkout ${ref}`)); - } else { - verify(shell.exec('git fetch -a')); - verify(shell.exec('git reset --hard HEAD')); - verify(shell.exec(`git checkout ${ref}`)); - verify(shell.exec('git submodule update --init --recursive')); - verify(shell.exec('git submodule foreach git reset --hard HEAD')); - verify(shell.exec('git status')); - } - shell.popd(); -} +srv.get('/:badge/:repo/:id', async (req, res) => { + const context = req.params.badge === 'status' ? 
'build' : req.params.badge; + const data = { + owner: process.env['REPO_OWNER'], + repo: req.params.repo, + routine: lib.context2routine(context) + }; + + // Check we have a matching routine + if (!data.routine) { + console.error(`No routine for "${context}" context`); + return res.sendStatus(404); + } + let isSHA = lib.isSHA(req.params.id); + // Find head commit of branch + return fetchCommit(req.params.id, !isSHA, req.params.repo) + .then(id => { + data['context'] = context; + data['sha'] = id; + data['force'] = req.query.force === '' || lib.strToBool(req.query.force); + console.log(`Request for ${req.params.id} ${data.context}`); + const report = lib.getBadgeData(data); + // Send report + res.statusCode = 200; + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify(report)); + }) + .catch(err => { // Specified repo or branch not found + console.error(`${data.owner}/${data.repo}/${req.params.id} not found`); + res.sendStatus((err.status === 404) ? 404 : 500); + }); +}); ///////////////////// OTHER ///////////////////// @@ -395,28 +361,31 @@ function checkout(repoPath, ref) { * @returns {Function} - A Github request Promise. 
*/ async function updateStatus(data, targetURL = '') { - const debug = log.extend('updateStatus'); - // Validate inputs - if (!lib.isSHA(data.sha)) { throw new ReferenceError('undefined or invalid sha'); } // require sha - let supportedStates = ['pending', 'error', 'success', 'failure']; - if (supportedStates.indexOf(data.status) === -1) { - throw new lib.APIError(`status must be one of "${supportedStates.join('", "')}"`) - } - debug('Updating status to "%s" for %s @ %g', - data['status'], (data['context'] || '').split('/').pop(), data['sha']); - await setAccessToken(); - return request("POST /repos/:owner/:repo/statuses/:sha", { - owner: data['owner'] || process.env['REPO_OWNER'], - repo: data['repo'] || process.env['REPO_NAME'], - headers: { - authorization: `token ${token['token']}`, - accept: "application/vnd.github.machine-man-preview+json" - }, - sha: data['sha'], - state: data['status'], - target_url: targetURL, - description: (data['description'] || '').substring(0, maxN), - context: data['context'] + const debug = log.extend('updateStatus'); + // Validate inputs + if (!lib.isSHA(data.sha)) throw new ReferenceError('undefined or invalid sha'); // require sha + let supportedStates = ['pending', 'error', 'success', 'failure']; + if (supportedStates.indexOf(data.status) === -1) { + throw new lib.APIError(`status must be one of "${supportedStates.join('", "')}"`); + } + debug('Updating status to "%s" for %s @ %g', + data['status'], (data['context'] || '').split('/').pop(), data['sha']); + await setAccessToken(); + if (targetURL && data['repo'] !== process.env['REPO_NAME']) { + targetURL = lib.addParam(targetURL, 'module=' + data['repo']); + } + return request('POST /repos/:owner/:repo/statuses/:sha', { + owner: data['owner'] || process.env['REPO_OWNER'], + repo: data['repo'] || process.env['REPO_NAME'], + headers: { + authorization: `token ${token['token']}`, + accept: 'application/vnd.github.machine-man-preview+json' + }, + sha: data['sha'], + state: 
data['status'], + target_url: targetURL, + description: (data['description'] || '').substring(0, config.max_description_len), + context: data['context'] }); } @@ -428,128 +397,176 @@ async function updateStatus(data, targetURL = '') { * Payload reference https://developer.github.com/webhooks/event-payloads/ * @param {Object} event - The GitHub event object. * @todo Save full coverage object for future inspection - * @todo Add support for ignore list for specific actions * @todo Add support for regex in branch ignore list */ -async function eventCallback (event) { - const debug = log.extend('event'); - debug('eventCallback called'); - var ref; // ref (i.e. branch name) and head commit - const eventType = event.event; // 'push' or 'pull_request' - var job_template = { // the data structure containing information about our check - sha: null, // The head commit sha to test on - base: null, // The previous commit sha (for comparing changes in code coverage) - force: false, // Whether to run tests when results already cached - owner: process.env['REPO_OWNER'], // event.payload.repository.owner.login - repo: event.payload.repository.name, // The repository name - status: 'pending', // The check state to update our context with - description: null, // A brief description of what transpired - context: null // The precise check name, keeps track of what check we're doing - } - - // Double-check the event was intended for our app. This is also done using the headers before - // this stage. None app specific webhooks could be set up and would make it this far. Could add - // some logic here to deal with generic webhook requests (i.e. skip check status update). 
- if (event.payload['installation']['id'] !== token['installationId']) { - throw new lib.APIError('Generic webhook events not supported (installation ID invalid)'); - } - - // Harvest data payload depending on event type - switch(eventType) { - case 'pull_request': - let pr = event.payload.pull_request; - ref = pr.head.ref; - job_template['sha'] = pr.head.sha; - job_template['base'] = pr.base.sha; - // Check for repo fork; throw error if forked // TODO Add full stack test for this behaviour - let isFork = (pr.base.repo.owner.login !== pr.head.repo.owner.login) - || (pr.base.repo.owner.login !== process.env['REPO_OWNER']) - || (pr.head.repo.name !== pr.base.repo.name); - if (isFork) { throw ReferenceError('Forked PRs not supported; check config file') } - break; - case 'push': - ref = event.payload.ref; - job_template['sha'] = event.payload.head_commit.id || event.payload.after; // Run tests for head commit only - job_template['base'] = event.payload.before; - break; - default: // Shouldn't get this far - throw new TypeError(`event "${event.event}" not supported`) - } - - // Log the event - console.log('Received a %s event for %s to %s', - eventType.replace('_', ' '), job_template['repo'], ref) - - // Determine what to do from settings - if (!(eventType in config.events)) { - // No events set; return - debug('Event "%s" not set in config', eventType); - return; - } - const todo = config.events[eventType] || {} // List of events to process - - // Check if ref in ignore list or not in include list - let incl = !todo.ref_ignore; // ignore list takes precedence - let ref_list = lib.ensureArray(todo.ref_ignore || todo.ref_include || []); - if ((ref_list.indexOf(ref.split('/').pop()) === -1) === incl) { - // Do nothing if in ignore list, or not in include list - debug(`Ref ${ref} ${incl? 'not' : 'is'} in config ref_${incl? 
'include' : 'ignore'} list`); - return; - } - - // Check if action in actions list, if applicable - let actions = lib.ensureArray(todo.actions || []); - if (event.payload.action && actions && actions.indexOf(event.payload.action) === -1) { - debug('Action "%s" not set in config', event.payload.action); - return; - } - - // Validate checks to run - const checks = lib.ensureArray(todo.checks || []); - if (!todo.checks) { - // No checks to perform - debug('No checks set in config'); - return; - } - - // For each check we update it's status and add a job to the queue - let isString = x => { return (typeof x === 'string' || x instanceof String); } - for (let check of checks) { - // Invent a description for the initial status update - if (!isString(check)) { throw new TypeError('Check must be a string') } - // Copy job data and update check specific fields - let data = Object.assign({}, job_template); - data.context = `${check}/${process.env['USERDOMAIN'] || process.env['NAME']}` - switch (check) { - case 'coverage': - data.description = 'Checking coverage'; - break; - case 'continuous-integration': - data.description = 'Tests running'; - break; - default: // generic description - data.description = 'Check in progress'; - } - - // If we have two checks to perform and one already on the pile, set force to false - let qLen = queue.pile.length; - data.force = !(checks.length > 1 && qLen > 0 && queue.pile[qLen-1].data.sha === data.sha); - - /** - * Update the status and start job. - * Posts a 'pending' status while we do our tests - * We wait for the job to be added before we continue so the force flag can be set. - * NB: If the tests and env prep are too quick our outcome may be updated before the pending - * status. 
- */
 - updateStatus(data)
 - .then(() => console.log(`Updated status to "pending" for ${data.context}`))
 - .catch(err => {
 - console.log(`Failed to update status to "pending" for ${data.context}`);
 - console.log(err);
 - });
 - queue.add(data);
 - }
+async function eventCallback(event) {
+ const debug = log.extend('event');
+ debug('eventCallback called');
+ var ref; // ref (i.e. branch name) and head commit
+ const eventType = event.event; // 'push' or 'pull_request'
+ const job_template = { // the data structure containing information about our check
+ sha: null, // The head commit sha to test on
+ base: null, // The previous commit sha (for comparing changes in code coverage)
+ force: false, // Whether to run tests when results already cached
+ owner: process.env['REPO_OWNER'], // event.payload.repository.owner.login
+ repo: event.payload.repository.name, // The repository name
+ status: 'pending', // The check state to update our context with
+ description: null, // A brief description of what transpired
+ context: null, // The precise check name, keeps track of what check we're doing
+ routine: null // A list of scripts to call
+ };
+
+ // Double-check the event was intended for our app. This is also done using the headers before
+ // this stage. Non app-specific webhooks could be set up and would make it this far. Could add
+ // some logic here to deal with generic webhook requests (i.e. skip check status update). 
+ if (event.payload['installation']['id'] !== token['installationId']) { + throw new lib.APIError('Generic webhook events not supported (installation ID invalid)'); + } + + let filesGET = { // Data for querying changes files + owner: process.env['REPO_OWNER'], // event.payload.repository.owner.login + repo: event.payload.repository.name, // The repository name + headers: { + accept: 'application/vnd.github.machine-man-preview+json' + } + }; + + // Harvest data payload depending on event type + switch (eventType) { + case 'pull_request': + let pr = event.payload.pull_request; + ref = pr.head.ref; + job_template['sha'] = pr.head.sha; + job_template['base'] = pr.base.sha; + // Check for repo fork; throw error if forked // TODO Add full stack test for this behaviour + let isFork = (pr.base.repo.owner.login !== pr.head.repo.owner.login) + || (pr.base.repo.owner.login !== process.env['REPO_OWNER']) + || (pr.head.repo.name !== pr.base.repo.name); + if (isFork) throw ReferenceError('Forked PRs not supported; check config file'); + if (event.payload.action === 'synchronize') { + filesGET['base'] = event.payload.before; + filesGET['head'] = event.payload.after; + } else { + filesGET['pull_number'] = pr.number; + } + break; + case 'push': + ref = event.payload.ref; + job_template['sha'] = event.payload.head_commit.id || event.payload.after; // Run tests for head commit only + job_template['base'] = event.payload.before; + filesGET['base'] = event.payload.before; + filesGET['head'] = event.payload.head_commit.id || event.payload.after; + break; + default: // Shouldn't get this far + throw new TypeError(`event "${event.event}" not supported`); + } + + // Log the event + console.log('Received a %s event for %s to %s', + eventType.replace('_', ' '), job_template['repo'], ref); + + // Determine what to do from settings + if (!(eventType in config.events)) { + // No events set; return + debug('Event "%s" not set in config', eventType); + return; + } + const todo = 
config.events[eventType] || {}; // List of events to process + + // Check if pull request is a draft and skip if ignore_drafts (default false) + if (eventType === 'pull_request' && + todo.ignore_drafts === true && + event.payload.pull_request.draft === true) { + debug('Ignoring draft pull_requests'); + return; + } + + // Check if ref in ignore list or not in include list + let incl = !todo.ref_ignore; // ignore list takes precedence + let ref_list = lib.ensureArray(todo.ref_ignore || todo.ref_include || []); + if ((ref_list.indexOf(ref.split('/').pop()) === -1) === incl) { + // Do nothing if in ignore list, or not in include list + debug(`Ref ${ref} ${incl ? 'not' : 'is'} in config ref_${incl ? 'include' : 'ignore'} list`); + return; + } + + // Check if action in actions list, if applicable + let actions = lib.ensureArray(todo.actions || []); + if (event.payload.action && actions && actions.indexOf(event.payload.action) === -1) { + debug('Action "%s" not set in config', event.payload.action); + return; + } + + // Validate checks to run + const checks = lib.ensureArray(todo.checks || []); + if (!todo.checks) { + // No checks to perform + debug('No checks set in config'); + return; + } + + // If some files changes ignored, check if we can skip + if (todo.files_ignore) { + debug('Checking for changed files'); + let pattern = lib.ensureArray(todo.files_ignore).join('|'); + try { + let fileURI = (eventType === 'push' || event.payload.action === 'synchronize') ? 
+ 'GET /repos/:owner/:repo/compare/:base...:head' : + 'GET /repos/:owner/:repo/pulls/:pull_number/files'; + let {data} = await request(fileURI, filesGET); + let files = data.files || data; + if (files.every(x => x.filename.match(pattern))) { + return; + } + } catch (err) { + console.log('Failed to query changed files'); + console.error(err); + } + } + + // For each check we update it's status and add a job to the queue + let isString = x => { + return (typeof x === 'string' || x instanceof String); + }; + for (let check of checks) { + // Invent a description for the initial status update + if (!isString(check)) throw new TypeError('Check must be a string'); + // Copy job data and update check specific fields + let data = Object.assign({}, job_template); + data.context = `${check}/${process.env['USERDOMAIN'] || process.env['NAME']}`; + data.routine = lib.context2routine(check); + let targetURL = `${process.env['WEBHOOK_PROXY_URL']}/log/${data.sha}?refresh=1`; + switch (check) { + case 'coverage': + data.description = 'Checking coverage'; + targetURL = ''; // Must wait until end for coverage + break; + case 'continuous-integration': + data.description = 'Tests running'; + break; + default: // generic description + data.description = 'Check in progress'; + } + + // If we have two checks to perform and one already on the pile, set force to false + let qLen = queue.pile.length; + data.force = !(checks.length > 1 && qLen > 0 && queue.pile[qLen - 1].data.sha === data.sha); + + /** + * Update the status and start job. + * Posts a 'pending' status while we do our tests + * We wait for the job to be added before we continue so the force flag can be set. + * NB: If the tests and env prep are too quick our outcome may be updated before the pending + * status. 
+ */ + updateStatus(data, targetURL) + .then(() => console.log(`Updated status to "pending" for ${data.context}`)) + .catch(err => { + console.log(`Failed to update status to "pending" for ${data.context}`); + console.error(err); + }); + queue.add(data); + } } @@ -560,32 +577,31 @@ async function eventCallback (event) { * @param {Object} job - Job object which has finished being processed. */ queue.on('finish', (err, job) => { // On job end post result to API - var target = ''; // We will only update the endpoint for coverage jobs - console.log(`Job #${lib.shortID(job.id)} finished` + (err ? ' with error' : '')); - if (job.data.skipPost === true) { return; } - - // Update target URL - if (!job.data.skipPost && job.data.context.startsWith('coverage')) { - // No URL for coverage if errored - target = err? '' : `${process.env['WEBHOOK_PROXY_URL']}/${ENDPOINT}/coverage/${job.data.sha}`; - } else { - target = `${process.env['WEBHOOK_PROXY_URL']}/${ENDPOINT}/${job.data.sha}`; - } - - // Update status if error occurred - if (err) { - job.data['status'] = 'error'; - job.data['description'] = err.message; - } - - updateStatus(job.data, target) - .then(() => console.log(`Updated status to "${job.data.status}" for ${job.data.context}`)) - .catch(err => { - console.log(`Failed to update status to "${job.data.status}" for ${job.data.context}`); - console.log(err); - }); + var target = ''; // We will only update the endpoint for coverage jobs + console.log(`Job #${lib.shortID(job.id)} finished` + (err ? ' with error' : '')); + if (job.data.skipPost === true) return; + let context = job.data.context || ''; + + // Update target URL + if (!job.data.skipPost && context.startsWith('coverage')) { + // No URL for coverage if errored + target = err ? 
'' : `${process.env['WEBHOOK_PROXY_URL']}/${ENDPOINT}/coverage/${job.data.sha}`; + } else { + target = `${process.env['WEBHOOK_PROXY_URL']}/${ENDPOINT}/${job.data.sha}`; + } + + // Update status if error occurred + if (err) { + job.data['status'] = 'error'; + job.data['description'] = err.message; + } + + updateStatus(job.data, target) + .then(() => console.log(`Updated status to "${job.data.status}" for ${job.data.context}`)) + .catch(err => { + console.log(`Failed to update status to "${job.data.status}" for ${job.data.context}`); + console.log(err); + }); }); -module.exports = { - updateStatus, srv, handler, setAccessToken, prepareEnv, runTests, eventCallback, fetchCommit -} +module.exports = {updateStatus, srv, handler, setAccessToken, eventCallback, fetchCommit}; diff --git a/test/coverage.test.js b/test/coverage.test.js index c1b212c..91fc7cc 100644 --- a/test/coverage.test.js +++ b/test/coverage.test.js @@ -1,23 +1,21 @@ const fs = require('fs'); -const assert = require('assert'); const path = require('path'); const sinon = require('sinon'); const expect = require('chai').expect; -const config = require('../config/config').settings; const Coverage = require('../coverage'); const dummy_id = '1c33a6e2ac7d7fc098105b21a702e104e09767cf'; -describe('Test coverage parser:', function() { +describe('Test coverage parser:', function () { var testable; var sandbox; - // Check NODE_ENV is correctly set, meaning our imported settings will be test ones + // Check NODE_ENV is correctly set, meaning our imported settings will be test ones beforeEach(function () { let md5 = '385a5d56850127317c317b0f66e91078'; let code = 'line1\nline2\n\rline3\n\rline4'; - testable = function(obj, done) { + testable = function (obj, done) { expect([496, 63]).to.include(obj.source_files.length); let file = obj.source_files[0]; expect(file).to.have.all.keys('name', 'source_digest', 'coverage'); @@ -30,32 +28,31 @@ describe('Test coverage parser:', function() { .withArgs(sinon.match((x) => 
x.replace('\\', '/').startsWith('C:/Hello-World'))) .returns(code); fs.readFileSync.callThrough(); - }) + }); it('Check loading MATLAB', function (done) { - let xmlPath = path.resolve('test', 'fixtures', 'CoverageResults.mat.xml') - Coverage(xmlPath, 'Hello-World', dummy_id, [], obj => testable(obj, done) ); + let xmlPath = path.resolve('test', 'fixtures', 'CoverageResults.mat.xml'); + Coverage(xmlPath, 'Hello-World', dummy_id, []) + .then(obj => testable(obj, done)); }); it('Check loading Python', function (done) { - let xmlPath = path.resolve('test', 'fixtures', 'CoverageResults.py.xml') - Coverage(xmlPath, 'Hello-World', dummy_id, [], obj => testable(obj, done) ); + let xmlPath = path.resolve('test', 'fixtures', 'CoverageResults.py.xml'); + Coverage(xmlPath, 'Hello-World', dummy_id, []) + .then(obj => testable(obj, done)); }); - afterEach(function () { sandbox.restore(); }); -}); - - -xdescribe('Test md5 file hash:', function() { - // Check NODE_ENV is correctly set, meaning our imported settings will be test ones - before(function () { - assert(process.env.NODE_ENV.startsWith('test'), 'Test run outside test env'); + it('Check missing file', function (done) { + let xmlPath = path.resolve('test', 'fixtures', 'nofile.xml'); + testable = (err, done) => { + expect(err.code).eq('ENOENT'); + done(); + }; + Coverage(xmlPath, 'Hello-World', dummy_id, []) + .catch(err => testable(err, done)); }); - it('MD5 should return correct hash', function (done) { - let test_path = './path/to/file.mat'; - let stub = sinon.stub(fs, 'readFileSync') - .withArgs(test_path) - .returns('line1\nline2\n\rline3\n\rline4'); + afterEach(function () { + sandbox.restore(); }); }); diff --git a/test/fixtures/.db.json b/test/fixtures/.db.json index ec1fe52..ad2228c 100644 --- a/test/fixtures/.db.json +++ b/test/fixtures/.db.json @@ -1 +1 @@ -[{"commit": "cabe27e5c8b8cb7cdc4e152f1cf013a89adc7a71", "results": [{"Duration": 0.07038330000000001, "Details": {}, "Name": "Parameters_test/test_set", 
"Passed": false, "Failed": true, "Incomplete": true}, {"Duration": 2.5838056999999996, "Details": {}, "Name": "Alyx_test[base_url=https___test_alyx_internationalbrainlab_org]/test_getSessions", "Passed": false, "Failed": true, "Incomplete": false}, {"Duration": 5.1105206, "Details": {}, "Name": "Block2ALF_test/test_incomplete", "Passed": true, "Failed": false, "Incomplete": false}], "status": "failure", "description": "18/320 tests failed", "coverage": 22.19690421937613}, {"commit": "1c33a6e2ac7d7fc098105b21a702e104e09767cf", "results": [{"Duration": 0.0500121, "Details": {}, "Name": "patch_test/Test3_Circle", "Passed": true, "Failed": false, "Incomplete": false}, {"Duration": 0.0482601, "Details": {}, "Name": "toStr_test/test_toStr", "Passed": true, "Failed": false, "Incomplete": false}, {"Duration": 0.0389527, "Details": {}, "Name": "Signals_test/test_output", "Passed": true, "Failed": false, "Incomplete": false}, {"Duration": 0.016370700000000002, "Details": {}, "Name": "Signals_test/test_erf", "Passed": true, "Failed": false, "Incomplete": false}, {"Duration": 0.0152839, "Details": {}, "Name": "rnd_test/test_uni", "Passed": true, "Failed": false, "Incomplete": false}], "status": "success", "description": "All passed", "coverage": 75.77018633540374}, {"commit": "7bdf62", "results": null, "status": "error", "description": "Failed to checkout code: 7bdf62", "coverage": null}] \ No newline at end of file 
+[{"commit":"cabe27e5c8b8cb7cdc4e152f1cf013a89adc7a71","datetime":"2021-04-30T08:23:06.764580","results":[{"Duration":0.07038330000000001,"Details":{},"Name":"Parameters_test/test_set","Passed":false,"Failed":true,"Incomplete":true},{"Duration":2.5838056999999996,"Details":{},"Name":"Alyx_test[base_url=https___test_alyx_internationalbrainlab_org]/test_getSessions","Passed":false,"Failed":true,"Incomplete":false},{"Duration":5.1105206,"Details":{},"Name":"Block2ALF_test/test_incomplete","Passed":true,"Failed":false,"Incomplete":false}],"status":"failure","description":"18/320 tests failed","statistics":{"total":320,"failed":16,"errored":2,"skipped":5,"passed":297,"duration":146},"coverage":22.19690421937613},{"commit":"1c33a6e2ac7d7fc098105b21a702e104e09767cf","results":[{"Duration":0.0500121,"Details":{},"Name":"patch_test/Test3_Circle","Passed":true,"Failed":false,"Incomplete":false},{"Duration":0.0482601,"Details":{},"Name":"toStr_test/test_toStr","Passed":true,"Failed":false,"Incomplete":false},{"Duration":0.0389527,"Details":{},"Name":"Signals_test/test_output","Passed":true,"Failed":false,"Incomplete":false},{"Duration":0.016370700000000002,"Details":{},"Name":"Signals_test/test_erf","Passed":true,"Failed":false,"Incomplete":false},{"Duration":0.0152839,"Details":{},"Name":"rnd_test/test_uni","Passed":true,"Failed":false,"Incomplete":false}],"status":"success","description":"All passed","coverage":75.77018633540374},{"commit":"7bdf62","results":null,"status":"error","description":"Failed to checkout code: 7bdf62","coverage":null}] diff --git a/test/lib.test.js b/test/lib.test.js index cbf837a..dcac085 100644 --- a/test/lib.test.js +++ b/test/lib.test.js @@ -1,13 +1,16 @@ const fs = require('fs'); const cp = require('child_process'); +const events = require('events'); const shell = require('shelljs'); +const path = require('path'); -const config = require('../config/config').settings -const assert = require('assert') +const config = 
require('../config/config').settings; +const assert = require('assert'); const sinon = require('sinon'); -const expect = require('chai').expect +const expect = require('chai').expect; const lib = require('../lib'); const queue = require('../lib').queue; +const {stdErr} = require('./fixtures/static'); ids = [ 'cabe27e5c8b8cb7cdc4e152f1cf013a89adc7a71', @@ -20,13 +23,13 @@ ids = [ /** * A test for the function ensureArray. Should return an array but not affect array inputs. */ -describe('Test ensureArray:', function() { +describe('Test ensureArray:', function () { it('Check returns array', function () { - let s = 'foo' - assert(Array.isArray(lib.ensureArray(s)), 'failed to return array') - assert.deepStrictEqual(lib.ensureArray(s), [s], 'failed to return array') - let arr = ['bar'] - assert.strictEqual(lib.ensureArray(arr), arr, 'failed to return array') + let s = 'foo'; + assert(Array.isArray(lib.ensureArray(s)), 'failed to return array'); + assert.deepStrictEqual(lib.ensureArray(s), [s], 'failed to return array'); + let arr = ['bar']; + assert.strictEqual(lib.ensureArray(arr), arr, 'failed to return array'); }); }); @@ -34,28 +37,34 @@ describe('Test ensureArray:', function() { /** * This tests the shields callback which returns sheilds.io API data for coverage and build status. 
*/ -describe("strToBool function", () => { - it('Check valid true', () => { - strings = ['on', 'true', 'True', '1', 'ON']; - strings.forEach((x) => { expect(lib.strToBool(x)).true; }); - }); - - it('Check valid false', () => { - strings = ['', null, undefined, '0', 'false']; - strings.forEach((x) => { expect(lib.strToBool(x)).false; }); - }); +describe('strToBool function', () => { + it('Check valid true', () => { + strings = ['on', 'true', 'True', '1', 'ON']; + strings.forEach((x) => { + expect(lib.strToBool(x)).true; + }); + }); + + it('Check valid false', () => { + strings = ['', null, undefined, '0', 'false']; + strings.forEach((x) => { + expect(lib.strToBool(x)).false; + }); + }); }); /** * A test for the function partial. Should curry function input. */ -describe('Test partial:', function() { +describe('Test partial:', function () { it('expect curried function', function () { - let f = (a, b) => { return a + b; }; + let f = (a, b) => { + return a + b; + }; let f0 = lib.partial(f); - expect(f0(2)).instanceOf(Function) - expect(f0(2, 2)).eq(4) + expect(f0(2)).instanceOf(Function); + expect(f0(2, 2)).eq(4); }); }); @@ -63,75 +72,156 @@ describe('Test partial:', function() { /** * A test for the function getRepoPath */ -describe('Test getRepoPath:', function() { +describe('Test getRepoPath:', function () { + afterEach(() => { + if (config.repos !== undefined) { + delete config.repos; + } + }); + it('expect returned from env', function () { - let repoPath = lib.getRepoPath() - expect(repoPath).eq(process.env.REPO_PATH) + let repoPath = lib.getRepoPath(); + expect(repoPath).eq(process.env.REPO_PATH); + }); + + it('expect returned from config', function () { + config.repos = { + main: 'path/to/main', + submodule: 'path/to/submodule' + }; + let repoPath = lib.getRepoPath('main'); + expect(repoPath).eq(config.repos.main); + }); +}); + + +/** + * A test for the function addParam + */ +describe('Test addParam:', function () { + it('expect deals with slash', function () 
{ + let url = 'https://example.com'; + const param = 'param=value'; + expect(lib.addParam(url, param)).eq(lib.addParam(url + '/', param)); + url += '/foo'; + expect(lib.addParam(url, param)).eq(url + '/?' + param); + expect(lib.addParam(url, param)).eq(lib.addParam(url + '/', param)); + }); + + it('expect handles multiple params', function () { + const url = 'https://example.com'; + const param1 = 'param=value'; + const param2 = 'par=val'; + const expected = 'https://example.com/?param=value&par=val'; + expect(lib.addParam(url, param1, param2)).eq(expected); + }); +}); + + +/** + * A test for the function context2routine + */ +describe('Test context2routine:', function () { + it('expect returns default', function () { + const context = 'anything'; + const expected = config['routines']['*']; + expect(lib.context2routine(context)).eq(expected); }); }); /** * A test for the function compareCoverage. - * @todo add test for strict compare */ -describe('Test compareCoverage:', function() { - var job; - - beforeEach(function () { - queue.process(async (_job, _done) => { - }); // nop - queue.pile = []; - job = { - data: { - sha: null - } - }; - }) - - it('expect coverage diff', function () { - // Test decrease in coverage - job.data.sha = ids[0]; - job.data.base = ids[1]; - lib.compareCoverage(job); - expect(job.data.status).eq('failure'); - expect(job.data.description).contains('decreased'); - expect(queue.pile).empty; - - // Test increase in coverage - job.data.coverage = 95.56 - lib.compareCoverage(job); - expect(job.data.status).eq('success'); - expect(job.data.description).contains('increased'); - expect(queue.pile).empty; - }); - - it('expect ReferenceError', function () { - job.data.base = null; - expect(() => lib.compareCoverage(job)).throws(ReferenceError); - }); - - it('expect fail status', function () { - job.data.sha = ids[0]; - job.data.base = ids[3]; // errored - lib.compareCoverage(job); - expect(job.data.status).eq('failure'); - 
expect(job.data.description).contains('incomplete'); - expect(queue.pile).empty; - }); - - it('expect job added', function () { - // Test decrease in coverage - job.data.sha = ids[2]; // fake - job.data.base = ids[1]; - job.data.context = 'coverage'; - lib.compareCoverage(job); - expect(queue.pile.length).eq(2); - expect(job.data.skipPost).true; // Job should be skipped to allow time for jobs to run - expect(queue.pile[0].data.sha).eq(ids[1]) - expect(queue.pile[1].data.skipPost).false; - expect(queue.pile[1].data.context).eq(job.data.context) - }); +describe('Test compareCoverage:', function () { + var job; + const _default_coverage = config.strict_coverage; + + beforeEach(function () { + queue.process(async (_job, _done) => {}); // nop + queue.pile = []; + job = { + data: { + sha: null + } + }; + }); + + afterEach(function () { + // Restore default config param + config.strict_coverage = _default_coverage; + }); + + it('expect coverage diff', function () { + // Test decrease in coverage + job.data.sha = ids[0]; + job.data.base = ids[1]; + lib.compareCoverage(job); + expect(job.data.status).eq('failure'); + expect(job.data.description).contains('decreased'); + expect(queue.pile).empty; + + // Test increase in coverage + job.data.coverage = 95.56; + lib.compareCoverage(job); + expect(job.data.status).eq('success'); + expect(job.data.description).contains('increased'); + expect(queue.pile).empty; + + // Test slight increase + job.data.coverage = 75.7746; + lib.compareCoverage(job); + expect(job.data.status).eq('success'); + expect(job.data.description).contains('increased slightly'); + expect(queue.pile).empty; + }); + + it('test strict coverage', function () { + job.data.sha = ids[0]; + job.data.base = ids[1]; + job.data.coverage = 75.77018633540374; + + // Test strict coverage off + config.strict_coverage = false; + lib.compareCoverage(job); + expect(job.data.status).eq('success'); + expect(job.data.description).contains('remains at'); + expect(queue.pile).empty; 
+ + // Test strict coverage on + config.strict_coverage = true; + lib.compareCoverage(job); + expect(job.data.status).eq('failure'); + expect(job.data.description).contains('remains at'); + expect(queue.pile).empty; + }); + + it('expect ReferenceError', function () { + job.data.base = null; + expect(() => lib.compareCoverage(job)).throws(ReferenceError); + }); + + it('expect fail status', function () { + job.data.sha = ids[0]; + job.data.base = ids[3]; // errored + lib.compareCoverage(job); + expect(job.data.status).eq('failure'); + expect(job.data.description).contains('incomplete'); + expect(queue.pile).empty; + }); + + it('expect job added', function () { + // Test decrease in coverage + job.data.sha = ids[2]; // fake + job.data.base = ids[1]; + job.data.context = 'coverage'; + lib.compareCoverage(job); + expect(queue.pile.length).eq(2); + expect(job.data.skipPost).true; // Job should be skipped to allow time for jobs to run + expect(queue.pile[0].data.sha).eq(ids[1]); + expect(queue.pile[1].data.skipPost).false; + expect(queue.pile[1].data.context).eq(job.data.context); + }); }); @@ -139,10 +229,10 @@ describe('Test compareCoverage:', function() { * A test for the function updateJobFromRecord. 
* @todo add test for compareCoverage call */ -describe('Test updateJobFromRecord:', function() { +describe('Test updateJobFromRecord', function () { var job; - beforeEach(function() { + beforeEach(function () { queue.process(async (_job, _done) => {}); // nop queue.pile = []; job = { @@ -150,38 +240,88 @@ describe('Test updateJobFromRecord:', function() { sha: null } }; - }) + }); - it('expect no record found', function () { + it('expect no record found', async function () { job.data.sha = ids[2]; - const updated = lib.updateJobFromRecord(job); + const updated = await lib.updateJobFromRecord(job); expect(updated).false; }); - it('expect updated', function () { + it('expect updated', async function () { job.data.sha = ids[0]; - const updated = lib.updateJobFromRecord(job); + const updated = await lib.updateJobFromRecord(job); expect(updated).true; expect(job.data).deep.keys(['sha', 'status', 'description', 'coverage']); }); }); +/** + * A test for inserting the duration in description field by updateJobFromRecord. 
+ */ +describe('Test duration in description', function () { + var job; + var _dbFile = config.dbFile; + + before(function (done) { + job = { + data: { + sha: ids[1] + }, + created: new Date(Date.now() - 1000 * 60 * 10) + }; + config.dbFile = path.join(path.parse(config.dbFile).dir, '._db.json'); + fs.copyFile(_dbFile, config.dbFile, err => { + if (err) throw err; + done(); + }); + }); + + after(function () { + queue.pile = []; // In case a job was added + fs.unlinkSync(config.dbFile); + config.dbFile = _dbFile; + }); + + it('expect duration in description', async function () { + const updated = await lib.updateJobFromRecord(job); + expect(updated).true; + expect(job.data.description).contains('10 min'); + }); + + it('expect truncated description', async function () { + const records = JSON.parse(await fs.promises.readFile(config.dbFile, 'utf8')); + records[1]['description'] = 'Lorem ipsum '.repeat(13); + await fs.promises.writeFile(config.dbFile, JSON.stringify(records)); + const updated = await lib.updateJobFromRecord(job); + expect(updated).true; + expect(job.data.description.length).lte(config.max_description_len); + expect(job.data.description.endsWith('... (took 10 min)')).true; + }); +}); + + /** * A test for the function startJobTimer. Should kill the process when time is up and update the * job data. */ -describe('Test startJobTimer:', function() { +describe('Test startJobTimer:', function () { var clock; - before(() => { clock = sinon.useFakeTimers(); }); + before(() => { + clock = sinon.useFakeTimers(); + queue.process(() => {}); + queue.pile = []; + }); it('expect process killed', function (done) { const childProcess = { kill: () => { done(); }, pid: 10108 }; - const job = { data: {process: childProcess} }; + const job = queue.add({}); + job.child = childProcess; lib.startJobTimer(job); // Skip to the end... 
clock.tick(config.timeout + 1); @@ -190,49 +330,297 @@ describe('Test startJobTimer:', function() { it('expect tree-killed', function (done) { // Test tree-kill switch. We can't stub function exports so we'll use a slow ping command // and kill it. Should be relatively consistent across platforms. + const job = queue.add({}); const cmd = 'ping 127.0.0.1 -n 6 > nul'; - const childProcess = cp.exec(cmd, () => { done(); }); - childProcess.kill = () => {}; // nop - const job = { data: {process: childProcess} }; + job.child = cp.exec(cmd, () => { done(); }); + job._child.kill = () => { + }; // nop lib.startJobTimer(job, true); // Skip to the end... clock.tick(config.timeout + 1); }); - after(() => { clock.restore(); }) + after(() => { + clock.restore(); + }); + + afterEach(() => { + queue.pile = []; + }); +}); + + +/** + * This tests the buildRoutine function. + */ +describe('running tests', () => { + var sandbox; // Sandbox for spying on queue + var spawnStub; // Main fileExec stub + var execEvent; + var job; + + function childProcessStub(errmsg) { + if (errmsg) { + return () => { // Return function to raise exception + setImmediate(() => { + execEvent.stderr.emit('data', errmsg); + }); + setImmediate(() => { + execEvent.exitCode = 1; + execEvent.emit('exit', execEvent.exitCode, null); + }); + setImmediate(() => { + execEvent.emit('close', 1, null); + }); + return execEvent; + }; + } else { + return () => { // Return function to successfully execute + setImmediate(() => { + execEvent.exitCode = 0; + execEvent.emit('exit', execEvent.exitCode, null); + }); + setImmediate(() => { + execEvent.emit('close', 0, null); + }); + return execEvent; + }; + } + } + + before(() => { + sandbox = sinon.createSandbox(); + }); + + beforeEach(function () { + spawnStub = sandbox.stub(cp, 'spawn'); + execEvent = new events.EventEmitter(); + execEvent.stdout = execEvent.stderr = new events.EventEmitter(); + execEvent.stdout.pipe = sandbox.spy(); + execEvent.exitCode = null; // NB: Must be 
set before another process is attached to Job + job = { + id: 123, + data: {sha: ids[0]}, + done: () => {} + }; + }); + + it('expect default routine', fin => { + // Create a job field with no routine field + job.done = validate; + let log = path.join(config.dataPath, 'reports', ids[0], 'std_output-cabe27e.log'); + let tasks = config['routines']['*'].map(x => path.resolve(path.join(__dirname, '..', x))); + spawnStub.callsFake(childProcessStub()); + lib.buildRoutine(job); + + function validate(err) { + for (let fn of tasks) { + spawnStub.calledWith(fn, [ids[0], config.repo, config.dataPath]); + } + expect(spawnStub.calledTwice).true; + expect(err).undefined; + expect(fs.existsSync(log)).true; + fin(); + } + }); + + it('test missing file error', fin => { + job.done = validate; + + // Raise a file not found error + spawnStub.callsFake(() => { + const err = new Error('ENOENT'); + err.code = 'ENOENT'; + err.path = config['routines']['*'][0]; + setImmediate(() => { + execEvent.emit('error', err, null); + }); + return execEvent; + }); + sandbox.stub(fs.promises, 'writeFile'); + sandbox.stub(fs.promises, 'readFile').resolves('[]'); + lib.buildRoutine(job).finally(fin); + + function validate(err) { + expect(spawnStub.calledOnce).true; + expect(err.message).matches(/File ".*?" 
not found/); + } + }); + + it('test misc spawn error', fin => { + job.done = validate; + + // Raise a file not found error + spawnStub.callsFake(() => { + const err = new Error('Unknown error'); + err.code = -1; + err.path = config['routines']['*'][0]; + setImmediate(() => { + execEvent.emit('error', err, null); + }); + return execEvent; + }); + sandbox.stub(fs.promises, 'writeFile'); + sandbox.stub(fs.promises, 'readFile').resolves('[]'); + lib.buildRoutine(job).finally(fin); + + function validate(err) { + expect(spawnStub.calledOnce).true; + expect(err.message).matches(/Failed to spawn/); + } + }); + + /** + * This tests handling error caused by routine failing to save a test record into the JSON db + */ + it('test update from record error', fin => { + job.done = validate; + job.data.sha = ids[2]; // No record for this SHA + + // Raise a file not found error + spawnStub.callsFake(childProcessStub()); + sandbox.stub(fs.promises, 'writeFile'); + sandbox.stub(fs.promises, 'readFile').resolves('[]'); + lib.buildRoutine(job).finally(fin); + + function validate(err) { + expect(spawnStub.calledTwice).true; + expect(err.message).contains('test result'); + } + }); + + it('runtests parses MATLAB error', (fin) => { + var err; + const errmsg = 'Error in MATLAB_function line 23'; + job.done = (e) => { err = e; }; + + // Exit with a MATLAB error + spawnStub.callsFake(childProcessStub(errmsg)); + sandbox.stub(fs.promises, 'readFile').resolves('[]'); + sandbox.stub(fs.promises, 'writeFile').callsFake((db_path, rec) => { + expect(db_path).eq(config.dbFile); + expect(rec).contains(errmsg); + expect(spawnStub.calledOnce).true; + expect(err.message).to.have.string(errmsg); + fin(); + }); + lib.buildRoutine(job); + }); + + it('runtests parses Python error', fin => { + var err; + job.done = (e) => { err = e; }; + + // Exit with a Python error + spawnStub.callsFake(childProcessStub(stdErr)); + sandbox.stub(fs.promises, 'readFile').resolves('[]'); + sandbox.stub(fs.promises, 
'writeFile').callsFake((db_path, rec) => { + expect(db_path).eq(config.dbFile); + let errmsg = 'FileNotFoundError: Invalid data root folder '; + expect(rec).contains(errmsg); + expect(spawnStub.calledOnce).true; + expect(err.message).to.have.string(errmsg); + fin(); + }); + lib.buildRoutine(job); + }); + + it('runtests parses flake error', fin => { + var err; + job.done = (e) => { err = e; }; + const flake_stderr = ('foobar...\n' + + './oneibl/params.py:4:1: F401 \'pathlib.PurePath\' imported but unused\n' + + './ibllib/tests/qc/test_dlc_qc.py:11:1: F401 \'brainbox.core.Bunch\' imported but unused' + ); + + // Exit with flake8 errors + spawnStub.callsFake(childProcessStub(flake_stderr)); + sandbox.stub(fs.promises, 'readFile').resolves('[]'); + sandbox.stub(fs.promises, 'writeFile').callsFake((db_path, rec) => { + expect(db_path).eq(config.dbFile); + expect(rec).contains('2 flake8 errors'); + expect(spawnStub.calledOnce).true; + expect(err.message).matches(/F401 '.*' imported but unused/); + fin(); + }); + lib.buildRoutine(job); + }); + + it('should open and close log', fin => { + const logSpy = { + close: sandbox.stub(), + on: () => {} + }; + sandbox.stub(fs, 'createWriteStream').returns(logSpy); + sandbox.stub(fs, 'mkdir'); + logSpy.close.callsFake(fin); + spawnStub.callsFake(childProcessStub()); + lib.buildRoutine(job); + }); + + it('expect loads test record', fin => { + queue.process(lib.buildRoutine); + queue.on('error', _ => {}); + + function validate(err, job) { + expect(err).undefined; + expect(job._child).eq(execEvent); + expect(job.data.status).eq('failure'); + expect(job.data.coverage).approximately(22.1969, 0.001); + fin(); + } + + sandbox.stub(queue._events, 'finish').value([validate]); + spawnStub.callsFake(childProcessStub()); + queue.add({sha: ids[0]}); + }); + + afterEach(function (done) { + queue.pile = []; + delete queue.process; + sandbox.verifyAndRestore(); + const logDir = path.join(config.dataPath, 'reports'); + fs.rmdir(logDir, {recursive: 
true}, err => { + if (err) throw err; + done(); + }); + }); + }); /** * A test for the function loadTestRecords. */ -describe('Test loading test records:', function() { +describe('Test loading test records:', function () { // Check NODE_ENV is correctly set, meaning our imported settings will be test ones before(function () { - assert(process.env.NODE_ENV.startsWith('test'), 'Test run outside test env') + assert(process.env.NODE_ENV.startsWith('test'), 'Test run outside test env'); }); it('Check loading existing record', function () { let id = ids[0]; const record = lib.loadTestRecords(id); - assert(record != null, 'failed to load record') - assert(!Array.isArray(record), 'failed to return single obj') - assert.strictEqual(record.commit, id, 'failed to return correct record') + assert(record != null, 'failed to load record'); + assert(!Array.isArray(record), 'failed to return single obj'); + assert.strictEqual(record.commit, id, 'failed to return correct record'); }); it('Check loading multiple records', function () { const records = lib.loadTestRecords(ids); - assert(records != null, 'failed to load records') - assert(Array.isArray(records), 'failed to return array') - assert.strictEqual(records.length, ids.length-1, 'failed to return both records') + assert(records != null, 'failed to load records'); + assert(Array.isArray(records), 'failed to return array'); + assert.strictEqual(records.length, ids.length - 1, 'failed to return both records'); }); it('Check loading fail', function () { - let id = ids[2] // this commit is not in db + let id = ids[2]; // this commit is not in db const record = lib.loadTestRecords(id); - let isEmptyArr = x => { return Array.isArray(x) && x.length === 0; } - assert(isEmptyArr(record)) - assert(isEmptyArr(lib.loadTestRecords([id, id]))) + let isEmptyArr = x => { + return Array.isArray(x) && x.length === 0; + }; + assert(isEmptyArr(record)); + assert(isEmptyArr(lib.loadTestRecords([id, id]))); }); }); @@ -240,16 +628,32 @@ 
describe('Test loading test records:', function() { /** * A test for the function saveTestRecords. */ -describe('Test saving test records:', function() { +describe('Test saving test records:', function () { var backup; + const dbFile = config.dbFile; // Store default path so we can change it // Check NODE_ENV is correctly set, meaning our imported settings will be test ones before(function () { - assert(process.env.NODE_ENV.startsWith('test'), 'Test run outside test env') + assert(process.env.NODE_ENV.startsWith('test'), 'Test run outside test env'); backup = config.dbFile + Date.now(); fs.copyFileSync(config.dbFile, backup); }); + // Restore correct dbFile path + afterEach(done => { + if (config.dbFile !== dbFile) { + fs.unlink(config.dbFile, err => { + if (err) { + console.error(err); + } + config.dbFile = dbFile; + done(); + }); + } else { + done(); + } + }); + it('Check saving existing record', async function () { const record = lib.loadTestRecords(ids[0]); delete record['results']; // remove a field @@ -262,32 +666,60 @@ describe('Test saving test records:', function() { it('Check saving new records', async function () { const records = [ - lib.loadTestRecords(ids[1]), - { - 'commit': ids[1].replace('2', '3'), // not in db - 'status': 'error', - } + lib.loadTestRecords(ids[1]), + { + 'commit': ids[1].replace('2', '3'), // not in db + 'status': 'error' + } ]; records[0]['status'] = 'error'; // change a field await lib.saveTestRecords(records); const new_records = lib.loadTestRecords(records.map(x => x.commit)); assert(new_records.length === 2); for (o of new_records) { - assert.strictEqual(o.status, 'error', 'failed to update all records'); + assert.strictEqual(o.status, 'error', 'failed to update all records'); } }); it('Check validation errors', function (done) { const record = { - commit: ids[2], - status: 'success' + commit: ids[2], + status: 'success' }; lib.saveTestRecords(record).catch(err => { - expect(err).instanceOf(lib.APIError); - done(); + 
expect(err).instanceOf(lib.APIError); + done(); + }); + }); + + it('Check missing file error', function (done) { + config.dbFile = path.join(path.parse(config.dbFile)['dir'], '.missing_db.json'); // Non-existent db file + assert(!fs.existsSync(config.dbFile)); + const record = { + commit: ids[0], + status: 'success' + }; + lib.saveTestRecords(record).then(() => { + expect(fs.existsSync(config.dbFile)).true; + done(); }); }); + it('Expect catches parse file error', async () => { + const incomplete = '{"commit": "7bdf62", "status": "error", "description": "."}]'; + await fs.promises.writeFile(config.dbFile, incomplete); + const record = { + commit: ids[0], + status: 'success' + }; + try { + await lib.saveTestRecords(record); + assert(false, 'failed to throw error'); + } catch (err) { + expect(err).instanceOf(SyntaxError); + } + }); + after(function () { fs.renameSync(backup, config.dbFile); }); @@ -297,136 +729,170 @@ describe('Test saving test records:', function() { /** * This tests the shields callback which returns sheilds.io API data for coverage and build status. 
*/ -describe("getBadgeData function", () => { - const sandbox = sinon.createSandbox(); // Sandbox for spying on queue - var input; // Input data for function - - beforeEach(function () { - queue.process(async (_job, _done) => {}) // nop - sandbox.spy(queue); - input = { - sha: null, - owner: process.env['REPO_OWNER'], - repo: '', - branch: '', - context: '' - }; - }); - - it('Check Coverage', function () { - var data, expected; - - // Low coverage - input['sha'] = ids[0]; - input['context'] = 'coverage'; - data = lib.getBadgeData(input); - expected = { - schemaVersion: 1, - label: input['context'], - message: '22.2%', - color: 'red' - }; - expect(data).to.deep.equal(expected); - sandbox.assert.notCalled(queue.add); - - // High coverage - input['sha'] = ids[1]; - expected['message'] = '75.77%'; - expected['color'] = 'brightgreen'; - data = lib.getBadgeData(input); - expect(data).to.deep.equal(expected); - sandbox.assert.notCalled(queue.add); - - // Errored - input['sha'] = ids[3]; - expected['message'] = 'unknown'; - expected['color'] = 'orange'; - data = lib.getBadgeData(input); - expect(data).to.deep.equal(expected); - sandbox.assert.notCalled(queue.add); - - // No coverage - input['sha'] = ids[2]; - expected['message'] = 'pending'; - expected['color'] = 'orange'; - data = lib.getBadgeData(input); - expect(data).to.deep.equal(expected); - sandbox.assert.calledOnce(queue.add); - }); - - it('Check build status', function () { - var data, expected; - - // Failed tests - input['sha'] = ids[0]; - input['context'] = 'status'; - data = lib.getBadgeData(input); - expected = { - schemaVersion: 1, - label: 'build', - message: 'failing', - color: 'red' - }; - expect(data).to.deep.equal(expected); - sandbox.assert.notCalled(queue.add); - - // High coverage - input['sha'] = ids[1]; - expected['message'] = 'passing'; - expected['color'] = 'brightgreen'; - data = lib.getBadgeData(input); - expect(data).to.deep.equal(expected); - sandbox.assert.notCalled(queue.add); - - // 
Errored - input['sha'] = ids[3]; - expected['message'] = 'unknown'; - expected['color'] = 'orange'; - data = lib.getBadgeData(input); - expect(data).to.deep.equal(expected); - sandbox.assert.notCalled(queue.add); - - // No coverage - input['sha'] = ids[2]; - expected['message'] = 'pending'; - expected['color'] = 'orange'; - data = lib.getBadgeData(input); - expect(data).to.deep.equal(expected); - sandbox.assert.calledOnce(queue.add); - - // Shouldn't add as job already queued - data = lib.getBadgeData(input); - expect(data).to.deep.equal(expected); - sandbox.assert.calledOnce(queue.add); - }); - - it('Check force flag', function () { - input['sha'] = ids[1]; - input['context'] = 'status'; - input['force'] = true; // set force flag to true - const expected = { - schemaVersion: 1, - label: 'build', - message: 'pending', - color: 'orange' - }; - let data = lib.getBadgeData(input); - expect(data).to.deep.equal(expected); - sandbox.assert.calledOnce(queue.add); - }); - - it('Check error handling', function () { - expect(() => lib.getBadgeData(input)).to.throw(ReferenceError, 'sha'); - input['sha'] = ids[0] - expect(() => lib.getBadgeData(input)).to.throw(ReferenceError, 'Context'); - input['context'] = 'updated' - expect(() => lib.getBadgeData(input)).to.throw(TypeError, 'context'); - }); - - afterEach(function () { - queue.pile = []; - sandbox.restore(); - }); +describe('getBadgeData function', () => { + const sandbox = sinon.createSandbox(); // Sandbox for spying on queue + var input; // Input data for function + + beforeEach(function () { + queue.process(async (_job, _done) => { + }); // nop + sandbox.spy(queue); + input = { + sha: null, + owner: process.env['REPO_OWNER'], + repo: '', + branch: '', + context: '' + }; + }); + + it('Check Coverage', function () { + var data, expected; + + // Low coverage + input['sha'] = ids[0]; + input['context'] = 'coverage'; + data = lib.getBadgeData(input); + expected = { + schemaVersion: 1, + label: input['context'], + message: 
'22.2%', + color: 'red' + }; + expect(data).to.deep.equal(expected); + sandbox.assert.notCalled(queue.add); + + // High coverage + input['sha'] = ids[1]; + expected['message'] = '75.77%'; + expected['color'] = 'brightgreen'; + data = lib.getBadgeData(input); + expect(data).to.deep.equal(expected); + sandbox.assert.notCalled(queue.add); + + // Errored + input['sha'] = ids[3]; + expected['message'] = 'unknown'; + expected['color'] = 'orange'; + data = lib.getBadgeData(input); + expect(data).to.deep.equal(expected); + sandbox.assert.notCalled(queue.add); + + // No coverage + input['sha'] = ids[2]; + expected['message'] = 'pending'; + expected['color'] = 'orange'; + data = lib.getBadgeData(input); + expect(data).to.deep.equal(expected); + sandbox.assert.calledOnce(queue.add); + }); + + it('Check build status', function () { + var data, expected; + + // Failed tests + input['sha'] = ids[0]; + input['context'] = 'build'; + data = lib.getBadgeData(input); + expected = { + schemaVersion: 1, + label: 'build', + message: 'failing', + color: 'red' + }; + expect(data).to.deep.equal(expected); + sandbox.assert.notCalled(queue.add); + + // High coverage + input['sha'] = ids[1]; + expected['message'] = 'passing'; + expected['color'] = 'brightgreen'; + data = lib.getBadgeData(input); + expect(data).to.deep.equal(expected); + sandbox.assert.notCalled(queue.add); + + // Errored + input['sha'] = ids[3]; + expected['message'] = 'errored'; + expected['color'] = 'red'; + data = lib.getBadgeData(input); + expect(data).to.deep.equal(expected); + sandbox.assert.notCalled(queue.add); + + // No coverage + input['sha'] = ids[2]; + expected['message'] = 'pending'; + expected['color'] = 'orange'; + data = lib.getBadgeData(input); + expect(data).to.deep.equal(expected); + sandbox.assert.calledOnce(queue.add); + + // Shouldn't add as job already queued + data = lib.getBadgeData(input); + expect(data).to.deep.equal(expected); + sandbox.assert.calledOnce(queue.add); + }); + + it('Check tests 
status', function () { + var data, expected; + + // Failed tests + input['sha'] = ids[0]; + input['context'] = 'tests'; + data = lib.getBadgeData(input); + expected = { + schemaVersion: 1, + label: 'tests', + message: '297 passed, 18 failed, 5 skipped', + color: 'red' + }; + expect(data).to.deep.equal(expected); + sandbox.assert.notCalled(queue.add); + + // Errored + input['sha'] = ids[3]; + expected['message'] = 'errored'; + expected['color'] = 'red'; + data = lib.getBadgeData(input); + expect(data).to.deep.equal(expected); + sandbox.assert.notCalled(queue.add); + + // No stats field + input['sha'] = ids[1]; + expected['message'] = 'passed'; + expected['color'] = 'brightgreen'; + data = lib.getBadgeData(input); + expect(data).to.deep.equal(expected); + sandbox.assert.notCalled(queue.add); + }); + + it('Check force flag', function () { + input['sha'] = ids[1]; + input['context'] = 'build'; + input['force'] = true; // set force flag to true + const expected = { + schemaVersion: 1, + label: 'build', + message: 'pending', + color: 'orange' + }; + let data = lib.getBadgeData(input); + expect(data).to.deep.equal(expected); + sandbox.assert.calledOnce(queue.add); + }); + + it('Check error handling', function () { + expect(() => lib.getBadgeData(input)).to.throw(ReferenceError, 'sha'); + input['sha'] = ids[0]; + expect(() => lib.getBadgeData(input)).to.throw(ReferenceError, 'Context'); + input['context'] = 'updated'; + expect(() => lib.getBadgeData(input)).to.throw(TypeError, 'context'); + }); + + afterEach(function () { + queue.pile = []; + sandbox.restore(); + }); }); @@ -434,61 +900,68 @@ describe("getBadgeData function", () => { /** * A test for the main queue process callback. 
*/ -describe('Test short circuit', function() { +describe('Test short circuit', function () { beforeEach(function () { - queue.process(async (_job, _done) => {}); // nop + queue.process(async (_job, _done) => { + }); // nop queue.pile = []; }); it('expect force flag set', function (done) { - // We expect that the job that's on the pile has 'force' set to false + // We expect that the job that's on the pile has 'force' set to false // Add job to the pile - queue.add( { sha: ids[0] }) // Record exists + queue.add({sha: ids[0]}); // Record exists function tests(run) { - expect(run).true; - expect(queue.pile[0].data.force).false; - done(); + expect(run).true; + expect(queue.pile[0].data.force).false; + done(); } + const job = { - data: { - sha: ids[0] // Record exists - }, - done: () => tests(false) + data: { + sha: ids[0] // Record exists + }, + done: () => tests(false) }; - lib.shortCircuit(job, () => { tests(true); }); + lib.shortCircuit(job, () => { + tests(true); + }); }); it('expect short circuit', function (done) { - // We expect that the job that's on the pile has 'force' set to false + // We expect that the job that's on the pile has 'force' set to false const job = { - data: { - sha: ids[0], // record exists - force: false // load from record - } + data: { + sha: ids[0], // record exists + force: false // load from record + } }; + function tests(run) { - expect(run).false; - expect(job.data.status).eq('failure'); - done(); + expect(run).false; + expect(job.data.status).eq('failure'); + done(); } + job.done = () => tests(false); lib.shortCircuit(job, () => tests(true)); }); it('expect forced test function called', function (done) { - // Record doesn't exist, so we expect the tests to be run anyway + // Record doesn't exist, so we expect the tests to be run anyway function tests(run) { - expect(run).true; - done(); + expect(run).true; + done(); } + const job = { - data: { - sha: ids[2], // record exists - force: false // load from record - }, - done: () => 
tests(false) + data: { + sha: ids[2], // record exists + force: false // load from record + }, + done: () => tests(false) }; lib.shortCircuit(job, () => tests(true)); }); @@ -498,22 +971,22 @@ describe('Test short circuit', function() { /** * A test for shortID function. */ -describe('Test shortID', function() { +describe('Test shortID', function () { - it('expect short str from int', function () { - const out = lib.shortID(987654321); - expect(out).eq('9876543'); - }); + it('expect short str from int', function () { + const out = lib.shortID(987654321); + expect(out).eq('9876543'); + }); - it('expect short str from str', function () { - const out = lib.shortID('98r7654321o', 3); - expect(out).eq('98r'); - }); + it('expect short str from str', function () { + const out = lib.shortID('98r7654321o', 3); + expect(out).eq('98r'); + }); - it('expect works with arrays', function () { - const out = lib.shortID([987654321, '7438ht43', null], 3); - expect(out).deep.equal(['987', '743', null]); - }); + it('expect works with arrays', function () { + const out = lib.shortID([987654321, '7438ht43', null], 3); + expect(out).deep.equal(['987', '743', null]); + }); }); @@ -521,77 +994,77 @@ describe('Test shortID', function() { /** * A test for isSHA function. */ -describe('Test isSHA', function() { +describe('Test isSHA', function () { - it('expect true on SHA', function () { - expect(lib.isSHA(ids[0])).true; - }); + it('expect true on SHA', function () { + expect(lib.isSHA(ids[0])).true; + }); - it('expect false on fake', function () { - expect(lib.isSHA(ids[2])).false; - }); + it('expect false on fake', function () { + expect(lib.isSHA(ids[2])).false; + }); }); /** * A test for listSubmodules function. 
*/ -describe('Test listSubmodules', function() { +describe('Test listSubmodules', function () { const sandbox = sinon.createSandbox(); const submodules = 'submodule.alyx-matlab.path alyx-matlab\nsubmodule.signals.path signals\n'; - beforeEach(function() { + beforeEach(function () { sandbox.spy(shell, 'pushd'); sandbox.spy(shell, 'popd'); }); - it('expect array returned', function () { - // NB: This test is over-engineered :( - const output = { - code: 0, - stdout: submodules, - match: (str) => submodules.match(str) - }; - sandbox - .stub(shell, 'exec') - .returns(output); - sandbox - .stub(shell, 'which') - .withArgs('git') - .returns(true); - const moduleList = lib.listSubmodules(process.env['REPO_PATH']); - expect(moduleList).deep.eq(['alyx-matlab', 'signals']); - expect(shell.pushd.calledOnce); - expect(shell.pushd.calledOnceWith(process.env['REPO_PATH'])); - expect(shell.popd.calledOnce); - }); - - it('expect empty array returned', function () { + it('expect array returned', function () { + // NB: This test is over-engineered :( + const output = { + code: 0, + stdout: submodules, + match: (str) => submodules.match(str) + }; + sandbox + .stub(shell, 'exec') + .returns(output); + sandbox + .stub(shell, 'which') + .withArgs('git') + .returns(true); + const moduleList = lib.listSubmodules(process.env['REPO_PATH']); + expect(moduleList).deep.eq(['alyx-matlab', 'signals']); + expect(shell.pushd.calledOnce); + expect(shell.pushd.calledOnceWith(process.env['REPO_PATH'])); + expect(shell.popd.calledOnce); + }); + + it('expect empty array returned', function () { const output = { - code: 0, - stdout: '', - match: (str) => ''.match(str) + code: 0, + stdout: '', + match: (str) => ''.match(str) }; sandbox - .stub(shell, 'exec') - .returns(output); + .stub(shell, 'exec') + .returns(output); sandbox - .stub(shell, 'which') - .withArgs('git') - .returns(true); + .stub(shell, 'which') + .withArgs('git') + .returns(true); const moduleList = 
lib.listSubmodules(process.env['REPO_PATH']); expect(moduleList).to.be.empty; - }); + }); - it('expect error', function () { + it('expect error', function () { sandbox - .stub(shell, 'which') - .withArgs('git') - .returns(null); + .stub(shell, 'which') + .withArgs('git') + .returns(null); expect(() => lib.listSubmodules(process.env['REPO_PATH'])).to.throw(); - }); + }); - afterEach(function() { + afterEach(function () { sandbox.restore(); }); diff --git a/test/main.test.js b/test/main.test.js index f8248d0..3d64d33 100644 --- a/test/main.test.js +++ b/test/main.test.js @@ -17,117 +17,117 @@ const assert = require('chai').assert; const lib = require('../lib'); const queue = lib.queue; -const { handler, eventCallback, srv, prepareEnv, runTests } = require('../serve'); -const { token } = require('./fixtures/static'); +const {handler, eventCallback, srv, prepareEnv, runTests} = require('../serve'); +const {token} = require('./fixtures/static'); const config = require('../config/config').settings; // Create a constant JWT // TODO put in static -const SHA = 'cabe27e5c8b8cb7cdc4e152f1cf013a89adc7a71' +const SHA = 'cabe27e5c8b8cb7cdc4e152f1cf013a89adc7a71'; /** * The hooks setup in main.js. 
*/ function main() { - const run = (job) => { prepareEnv(job, runTests); }; - queue.process((job) => { lib.shortCircuit(job, run); }); - handler.on('*', evt => eventCallback(evt)); - queue.on('error', _ => {}); - handler.on('error', function (err) { - console.error('Error:', err.message); - }) - process.on('unhandledRejection', (reason, p) => { - console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); - console.log(reason.stack) - }); - lib.openTunnel() - .then(() => { - // Start the server on same port as tunnel - var server = srv.listen(config.listen_port, function () { - let host = server.address().address; - let port = server.address().port; + const run = (job) => { prepareEnv(job, runTests); }; + queue.process((job) => { lib.shortCircuit(job, run); }); + handler.on('*', evt => eventCallback(evt)); + queue.on('error', _ => {}); + handler.on('error', function (err) { + console.error('Error:', err.message); + }); + process.on('unhandledRejection', (reason, p) => { + console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); + console.log(reason.stack); + }); + lib.openTunnel() + .then(() => { + // Start the server on same port as tunnel + var server = srv.listen(config.listen_port, function () { + let host = server.address().address; + let port = server.address().port; - console.log("Handler listening at http://%s:%s", host, port); - }); - }) - .catch(e => { throw e; }); + console.log('Handler listening at http://%s:%s', host, port); + }); + }) + .catch(e => { throw e; }); } /** * TODO Document. 
*/ -xdescribe("Full stack", () => { - var scope; // Our server mock - var clock; // Our clock mock for replicable JWT - var evt; // A payload event loaded from fixtures - var sandbox; // Sandbox for spying on queue +xdescribe('Full stack', () => { + var scope; // Our server mock + var clock; // Our clock mock for replicable JWT + var evt; // A payload event loaded from fixtures + var sandbox; // Sandbox for spying on queue - before(function () { - const APP_ID = process.env.GITHUB_APP_IDENTIFIER; - const evt = JSON.parse(fs.readFileSync('./test/fixtures/pull_payload.json')); - // https://runkit.com/gr2m/reproducable-jwt - clock = sinon.useFakeTimers({ - now: 0, - toFake: ['Date'] - }); - // For outgoing requests - scope = nock('https://api.github.com', { - reqheaders: { - accept: 'application/vnd.github.machine-man-preview+json', - } - }); - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .matchHeader('authorization', `bearer ${token}`) - .reply(201, {id: APP_ID}); - scope.post(`/app/installations/${APP_ID}/access_tokens`) - .matchHeader('authorization', `bearer ${token}`) - .reply(201, { - token: '#t0k3N', - permissions: { - checks: "write", - metadata: "read", - contents: "read" - }, - }); - let pr = evt.pull_request; - let uri = `/repos/${pr.head.repo.owner.login}/${pr.head.repo.name}/statuses/${pr.head.sha}`; - scope.post(uri, body => { return body.state === 'pending'}) - .twice() - .reply(201, {}); - scope.post(uri, body => { return body.state === 'success'}) - .twice() - .reply(201, {}); + before(function () { + const APP_ID = process.env.GITHUB_APP_IDENTIFIER; + const evt = JSON.parse(fs.readFileSync('./test/fixtures/pull_payload.json')); + // https://runkit.com/gr2m/reproducable-jwt + clock = sinon.useFakeTimers({ + now: 0, + toFake: ['Date'] + }); + // For outgoing requests + scope = nock('https://api.github.com', { + reqheaders: { + accept: 'application/vnd.github.machine-man-preview+json' + } + }); + 
scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) + .matchHeader('authorization', `bearer ${token}`) + .reply(201, {id: APP_ID}); + scope.post(`/app/installations/${APP_ID}/access_tokens`) + .matchHeader('authorization', `bearer ${token}`) + .reply(201, { + token: '#t0k3N', + permissions: { + checks: 'write', + metadata: 'read', + contents: 'read' + } + }); + let pr = evt.pull_request; + let uri = `/repos/${pr.head.repo.owner.login}/${pr.head.repo.name}/statuses/${pr.head.sha}`; + scope.post(uri, body => { return body.state === 'pending'; }) + .twice() + .reply(201, {}); + scope.post(uri, body => { return body.state === 'success'; }) + .twice() + .reply(201, {}); - sandbox = sinon.createSandbox() - const stub = sandbox.stub(cp, 'execFile'); - sandbox.stub(fs, 'createWriteStream'); - sandbox.stub(lib, 'openTunnel').resolves(null); - const execEvent = new events.EventEmitter(); - execEvent.stdout = new events.EventEmitter(); - execEvent.stdout.pipe = sandbox.spy(); - stub - .returns(execEvent) - .callsArgAsync(2, null, 'external script called', ''); - }); + sandbox = sinon.createSandbox(); + const stub = sandbox.stub(cp, 'execFile'); + sandbox.stub(fs, 'createWriteStream'); + sandbox.stub(lib, 'openTunnel').resolves(null); + const execEvent = new events.EventEmitter(); + execEvent.stdout = new events.EventEmitter(); + execEvent.stdout.pipe = sandbox.spy(); + stub + .returns(execEvent) + .callsArgAsync(2, null, 'external script called', ''); + }); - it('full stack job request', done => { - main(); - const server = supertest.agent(`http://localhost:${config.port}`); - server - .post(`/github`, evt) - .expect('Content-Type', 'application/json') - .expect(201) - .end(function (err, res) { - scope.isDone(); - if (err) return done(err); - done(); - }); + it('full stack job request', done => { + main(); + const server = supertest.agent(`http://localhost:${config.port}`); + server + .post(`/github`, evt) + .expect('Content-Type', 
'application/json') + .expect(201) + .end(function (err, res) { + scope.isDone(); + if (err) return done(err); + done(); + }); - }); + }); - after(function() { - clock.restore(); - queue.pile = []; - sandbox.verifyAndRestore(); - }); + after(function () { + clock.restore(); + queue.pile = []; + sandbox.verifyAndRestore(); + }); }); diff --git a/test/serve.test.js b/test/serve.test.js index c26c21d..a9feffe 100644 --- a/test/serve.test.js +++ b/test/serve.test.js @@ -1,228 +1,260 @@ const fs = require('fs'); -const cp = require('child_process'); -const events = require('events'); const path = require('path'); const nock = require('nock'); // for mocking outbound requests const request = require('supertest'); // for mocking inbound requests const sinon = require('sinon'); // for mocking local modules const expect = require('chai').expect; const assert = require('chai').assert; -const appAuth = require("@octokit/auth-app"); +const appAuth = require('@octokit/auth-app'); const APIError = require('../lib').APIError; -const lib = require('../lib'); -const { updateStatus, setAccessToken, eventCallback, srv, prepareEnv, runTests, fetchCommit} = - require('../serve'); +const {updateStatus, setAccessToken, eventCallback, srv, fetchCommit} = require('../serve'); const queue = require('../lib').queue; const config = require('../config/config').settings; -const { stdErr, token } = require('./fixtures/static'); +const {token} = require('./fixtures/static'); const APP_ID = process.env.GITHUB_APP_IDENTIFIER; const ENDPOINT = 'logs'; // The URL endpoint for fetching status check details -const SHA = 'cabe27e5c8b8cb7cdc4e152f1cf013a89adc7a71' +const SHA = 'cabe27e5c8b8cb7cdc4e152f1cf013a89adc7a71'; + +/** + * This fixture ensures the `token` variable is not null. + * Must be called before any other nock fixtures or else they will be reset. 
+ */ +async function setToken() { + const scope = nock('https://api.github.com'); + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) + .reply(201, {id: APP_ID}); + scope.post(`/app/installations/${APP_ID}/access_tokens`) + .reply(201, { + token: '#t0k3N', + permissions: { + checks: 'write', + metadata: 'read', + contents: 'read' + } + }); + await setAccessToken(); + nock.cleanAll(); +} + +/** + * This fixture injects the default null token via setAccessToken. + */ +async function resetToken() { + const token_default = {'tokenType': null}; + const sandbox = sinon.createSandbox({ + useFakeTimers: { + now: new Date(3000, 1, 1, 0, 0) + } + }); + sandbox.stub(appAuth, 'createAppAuth').returns(async () => token_default); + try { + await setAccessToken(); + } catch (_) { + } + sandbox.restore(); +} + +/** + * This fixture injects the default null token via setAccessToken. + */ +async function mockToken(sandbox) { + await setToken(); // Ensure App id set + const token = {token: '#t0k3N'}; + return (sandbox || sinon).stub(appAuth, 'createAppAuth').returns(async () => token); +} /** * This tests 'setAccessToken' which handles the app authentication. */ describe('setAccessToken', () => { - var scope; // Our server mock - var clock; // Our clock mock for replicable JWT - const expiry = new Date(); // Date of token expiry - - /** - * This fixture injects the default null token via setAccessToken. 
- */ - async function resetToken() { - const token_default = {'tokenType': null}; - const sandbox = sinon.createSandbox({ - useFakeTimers: { - now: new Date(3000, 1, 1, 0, 0) - }}) - sandbox.stub(appAuth, 'createAppAuth').returns(async () => token_default); - try { await setAccessToken(); } catch (_) {} - sandbox.restore(); - } - - before(async function () { - await resetToken(); - expiry.setTime(expiry.getTime() + 60e3); // 60s in the future - // https://runkit.com/gr2m/reproducable-jwt - clock = sinon.useFakeTimers({ - now: 0, - toFake: ['Date'] - }); - }); - - beforeEach(function() { - // Mock for App.installationAccessToken - scope = nock('https://api.github.com', { - reqheaders: { - accept: 'application/vnd.github.machine-man-preview+json', - } - }); - }); - - it('test setAccessToken', (done) => { - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .matchHeader('authorization', `bearer ${token}`) - .reply(201, {id: APP_ID}); - scope.post(`/app/installations/${APP_ID}/access_tokens`) - .matchHeader('authorization', `bearer ${token}`) - .reply(201, { - token: '#t0k3N', - permissions: { - checks: "write", - metadata: "read", - contents: "read" - }, - }); - - setAccessToken().then(function () { - scope.isDone(); - done(); - }); - }); - - it('test install ID cached', (done) => { - // In this test we check that once the install ID is retrieved the app authentication is - // skipped (only to re-auth as installation). 
- scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .matchHeader('authorization', `bearer ${token}`) - .reply(201, {id: APP_ID}) - scope.post(`/app/installations/${APP_ID}/access_tokens`) - .twice() // Should be called twice in a row - .matchHeader('authorization', `bearer ${token}`) - .reply(201, { - token: '#t0k3N', - expires_at: expiry.toISOString(), // expires in 60s - permissions: { - checks: "write", - metadata: "read", - contents: "read" - }, - }); - - setAccessToken().then(async function () { - await setAccessToken(); - scope.isDone(); - done(); - }); - }); - - it('test token cached', (done) => { - // In this test we restore the clocks and ignore the JWT token, instead we test that a new - // token is not requested so long as the token hasn't expired - clock.restore(); - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); - scope.post(`/app/installations/${APP_ID}/access_tokens`) - .reply(201, { - token: '#t0k3N', - expires_at: expiry.toISOString(), - permissions: { - checks: "write", - metadata: "read", - contents: "read" - }, - }); - - setAccessToken().then(async function () { - await setAccessToken(); - scope.isDone(); - done(); - }); - }); - - after(async function() { - clock.restore(); - await resetToken(); - }) + var scope; // Our server mock + var clock; // Our clock mock for replicable JWT + const expiry = new Date(); // Date of token expiry + + before(async function () { + expiry.setTime(expiry.getTime() + 60e3); // 60s in the future + // https://runkit.com/gr2m/reproducable-jwt + clock = sinon.useFakeTimers({ + now: 0, + toFake: ['Date'] + }); + }); + + beforeEach(async function () { + await resetToken(); + scope = nock('https://api.github.com', { + reqheaders: { + accept: 'application/vnd.github.machine-man-preview+json' + } + }); + }); + + it('test setAccessToken', (done) => { + 
scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) + .matchHeader('authorization', `bearer ${token}`) + .reply(201, {id: APP_ID}); + scope.post(`/app/installations/${APP_ID}/access_tokens`) + .matchHeader('authorization', `bearer ${token}`) + .reply(201, { + token: '#t0k3N', + permissions: { + checks: 'write', + metadata: 'read', + contents: 'read' + } + }); + + setAccessToken().then(function () { + scope.done(); + done(); + }); + }); + + it('test install ID cached', (done) => { + // In this test we check that once the install ID is retrieved the app authentication is + // skipped (only to re-auth as installation). + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) + .matchHeader('authorization', `bearer ${token}`) + .reply(201, {id: APP_ID}); + scope.post(`/app/installations/${APP_ID}/access_tokens`) + .once() // Should be called once + .matchHeader('authorization', `bearer ${token}`) + .reply(201, { + token: '#t0k3N', + expires_at: expiry.toISOString(), // expires in 60s + permissions: { + checks: 'write', + metadata: 'read', + contents: 'read' + } + }); + + setAccessToken().then(async function () { + await setAccessToken(); + scope.done(); + done(); + }); + }); + + it('test token cached', (done) => { + // In this test we restore the clocks and ignore the JWT token, instead we test that a new + // token is not requested so long as the token hasn't expired + clock.restore(); + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) + .reply(201, {id: APP_ID}); + scope.post(`/app/installations/${APP_ID}/access_tokens`) + .reply(201, { + token: '#t0k3N', + expires_at: expiry.toISOString(), + permissions: { + checks: 'write', + metadata: 'read', + contents: 'read' + } + }); + + setAccessToken().then(async function () { + await setAccessToken(); + scope.done(); + done(); + }); + }); + + afterEach(() => { + nock.cleanAll(); + }); + + after(function (done) { + 
clock.restore(); + resetToken().then(done); + }); }); /** * This tests 'updateStatus' which handles updating the GitHub statues. */ -describe("updateStatus", () => { - var scope; // Our server mock - var spy; // A spy for authentication - var data; // Some job data to update the status with - - beforeEach(function() { - // Mock for App.installationAccessToken - scope = nock('https://api.github.com', { - reqheaders: { - accept: 'application/vnd.github.machine-man-preview+json', - } - }); - const token = {token: '#t0k3N'}; - spy = sinon.stub(appAuth, 'createAppAuth').returns(async () => token); - data = { - sha: SHA, - owner: 'okonkwe', - repo: 'borneo-function', - status: 'success', - description: '' - }; - }); - - it('updateStatus should post to given endpoint', (done) => { - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); - scope.post(`/repos/${data['owner']}/${data['repo']}/statuses/${data['sha']}`).reply(201); - updateStatus(data).then(() => { - expect(spy.calledOnce).true; - scope.isDone(); - done(); - }); - }); - - it('updateStatus should contain the correct data', (done) => { - data.base = 'dcb375f0'; - data.description = 'Lorem ipsum '.repeat(13); // Check max char - data.context = 'ci/test'; - const uri = `/repos/${data['owner']}/${data['repo']}/statuses/${data['sha']}`; - const url = `${process.env.WEBHOOK_PROXY_URL}/${ENDPOINT}/${data.sha}`; // target URL - const requestBodyMatcher = (body) => { - return body.state === data.status && - body.target_url === url && +describe('updateStatus', () => { + var scope; // Our server mock + var spy; // A spy for authentication + var data; // Some job data to update the status with + + beforeEach(async function () { + // Mock for App.installationAccessToken + scope = nock('https://api.github.com', { + reqheaders: { + accept: 'application/vnd.github.machine-man-preview+json' + } + }); + spy = await mockToken(); + data = { + sha: SHA, + owner: 'okonkwe', + 
repo: 'borneo-function', + status: 'success', + description: '' + }; + }); + + afterEach(() => { + nock.cleanAll(); + }); + + it('updateStatus should post to given endpoint', (done) => { + scope.post(`/repos/${data['owner']}/${data['repo']}/statuses/${data['sha']}`).reply(201); + updateStatus(data).then(() => { + expect(spy.calledOnce).true; + scope.done(); + done(); + }); + }); + + it('updateStatus should contain the correct data', (done) => { + data.base = 'dcb375f0'; + data.description = 'Lorem ipsum '.repeat(13); // Check max char + data.context = 'ci/test'; + const uri = `/repos/${data['owner']}/${data['repo']}/statuses/${data['sha']}`; + const url = `${process.env.WEBHOOK_PROXY_URL}/${ENDPOINT}/${data.sha}`; + const requestBodyMatcher = (body) => { + return body.state === data.status && + body.target_url === url + `/?module=${data['repo']}` && body.description.length <= 140 && body.context === data.context; - }; - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); - scope.post(uri, requestBodyMatcher) - .matchHeader('authorization', 'token #t0k3N') - .reply(201); - - // Run - updateStatus(data, url).then(() => { - expect(spy.calledOnce).true; - scope.isDone(); - done(); - }); - }); - - it('updateStatus should validate SHA', () => { - return updateStatus({sha: null}).catch(err => { - expect(err).to.be.instanceOf(ReferenceError); - expect(err).to.have.property('message', 'undefined or invalid sha'); - expect(spy.called).false; - }); - }); - - it('updateStatus should validate status', () => { - return updateStatus({status: 'working', sha: SHA}).catch(err => { - expect(err).to.be.instanceOf(APIError); - expect(err.message).to.contain('status'); - expect(spy.called).false; - }); - }); - - afterEach(function() { - spy.restore(); - }); + }; + scope.post(uri, requestBodyMatcher) + .matchHeader('authorization', 'token #t0k3N') + .reply(201); + + // Run + updateStatus(data, url).then(() => { + 
expect(spy.calledOnce).true; + scope.done(); + done(); + }); + }); + + it('updateStatus should validate SHA', () => { + return updateStatus({sha: null}).catch(err => { + expect(err).to.be.instanceOf(ReferenceError); + expect(err).to.have.property('message', 'undefined or invalid sha'); + expect(spy.called).false; + }); + }); + + it('updateStatus should validate status', () => { + return updateStatus({status: 'working', sha: SHA}).catch(err => { + expect(err).to.be.instanceOf(APIError); + expect(err.message).to.contain('status'); + expect(spy.called).false; + }); + }); + + afterEach(function () { + spy.restore(); + }); }); @@ -231,117 +263,267 @@ describe("updateStatus", () => { * callback to check whether the event is configured in the settings and if so, should update the * check status to pending for each context, and add each job to the queue. */ -describe("Github event handler callback", () => { - var scope; // Our server mock - var evt; // A payload event loaded from fixtures - var sandbox; // Sandbox for spying on queue - - /** - * This fixture ensures the `token` variable is not null. 
- */ - async function setToken() { - scope = nock('https://api.github.com'); - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); - scope.post(`/app/installations/${APP_ID}/access_tokens`) - .reply(201, { - token: '#t0k3N', - permissions: { - checks: "write", - metadata: "read", - contents: "read" - }, - }); - await setAccessToken(); - scope.done(); - } - - before(function (done) { - setToken().then(() => done()); - scope = nock('https://api.github.com', { - reqheaders: { - accept: 'application/vnd.github.machine-man-preview+json', - } - }); - }); - - beforeEach(function () { - queue.process(async (_job, _done) => {}) // nop - sandbox = sinon.createSandbox() - evt = JSON.parse(fs.readFileSync('./test/fixtures/pull_payload.json')); - }); - - it('test callback adds pending jobs', (done) => { - let pr = evt.pull_request; - let uri = `/repos/${pr.head.repo.owner.login}/${pr.head.repo.name}/statuses/${pr.head.sha}`; - scope.post(uri, body => { return body.state === 'pending'}) - .twice() - .reply(201, {}); - sandbox.spy(queue); - eventCallback({payload: evt, event: 'pull_request'}).then(function() { - expect(queue.pile.length).eq(2); // Two jobs should have been added - let data = queue.pile.pop().data; // Last added - let context = config.events.pull_request.checks; - expect(data.sha).eq(pr.head.sha); // Check head commit set - expect(data.base).eq(pr.base.sha); // Check base commit set - expect(data.force).not.true; // Check force is false (the previous job will save its results) - expect(data.owner).eq(pr.head.repo.owner.login); // Check repo owner set - expect(data.repo).eq(pr.head.repo.name); // Check repo name set - - expect(data.context.startsWith(context.pop())).true; - sandbox.assert.calledTwice(queue.add); - expect(queue.pile.pop().data.force).true; - - scope.isDone(); - done(); - }); - }); - - it('test event type error', (done) => { - sandbox.spy(queue); - eventCallback({payload: evt, event: 
'page_build'}).then(() => { - done(new Error('Expected method to reject.')); - }) - .catch((err) => { - sandbox.assert.notCalled(queue.add); - assert.instanceOf(err, TypeError); - done(); - }); - }); - - it('test fork', (done) => { - sandbox.spy(queue); - evt.pull_request.head.repo.owner.login = 'k1o0'; - let eventData = {payload: evt, event: 'pull_request'}; - eventCallback(eventData).then(() => { - done(new Error('Expected method to reject.')); - }) - .catch((err) => { - sandbox.assert.notCalled(queue.add); - assert.instanceOf(err, ReferenceError); - done(); - }); - }); - - it('test push event', (done) => { - let pr = { - ref: config.events.push.ref_ignore, // Should ignore this ref - head_commit: { id: SHA }, - before: evt.pull_request.base.sha, - repository: evt.repository, - installation: evt.installation - }; - sandbox.spy(queue); - eventCallback({payload: pr, event: 'push'}).then(function() { - sandbox.assert.notCalled(queue.add); // Should have been skipped - done(); - }); - }); - - afterEach(function () { - queue.pile = []; - sandbox.restore(); - }); +describe('Github event handler callback', () => { + var scope; // Our server mock + var evt; // A payload event loaded from fixtures + var sandbox; // Sandbox for spying on queue + const _events = JSON.parse(JSON.stringify(config['events'])); // Deep clone events + + before(function () { + scope = nock('https://api.github.com', { + reqheaders: { + accept: 'application/vnd.github.machine-man-preview+json' + } + }); + }); + + beforeEach(async function () { + queue.process(async (_job, _done) => { + }); // nop + sandbox = sinon.createSandbox(); + await mockToken(sandbox); + evt = JSON.parse(fs.readFileSync('./test/fixtures/pull_payload.json')); + }); + + it('test callback adds pending jobs', (done) => { + let nCalls = 0; + let pr = evt.pull_request; + let post_uri = `/repos/${pr.head.repo.owner.login}/${pr.head.repo.name}/statuses/${pr.head.sha}`; + let testable = body => { + nCalls += 1; + if (nCalls === 2) { + 
expect(queue.pile.length).eq(2); // Two jobs should have been added + let data = queue.pile.pop().data; // Last added + let context = config.events.pull_request.checks; + expect(data.sha).eq(pr.head.sha); // Check head commit set + expect(data.base).eq(pr.base.sha); // Check base commit set + expect(data.force).not.true; // Check force is false (the previous job will save its results) + expect(data.owner).eq(pr.head.repo.owner.login); // Check repo owner set + expect(data.repo).eq(pr.head.repo.name); // Check repo name set + expect(data.routine).eq(config['routines']['*']); // Check routine + + expect(data.context.startsWith(context.pop())).true; + sandbox.assert.calledTwice(queue.add); + expect(queue.pile.pop().data.force).true; + + scope.isDone(); + done(); + } + return body.state === 'pending'; + }; + scope.post(post_uri, testable) + .twice() + .reply(201, {}); + + // Ignore files check + config.events['pull_request']['files_ignore'] = 'file1.txt'; + + const get_uri = `/repos/${pr.head.repo.owner.login}/${pr.head.repo.name}/pulls/${pr.number}/files`; + const payload = { + files: [ + {filename: 'README.md'}, + {filename: 'file1.txt'} + ] + }; + scope.get(get_uri) + .reply(200, payload); + + sandbox.spy(queue); + eventCallback({payload: evt, event: 'pull_request'}); + }); + + it('test event type error', (done) => { + sandbox.spy(queue); + eventCallback({payload: evt, event: 'page_build'}).then(() => { + done(new Error('Expected method to reject.')); + }) + .catch((err) => { + sandbox.assert.notCalled(queue.add); + assert.instanceOf(err, TypeError); + done(); + }); + }); + + it('test fork', (done) => { + sandbox.spy(queue); + evt.pull_request.head.repo.owner.login = 'k1o0'; + let eventData = {payload: evt, event: 'pull_request'}; + eventCallback(eventData).then(() => { + done(new Error('Expected method to reject.')); + }) + .catch((err) => { + sandbox.assert.notCalled(queue.add); + assert.instanceOf(err, ReferenceError); + done(); + }); + }); + + it('test push 
event', (done) => { + let pr = { + ref: config.events.push.ref_ignore, // Should ignore this ref + head_commit: {id: SHA}, + before: evt.pull_request.base.sha, + repository: evt.repository, + installation: evt.installation + }; + sandbox.spy(queue); + eventCallback({payload: pr, event: 'push'}).then(function () { + sandbox.assert.notCalled(queue.add); // Should have been skipped + done(); + }); + }); + + it('test files ignore', async () => { + // Tests push event + let p = { + ref: 'foo', + head_commit: {id: SHA}, + before: evt.pull_request.base.sha, + repository: evt.repository, + installation: evt.installation + }; + config.events['push']['files_ignore'] = [ + '.*.md', + 'file1.txt' + ]; + + // Tests pull request synchronize + evt.action = 'synchronize'; + evt.before = p.before; + evt.after = SHA; + config.events['pull_request']['files_ignore'] = config.events['push']['files_ignore']; + + // Tests pull request synchronize + const uri = `/repos/${p.repository.owner.login}/${p.repository.name}/compare/${p.before}...${SHA}`; + const payload = { + files: [ + {filename: 'README.md'}, + {filename: 'file1.txt'} + ] + }; + scope.get(uri) + .twice() + .reply(200, payload); + + sandbox.spy(queue); + await eventCallback({payload: p, event: 'push'}); + sandbox.assert.notCalled(queue.add); + + await eventCallback({payload: evt, event: 'pull_request'}); + sandbox.assert.notCalled(queue.add); + scope.isDone(); + }); + + it('expect catches error on get files', (done) => { + var nCalls = 0; + let pr = evt.pull_request; + let post_uri = `/repos/${pr.head.repo.owner.login}/${pr.head.repo.name}/statuses/${pr.head.sha}`; + config.events['pull_request']['files_ignore'] = 'file1.txt'; + let testable = body => { + nCalls += 1; + if (nCalls === 2) { + expect(queue.pile.length).eq(2); // Two jobs should have been added + sandbox.assert.calledTwice(queue.add); + scope.isDone(); + done(); + } + return body.state === 'pending'; + }; + scope.post(post_uri, testable) + .twice() + .reply(201, 
{}); + + // Ignore files check + const get_uri = `/repos/${pr.head.repo.owner.login}/${pr.head.repo.name}/pulls/${pr.number}/files`; + scope.get(get_uri) + .reply(404, {}); + + sandbox.spy(queue); + eventCallback({payload: evt, event: 'pull_request'}); + }); + + it('expect skips on empty checks list', done => { + config.events.pull_request.checks = null; + sandbox.spy(queue); + eventCallback({payload: evt, event: 'pull_request'}).then(function () { + sandbox.assert.notCalled(queue.add); + scope.isDone(); + done(); + }); + }); + + it('expect skips on missing event', done => { + delete config.events['pull_request']; + sandbox.spy(queue); + eventCallback({payload: evt, event: 'pull_request'}).then(function () { + sandbox.assert.notCalled(queue.add); + scope.isDone(); + done(); + }); + }); + + it('expect skips draft PR', done => { + config.events.pull_request.ignore_drafts = true; + evt.pull_request.draft = true; + sandbox.spy(queue); + eventCallback({payload: evt, event: 'pull_request'}).then(function () { + sandbox.assert.notCalled(queue.add); + scope.isDone(); + done(); + }); + }); + + it('expect skips when action not in config', done => { + config.events.pull_request.actions = ['synchronize']; + sandbox.spy(queue); + eventCallback({payload: evt, event: 'pull_request'}).then(function () { + sandbox.assert.notCalled(queue.add); + scope.isDone(); + done(); + }); + }); + + it('expect error on wrong install id', done => { + evt.installation.id = 456; + sandbox.spy(queue); + eventCallback({payload: evt, event: 'pull_request'}).catch(function (err) { + sandbox.assert.notCalled(queue.add); + expect(err).instanceOf(APIError); + scope.isDone(); + done(); + }); + }); + + it('expect handles set pending error', done => { + sandbox.spy(queue); + var nCalls = 0; + const pr = evt.pull_request; + const post_uri = `/repos/${pr.head.repo.owner.login}/${pr.head.repo.name}/statuses/${pr.head.sha}`; + const testable = body => { + nCalls += 1; + if (nCalls === 2) { + 
sandbox.assert.calledTwice(queue.add); + scope.isDone(); + done(); + } + return body.state === 'pending'; + }; + scope.post(post_uri, testable) + .twice() + .reply(500, {}); + + eventCallback({payload: evt, event: 'pull_request'}); + }); + + afterEach(function () { + queue.pile = []; + config.events = JSON.parse(JSON.stringify(_events)); + sandbox.restore(); + }); + + after(() => { + nock.cleanAll(); + }); }); @@ -350,90 +532,112 @@ describe("Github event handler callback", () => { * lib tests. This tests the endpoint. */ describe('shields callback', () => { - var scope; // Our server mock - var info; // URI parameters - - before(function () { - scope = nock('https://api.github.com'); - queue.process(async (_job, _done) => {}); // nop - queue.pile = []; // ensure queue is empty - info = { - repo: 'Hello-World', - owner: 'Codertocat', - branch: 'develop' - }; - }); - - it('expect coverage response', (done) => { - // Set up response to GitHub API query - // GET /repos/:owner/:repo/git/refs/heads/:branch - scope.get(`/repos/${info.owner}/${info.repo}/git/refs/heads/${info.branch}`) - .reply(200, { - ref: `ref/heads/${info.branch}`, - object: { - sha: SHA - } - }); - - request(srv) - .get(`/coverage/${info.repo}/${info.branch}`) - .expect('Content-Type', 'application/json') - .expect(200) - .end(function (err, res) { - scope.isDone(); - if (err) return done(err); - expect(res.body).deep.keys([ - 'schemaVersion', - 'label', - 'message', - 'color' - ]); - done(); - }); - }); - - it('expect errors', (done) => { - // Set up response to GitHub API query - scope.get(`/repos/${info.owner}/${info.repo}/git/refs/heads/${info.branch}`).reply(404); - - request(srv) - .get(`/coverage/${info.repo}/${info.branch}`) - .expect(404) - .end(function (err) { - scope.isDone(); - if (err) return done(err); - done(); - }); - }); - - it('expect job forced', (done) => { - // Set up response to GitHub API query - // GET /repos/:owner/:repo/git/refs/heads/:branch - 
scope.get(`/repos/${info.owner}/${info.repo}/git/refs/heads/${info.branch}`) - .reply(200, { - ref: `ref/heads/${info.branch}`, - object: { - sha: SHA - } - }); - - request(srv) - .get(`/coverage/${info.repo}/${info.branch}?force=1`) - .expect('Content-Type', 'application/json') - .expect(200) - .end(function (err, res) { - scope.isDone(); - if (err) return done(err); - expect(res.body).deep.keys([ - 'schemaVersion', - 'label', - 'message', - 'color' - ]); - expect(queue.pile.length).eq(1); - done(); - }); - }); + var scope; // Our server mock + var info; // URI parameters + var _routines = JSON.parse(JSON.stringify(config.routines)); + + before(function () { + scope = nock('https://api.github.com'); + queue.process(async (_job, _done) => { + }); // nop + queue.pile = []; // ensure queue is empty + info = { + repo: 'Hello-World', + owner: 'Codertocat', + branch: 'develop' + }; + }); + + after(function () { + delete queue.process; + nock.cleanAll(); + queue.pile = []; // ensure queue is empty + }); + + afterEach(function () { + config.routines = _routines; + }); + + it('expect coverage response', (done) => { + // Set up response to GitHub API query + // GET /repos/:owner/:repo/branches/:branch + scope.get(`/repos/${info.owner}/${info.repo}/branches/${info.branch}`) + .reply(200, { + ref: `ref/heads/${info.branch}`, + commit: { + sha: SHA + } + }); + + request(srv) + .get(`/coverage/${info.repo}/${info.branch}`) + .expect('Content-Type', 'application/json') + .expect(200) + .end(function (err, res) { + scope.done(); + if (err) return done(err); + expect(res.body).deep.keys([ + 'schemaVersion', + 'label', + 'message', + 'color' + ]); + done(); + }); + }); + + it('expect errors', (done) => { + // Set up response to GitHub API query + scope.get(`/repos/${info.owner}/${info.repo}/branches/${info.branch}`).reply(404); + + request(srv) + .get(`/coverage/${info.repo}/${info.branch}`) + .expect(404) + .end(function (err) { + scope.done(); + if (err) return done(err); + 
done(); + }); + }); + + // In order for this to work we need to clear the routine defaults from the settings + it('expect context not found', done => { + delete config.routines; + request(srv) + .get(`/unknown/${info.repo}/${info.branch}`) + .expect(404) + .end(function (err) { + scope.isDone(); + done(err); + }); + }); + + it('expect job forced', done => { + // Set up response to GitHub API query + // GET /repos/:owner/:repo/git/refs/heads/:branch + scope.get(`/repos/${info.owner}/${info.repo}/commits/${SHA}`) + .reply(200, { + ref: `ref/heads/${SHA}`, + sha: SHA + }); + + request(srv) + .get(`/coverage/${info.repo}/${SHA}?force=1`) + .expect('Content-Type', 'application/json') + .expect(200) + .end(function (err, res) { + scope.done(); + if (err) return done(err); + expect(res.body).deep.keys([ + 'schemaVersion', + 'label', + 'message', + 'color' + ]); + expect(queue.pile.length).eq(1); + done(); + }); + }); }); @@ -443,62 +647,129 @@ describe('shields callback', () => { * it as HTML. 
*/ describe('logs endpoint', () => { - var stub; // Our fs stub - var logData; // The text in our log - var scope; // Our server mock - - before(function () { - const log_path = path.join(config.dataPath, 'reports', SHA); - logData = ['hello world', 'foobar']; - scope = nock('https://api.github.com'); - stub = sinon - .stub(fs, 'readFile') - .withArgs(path.join(log_path, `std_output-${SHA.substr(0,7)}.log`), 'utf8') - .yieldsAsync(null, logData[0]) - .withArgs(path.join(log_path, 'test_output.log'), 'utf8') - .yieldsAsync(null, logData[1]); - }); - - beforeEach(function () { - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${SHA}`) - .reply(200, { sha: SHA }); - }) - - it('expect HTML log', (done) => { - request(srv) - .get(`/${ENDPOINT}/${SHA}`) - .expect(200) - .end(function (err, res) { - if (err) return done(err); - expect(res.text).contains(logData[0]); - expect(res.text).to.match(/^.+<\/html>$/); - done(); - }); - }); - - it('expect type param', (done) => { - request(srv) - .get(`/${ENDPOINT}/${SHA}?type=logger`) - .expect(200) - .end(function (err, res) { - if (err) return done(err); - expect(res.text).contains(logData[1]); - expect(res.text).to.match(/^.+<\/html>$/); + var logData; // The text in our log + var scope; // Our server mock + var HTMLlog; // Our HTML log page + + before(function () { + queue.process(() => { + }); // nop + const log_path = path.join(config.dataPath, 'reports', SHA); + logData = ['hello world', 'foobar']; + scope = nock('https://api.github.com'); + let file = path.join(log_path, `std_output-${SHA.substr(0, 7)}.log`); + fs.mkdirSync(log_path, {recursive: true}); + fs.writeFileSync(file, logData[0]); + fs.writeFileSync(path.join(log_path, 'test_output.log'), logData[1]); + HTMLlog = fs.readFileSync('./public/log.html', 'utf8'); + }); + + beforeEach(function () { + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${SHA}`) + .reply(200, {sha: SHA}); + queue.pile = []; + }); + + 
it('expect HTML log', done => { + request(srv) + .get(`/log/${SHA}`) + .expect(200) + .end(function (err, res) { + if (err) return done(err); + expect(res.text).eq(HTMLlog); + done(); + }); + }); + + it('expect redirect to log', done => { + request(srv) + .get(`/${ENDPOINT}/${SHA}`) + .expect(301) + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains('Moved'); + expect(res.header.location).eq('/log/' + SHA); + done(); + }); + }); + + it('expect raw log', done => { + request(srv) + .get(`/${ENDPOINT}/raw/${SHA}`) + .expect(200) + .expect('X-CI-JobStatus', 'finished') + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains(logData[0]); + done(); + }); + }); + + it('expect running status', done => { + queue.add({sha: SHA}); + request(srv) + .get(`/${ENDPOINT}/raw/${SHA}`) + .expect(200) + .expect('X-CI-JobStatus', 'running') + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains(logData[0]); + done(); + }); + }); + + it('expect queued status', done => { + queue.add({sha: SHA}); + queue.pile[0].running = false; + request(srv) + .get(`/${ENDPOINT}/raw/${SHA}`) + .expect(200) + .expect('X-CI-JobStatus', 'queued') + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains('waiting'); + done(); + }); + }); + + it('expect type param', (done) => { + request(srv) + .get(`/${ENDPOINT}/raw/${SHA}?type=logger`) + .expect(200) + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains(logData[1]); + done(); + }); + }); + + it('expect not found', done => { + let id = '1c33a6e2ac7d7fc098105b21a702e104e09767cf'; + request(srv) + .get(`/${ENDPOINT}/raw/${id}`) + .expect(404) + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains(`${id} not found`); + done(); + }); + }); + + afterEach(() => { + nock.cleanAll(); + }); + + after(done => { + sinon.restore(); + queue.pile = []; + delete queue.process; + const 
logDir = path.join(config.dataPath, 'reports'); + fs.rmdir(logDir, {recursive: true}, err => { + if (err) throw err; done(); - }); - }); - - it('expect not found', (done) => { - sinon.restore(); - request(srv) - .get(`/${ENDPOINT}/${SHA}`) - .expect(404) - .end(function (err, res) { - if (err) return done(err); - expect(res.text).contains(`${SHA} not found`) - done(); - }); - }); + }); + }); + }); @@ -507,40 +778,46 @@ describe('logs endpoint', () => { * return the full commit hash. */ describe('fetchCommit', () => { - var scope; // Our server mock - - before(function () { - scope = nock('https://api.github.com'); - }); - - it('expect full SHA from short id', (done) => { - const id = SHA.slice(0, 7); - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) - .reply(200, {sha: SHA}); - // Check full ID returned - fetchCommit(id) - .then(id => { - expect(id).eq(SHA); - done(); - }); - }); - - it('expect full SHA from branch and module', (done) => { - const branch = 'develop'; - const repo = 'foobar'; - scope.get(`/repos/${process.env.REPO_OWNER}/${repo}/branches/${branch}`) - .reply(200, { - commit: { - sha: SHA - } - }); - // Check full ID returned - fetchCommit(branch, true, repo) - .then(id => { - expect(id).eq(SHA); - done(); - }); - }); + var scope; // Our server mock + + before(function () { + scope = nock('https://api.github.com'); + }); + + after(function () { + nock.cleanAll(); + }); + + it('expect full SHA from short id', (done) => { + const id = SHA.slice(0, 7); + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) + .reply(200, {sha: SHA}); + // Check full ID returned + fetchCommit(id) + .then(id => { + expect(id).eq(SHA); + scope.done(); + done(); + }); + }); + + it('expect full SHA from branch and module', (done) => { + const branch = 'develop'; + const repo = 'foobar'; + scope.get(`/repos/${process.env.REPO_OWNER}/${repo}/branches/${branch}`) + .reply(200, { + commit: { + sha: SHA + } + }); 
+ // Check full ID returned + fetchCommit(branch, true, repo) + .then(id => { + expect(id).eq(SHA); + scope.done(); + done(); + }); + }); }); @@ -550,227 +827,212 @@ describe('fetchCommit', () => { * JSON record. */ describe('records endpoint', () => { - var scope; // Our server mock - - before(function () { - scope = nock('https://api.github.com'); - }); - - it('expect JSON log', (done) => { - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${SHA}`) - .reply(200, { sha: SHA }); - // Check JSON record returned - request(srv) - .get(`/${ENDPOINT}/records/${SHA}`) - .expect(200) - .expect('Content-Type', 'application/json') - .end(function (err, res) { - if (err) return done(err); - const record = JSON.parse(res.text); - expect(record.commit).eq(SHA); - done(); - }); - }); - - it('expect works with short id', (done) => { - const id = SHA.slice(0, 7); - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) - .reply(200, { sha: SHA } ); - // Check JSON record returned - request(srv) - .get(`/${ENDPOINT}/records/${id}`) - .expect(200) - .expect('Content-Type', 'application/json') - .end(function (err, res) { - if (err) return done(err); - const record = JSON.parse(res.text); - expect(record.commit).eq(SHA); - done(); - }); - }); - - it('expect 404 on missing', (done) => { - const id = SHA.replace('2', '3'); - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) - .reply(404); - // Check JSON record returned - request(srv) - .get(`/${ENDPOINT}/records/${id}`) - .expect(404) - .end(function (err, res) { - if (err) return done(err); - expect(res.text).contains('not found'); - done(); - }); - }); - - it('expect works with branch and module', (done) => { - const branch = 'develop'; - const repo = 'foobar'; - scope.get(`/repos/${process.env.REPO_OWNER}/${repo}/branches/${branch}`) - .reply(200, { - commit: { - sha: SHA - } - }); - // Check JSON record returned - request(srv) - 
.get(`/${ENDPOINT}/records/${branch}?module=${repo}`) - .expect(200) - .expect('Content-Type', 'application/json') - .end(function (err, res) { - if (err) return done(err); - const record = JSON.parse(res.text); - expect(record.commit).eq(SHA); - done(); - }); - }); + var scope; // Our server mock + + before(function () { + scope = nock('https://api.github.com'); + queue.process(async (_job, _done) => { + }); // nop + }); + + after(function () { + queue.pile = []; + delete queue.process; + }); + + after(function () { + nock.cleanAll(); + }); + + + it('expect JSON log', (done) => { + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${SHA}`) + .reply(200, {sha: SHA}); + // Check JSON record returned + request(srv) + .get(`/${ENDPOINT}/records/${SHA}`) + .expect(200) + .expect('Content-Type', 'application/json') + .end(function (err, res) { + if (err) return done(err); + const record = JSON.parse(res.text); + expect(record.commit).eq(SHA); + scope.done(); + done(); + }); + }); + + it('expect works with short id', (done) => { + const id = SHA.slice(0, 7); + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) + .reply(200, {sha: SHA}); + // Check JSON record returned + request(srv) + .get(`/${ENDPOINT}/records/${id}`) + .expect(200) + .expect('Content-Type', 'application/json') + .end(function (err, res) { + if (err) return done(err); + const record = JSON.parse(res.text); + expect(record.commit).eq(SHA); + scope.done(); + done(); + }); + }); + + it('expect 404 on missing', (done) => { + const id = SHA.replace('2', '3'); + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) + .reply(404); + // Check JSON record returned + request(srv) + .get(`/${ENDPOINT}/records/${id}`) + .expect(404) + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains('not found'); + scope.done(); + done(); + }); + }); + + it('expect 500 on error', (done) => { + const id = 
SHA.replace('2', '3'); + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) + .reply(500); + // Check JSON record returned + request(srv) + .get(`/${ENDPOINT}/records/${id}`) + .expect(500) + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains('Failed'); + done(); + }); + }); + + it('expect queued job data', (done) => { + const id = SHA.replace('2', '3'); + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) + .reply(200, {sha: id}); + queue.add({sha: id, status: 'pending'}); + // Check JSON record returned + request(srv) + .get(`/${ENDPOINT}/records/${id}`) + .expect(200) + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains('pending'); + done(); + }); + }); + + it('expect works with branch and module', (done) => { + const branch = 'develop'; + const repo = 'foobar'; + scope.get(`/repos/${process.env.REPO_OWNER}/${repo}/branches/${branch}`) + .reply(200, { + commit: { + sha: SHA + } + }); + // Check JSON record returned + request(srv) + .get(`/${ENDPOINT}/records/${branch}?module=${repo}`) + .expect(200) + .expect('Content-Type', 'application/json') + .end(function (err, res) { + if (err) return done(err); + const record = JSON.parse(res.text); + expect(record.commit).eq(SHA); + scope.done(); + done(); + }); + }); }); /** - * This tests the coverage endpoint. Directly accessing endpoint should return 403. + * This tests the jobs endpoint. This endpoint should return the jobs pile. 
*/ -describe('coverage endpoint', () => { - - before(function(done) { - let reportsDir = path.join(config.dataPath, 'reports', SHA); - fs.mkdir(reportsDir, { recursive: true }, async (err) => { - if (err) throw err; - await fs.writeFile(path.join(reportsDir, 'foobar.log'), '', (err) => { if (err) throw err; }) - await fs.writeFile(path.join(reportsDir, 'index.html'), '', (err) => { if (err) throw err; }) - done() - }); - }) - - it('expect root not found', (done) => { - request(srv) - .get(`/${ENDPOINT}/coverage/`) // trailing slash essential - .expect(404) - .end(err => { - err? done(err) : done(); - }); - }); - - it('expect dir to be served', (done) => { - request(srv) - .get(`/${ENDPOINT}/coverage/${SHA}/`) // trailing slash essential - .expect(200) - .end(function (err, res) { - err? done(err) : done(); - }); - }); - - after(function() { - fs.rmdir(path.join(config.dataPath, 'reports'), {recursive: true}, err => { - if (err) throw err; - }) - - }) +describe('jobs endpoint', () => { + + before(function () { + queue.process(async (_job, _done) => { + }); // nop + }); + + after(function () { + queue.pile = []; + delete queue.process; + }); + + it('expect queue JSON', (done) => { + queue.add({sha: SHA, status: 'pending', context: 'continuous-integration'}); + queue.add({sha: SHA, status: 'pending', context: 'coverage'}); + // Check JSON record returned + request(srv) + .get('/jobs') + .expect(200) + .expect('Content-Type', 'application/json') + .end(function (err, res) { + if (err) return done(err); + const payload = JSON.parse(res.text); + expect(payload.total).eq(2); + expect(payload.pile[0].running).true; + expect(payload.pile[1].running).false; + expect(payload.pile[0]._child).undefined; + done(); + }); + }); }); /** - * This tests the runtests and prepareEnv functions. - * @todo Check for log close on exit + * This tests the coverage endpoint. Directly accessing endpoint should return 403. 
*/ -describe('running tests', () => { - var sandbox; // Sandbox for spying on queue - var stub; // Main fileExec stub - - beforeEach(function () { - queue.process(async (_job, _done) => {}) // nop - sandbox = sinon.createSandbox() - stub = sandbox.stub(cp, 'execFile'); - sandbox.stub(fs, 'createWriteStream'); - sandbox.stub(fs, 'mkdir').callsArg(2); - execEvent = new events.EventEmitter(); - execEvent.stdout = new events.EventEmitter(); - execEvent.stdout.pipe = sandbox.spy(); - stub.returns(execEvent); - }); - - it('test prepareEnv', async () => { - const callback = sandbox.spy(); - stub.callsArgAsync(3, null, 'preparing', ''); - const job = {data: {sha: SHA}}; - await prepareEnv(job, callback); - let log = path.join(config.dataPath, 'reports', SHA, 'std_output-cabe27e.log'); - let fn = path.resolve(path.join(__dirname, '..', 'prep_env.BAT')); - stub.calledWith(fn, [SHA, config.repo, config.dataPath]); - expect(callback.calledOnce).true; - expect(callback.calledOnceWithExactly(job)).true; - sandbox.assert.calledWith(fs.createWriteStream, log); - }); - - it('test prepareEnv with error', async (done) => { - stub.callsArgWith(3, {code: 'ENOENT'}, 'preparing', ''); - const job = { - data: {sha: SHA}, - done: (err) => { - expect(err).instanceOf(Error); - expect(err.message).to.have.string('not found'); +describe('coverage endpoint', () => { + + before(function (done) { + let reportsDir = path.join(config.dataPath, 'reports', SHA); + fs.mkdir(reportsDir, {recursive: true}, async (err) => { + if (err) throw err; + await fs.writeFile(path.join(reportsDir, 'foobar.log'), '', (err) => { + if (err) throw err; + }); + await fs.writeFile(path.join(reportsDir, 'index.html'), '', (err) => { + if (err) throw err; + }); done(); - } - }; - prepareEnv(job); - }); - - it('test runtests', async () => { - const callback = sandbox.spy(); - stub.callsArgWith(3, null, 'running tests', ''); - const job = { - data: {sha: SHA}, - done: callback - }; - await runTests(job); - const log = 
path.join(config.dataPath, 'reports', SHA, 'std_output-cabe27e.log'); - sandbox.assert.calledWith(fs.createWriteStream, log, { flags: 'a' }); - const fn = path.resolve(path.join(__dirname, '..', 'run_tests.BAT')); - stub.calledWith(fn, [SHA, config.repo, config.dataPath]); - expect(callback.calledOnce).true; - expect(callback.calledOnceWithExactly()).true; - }); - - it('runtests parses MATLAB error', (done) => { - var err; - const errmsg = 'Error in MATLAB_function line 23'; - stub.callsArgWith(3, {code: 1}, 'running tests', errmsg); - sandbox.stub(fs.promises, 'writeFile').callsFake(() => { - sandbox.assert.calledWith(fs.promises.writeFile, config.dbFile); - expect(err).instanceOf(Error); - expect(err.message).to.have.string(errmsg); - done(); - }) - const job = { - data: {sha: SHA}, - done: (e) => { err = e; } - }; - runTests(job); - }); - - it('runtests parses Python error', (done) => { - var err; - stub.callsArgWith(3, {code: 1}, 'running tests', stdErr); - sandbox.stub(fs.promises, 'writeFile').callsFake(() => { - sandbox.assert.calledWith(fs.promises.writeFile, config.dbFile); - expect(err).instanceOf(Error); - let errmsg = 'FileNotFoundError: Invalid data root folder E:\\FlatIron\\integration'; - expect(err.message.startsWith(errmsg)).true; - done(); - }) - const job = { - data: {sha: SHA}, - done: (e) => { err = e; } - }; - runTests(job); - }); - - afterEach(function () { - queue.pile = []; - sandbox.verifyAndRestore(); - }); + }); + }); + + it('expect root not found', (done) => { + request(srv) + .get(`/${ENDPOINT}/coverage/`) // trailing slash essential + .expect(404) + .end(err => { + err ? done(err) : done(); + }); + }); + + it('expect dir to be served', (done) => { + request(srv) + .get(`/${ENDPOINT}/coverage/${SHA}/`) // trailing slash essential + .expect(200) + .end(function (err) { + err ? 
done(err) : done(); + }); + }); + + after(done => { + fs.rmdir(path.join(config.dataPath, 'reports'), {recursive: true}, err => { + if (err) throw err; + done(); + }); + + }); }); @@ -779,69 +1041,71 @@ describe('running tests', () => { * @todo Check for log close on exit */ describe('srv github/', () => { - var scope; // Our server mock - var clock; // Our clock mock for replicable JWT - - before(function() { - // https://runkit.com/gr2m/reproducable-jwt - clock = sinon.useFakeTimers({ - now: 0, - toFake: ['Date'] - }); - }); - - beforeEach(function() { - // Mock for App.installationAccessToken - scope = nock('https://api.github.com', { - reqheaders: { - accept: 'application/vnd.github.machine-man-preview+json', - } - }); - }); - - it('expect skipped', (done) => { - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`).reply(200); - scope.post(`/app/installations/${APP_ID}/access_tokens`).reply(200); - - request(srv) - .post(`/github`) // trailing slash essential - .set({'X-GitHub-Event': 'issues'}) - .end(function (err, res) { - expect(scope.isDone()).not.true; - err ? 
done(err) : done(); - }); - }); - - it('expect error caught', (done) => { - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); - scope.post(`/app/installations/${APP_ID}/access_tokens`) - .reply(201, { - token: '#t0k3N', - permissions: { - checks: "write", - metadata: "read", - contents: "read" - }, - }); - - request(srv) - .post(`/github`) // trailing slash essential - .set({ - 'X-GitHub-Event': 'check_suite', - 'x-github-hook-installation-target-id': process.env.GITHUB_APP_IDENTIFIER, - 'X-Hub-Signature': {'sha': null}, - 'X-GitHub-Delivery': '72d3162e-cc78-11e3-81ab-4c9367dc0958' - }) - .end(function (err) { - expect(err).is.null; // Should have caught error - done() - }); - }); - - after(function () { - clock.restore(); - }); + var scope; // Our server mock + var spy; // Token AppAuth spy + + beforeEach(async function () { + // Mock for App.installationAccessToken + spy = await mockToken(); + scope = nock('https://api.github.com', { + reqheaders: { + accept: 'application/vnd.github.machine-man-preview+json' + } + }); + }); + + it('expect skipped', done => { + request(srv) + .post(`/github`) // trailing slash essential + .set({'X-GitHub-Event': 'issues'}) + .end(function (err) { + expect(spy.called).false; + err ? done(err) : done(); + }); + }); + + it('expect error caught', done => { + request(srv) + .post(`/github`) // trailing slash essential + .set({ + 'X-GitHub-Event': 'check_suite', + 'x-github-hook-installation-target-id': process.env.GITHUB_APP_IDENTIFIER, + 'X-Hub-Signature': {'sha': null}, + 'X-GitHub-Delivery': '72d3162e-cc78-11e3-81ab-4c9367dc0958' + }) + .end(function (err) { + expect(err).is.null; // Should have caught error + done(); + }); + }); + + /** + * This is already covered by the setAccessToken test... 
+ */ + it('expect token set', done => { + // Although the blob signature won't match, we can at least test that setAccessToken was called + request(srv) + .post(`/github`) // trailing slash essential + .set({ + 'X-GitHub-Event': 'push', + 'x-github-hook-installation-target-id': process.env.GITHUB_APP_IDENTIFIER, + 'X-Hub-Signature': {'sha': SHA}, + 'X-GitHub-Delivery': '72d3162e-cc78-11e3-81ab-4c9367dc0958' + }) + .end(function (err) { + expect(scope.pendingMocks().length).lt(2); // setAccessToken was called + err ? done(err) : done(); + }); + }); + + afterEach(function () { + nock.cleanAll(); + }); + + afterEach(function () { + spy.restore(); + nock.cleanAll(); + }); }); @@ -852,41 +1116,42 @@ describe('srv github/', () => { * regardless of job data. */ describe('queue finish callback', () => { - var scope; // Our server mock - var spy; // A spy for authentication - - before(function() { - // Mock for App.installationAccessToken - scope = nock('https://api.github.com'); - const token = {token: '#t0k3N'}; - spy = sinon.stub(appAuth, 'createAppAuth').returns(async () => token); - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); - }); - - it('test error handling', (done) => { - queue.process(async (job) => { job.done(new Error('foobar')); }) // Raise error - queue.on('error', _ => {}); // Error to be handles in finish callback - const data = { - sha: SHA, - skipPost: false, - context: 'coverage', - status: 'success', - }; - const uri = `/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/statuses/${data['sha']}`; - const requestBodyMatcher = (body) => { - expect(body.state).eq('error'); - expect(body.description).eq('foobar'); - expect(body.context).eq(data['context']); - expect(body.target_url).empty; // URL empty on errors - done(); - return queue.pile.length === 0 - }; - scope.post(uri, requestBodyMatcher).reply(201); - queue.add(data) // Create new job to process - }); - - after(function() { - 
delete queue.process;
-  });
+    var scope; // Our server mock
+    var spy; // A spy for authentication
+
+    before(async function () {
+        scope = nock('https://api.github.com');
+        spy = await mockToken();
+    });
+
+    it('test error handling', (done) => {
+        queue.process(async (job) => {
+            job.done(new Error('foobar'));
+        }); // Raise error
+        queue.on('error', _ => {
+        }); // Error to be handled in finish callback
+        const data = {
+            sha: SHA,
+            skipPost: false,
+            context: 'coverage',
+            status: 'success'
+        };
+        const uri = `/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/statuses/${data['sha']}`;
+        const requestBodyMatcher = (body) => {
+            expect(body.state).eq('error');
+            expect(body.description).eq('foobar');
+            expect(body.context).eq(data['context']);
+            expect(body.target_url).empty; // URL empty on errors
+            done();
+            return queue.pile.length === 0;
+        };
+        scope.post(uri, requestBodyMatcher).reply(201);
+        queue.add(data); // Create new job to process
+    });
+
+    after(function () {
+        delete queue.process;
+        nock.cleanAll();
+        spy.restore();
+    });
});