mediawiki/services/parsoid/testreduce (master)

sourcepatches
$ date
Thu Mar  4 13:15:02 UTC 2021

$ git clone file:///srv/git/mediawiki-services-parsoid-testreduce.git repo --depth=1 -b master
Cloning into 'repo'...

$ git config user.name libraryupgrader

$ git config user.email tools.libraryupgrader@tools.wmflabs.org

$ git submodule update --init

$ grr init
Installed commit-msg hook.

$ git show-ref refs/heads/master
31b96edaba68ec6b42e0ec7b5c3109277b2629fb refs/heads/master

Upgrading n:eslint-config-wikimedia from 0.15.3 -> 0.18.1
$ npm install

> dtrace-provider@0.8.8 install /src/repo/node_modules/dtrace-provider
> node-gyp rebuild || node suppress-error.js

make: Entering directory '/src/repo/node_modules/dtrace-provider/build'
  TOUCH Release/obj.target/DTraceProviderStub.stamp
make: Leaving directory '/src/repo/node_modules/dtrace-provider/build'

> heapdump@0.3.15 install /src/repo/node_modules/heapdump
> node-gyp rebuild

make: Entering directory '/src/repo/node_modules/heapdump/build'
  CXX(target) Release/obj.target/addon/src/heapdump.o
In file included from ../src/heapdump.cc:15:
/cache/node-gyp/10.21.0/include/node/node.h:573:43: warning: cast between incompatible function types from ‘void (*)(Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE)’ {aka ‘void (*)(v8::Local<v8::Object>)’} to ‘node::addon_register_func’ {aka ‘void (*)(v8::Local<v8::Object>, v8::Local<v8::Value>, void*)’} [-Wcast-function-type]
       (node::addon_register_func) (regfunc),                          \
                                           ^
/cache/node-gyp/10.21.0/include/node/node.h:607:3: note: in expansion of macro ‘NODE_MODULE_X’
   NODE_MODULE_X(modname, regfunc, NULL, 0)  // NOLINT (readability/null_usage)
   ^~~~~~~~~~~~~
../src/heapdump.cc:136:1: note: in expansion of macro ‘NODE_MODULE’
 NODE_MODULE(addon, Initialize)
 ^~~~~~~~~~~
In file included from /cache/node-gyp/10.21.0/include/node/node.h:63,
                 from ../src/heapdump.cc:15:
/cache/node-gyp/10.21.0/include/node/v8.h: In instantiation of ‘void v8::PersistentBase<T>::SetWeak(P*, typename v8::WeakCallbackInfo<P>::Callback, v8::WeakCallbackType) [with P = node::ObjectWrap; T = v8::Object; typename v8::WeakCallbackInfo<P>::Callback = void (*)(const v8::WeakCallbackInfo<node::ObjectWrap>&)]’:
/cache/node-gyp/10.21.0/include/node/node_object_wrap.h:84:78:   required from here
/cache/node-gyp/10.21.0/include/node/v8.h:9502:16: warning: cast between incompatible function types from ‘v8::WeakCallbackInfo<node::ObjectWrap>::Callback’ {aka ‘void (*)(const v8::WeakCallbackInfo<node::ObjectWrap>&)’} to ‘Callback’ {aka ‘void (*)(const v8::WeakCallbackInfo<void>&)’} [-Wcast-function-type]
                reinterpret_cast<Callback>(callback), type);
                ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/cache/node-gyp/10.21.0/include/node/v8.h: In instantiation of ‘void v8::PersistentBase<T>::SetWeak(P*, typename v8::WeakCallbackInfo<P>::Callback, v8::WeakCallbackType) [with P = Nan::ObjectWrap; T = v8::Object; typename v8::WeakCallbackInfo<P>::Callback = void (*)(const v8::WeakCallbackInfo<Nan::ObjectWrap>&)]’:
../../nan/nan_object_wrap.h:65:61:   required from here
/cache/node-gyp/10.21.0/include/node/v8.h:9502:16: warning: cast between incompatible function types from ‘v8::WeakCallbackInfo<Nan::ObjectWrap>::Callback’ {aka ‘void (*)(const v8::WeakCallbackInfo<Nan::ObjectWrap>&)’} to ‘Callback’ {aka ‘void (*)(const v8::WeakCallbackInfo<void>&)’} [-Wcast-function-type]
  SOLINK_MODULE(target) Release/obj.target/addon.node
  COPY Release/addon.node
make: Leaving directory '/src/repo/node_modules/heapdump/build'

> unix-dgram@2.0.4 install /src/repo/node_modules/unix-dgram
> node-gyp rebuild

make: Entering directory '/src/repo/node_modules/unix-dgram/build'
  CXX(target) Release/obj.target/unix_dgram/src/unix_dgram.o
In file included from ../../nan/nan.h:56,
                 from ../src/unix_dgram.cc:5:
/cache/node-gyp/10.21.0/include/node/node.h:573:43: warning: cast between incompatible function types from ‘void (*)(v8::Local<v8::Object>)’ to ‘node::addon_register_func’ {aka ‘void (*)(v8::Local<v8::Object>, v8::Local<v8::Value>, void*)’} [-Wcast-function-type]
       (node::addon_register_func) (regfunc),                          \
                                           ^
/cache/node-gyp/10.21.0/include/node/node.h:607:3: note: in expansion of macro ‘NODE_MODULE_X’
   NODE_MODULE_X(modname, regfunc, NULL, 0)  // NOLINT (readability/null_usage)
   ^~~~~~~~~~~~~
../src/unix_dgram.cc:404:1: note: in expansion of macro ‘NODE_MODULE’
 NODE_MODULE(unix_dgram, Initialize)
 ^~~~~~~~~~~
In file included from /cache/node-gyp/10.21.0/include/node/node.h:63,
                 from ../../nan/nan.h:56,
                 from ../src/unix_dgram.cc:5:
/cache/node-gyp/10.21.0/include/node/v8.h: In instantiation of ‘void v8::PersistentBase<T>::SetWeak(P*, typename v8::WeakCallbackInfo<P>::Callback, v8::WeakCallbackType) [with P = node::ObjectWrap; T = v8::Object; typename v8::WeakCallbackInfo<P>::Callback = void (*)(const v8::WeakCallbackInfo<node::ObjectWrap>&)]’:
/cache/node-gyp/10.21.0/include/node/node_object_wrap.h:84:78:   required from here
/cache/node-gyp/10.21.0/include/node/v8.h:9502:16: warning: cast between incompatible function types from ‘v8::WeakCallbackInfo<node::ObjectWrap>::Callback’ {aka ‘void (*)(const v8::WeakCallbackInfo<node::ObjectWrap>&)’} to ‘Callback’ {aka ‘void (*)(const v8::WeakCallbackInfo<void>&)’} [-Wcast-function-type]
                reinterpret_cast<Callback>(callback), type);
                ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/cache/node-gyp/10.21.0/include/node/v8.h: In instantiation of ‘void v8::PersistentBase<T>::SetWeak(P*, typename v8::WeakCallbackInfo<P>::Callback, v8::WeakCallbackType) [with P = Nan::ObjectWrap; T = v8::Object; typename v8::WeakCallbackInfo<P>::Callback = void (*)(const v8::WeakCallbackInfo<Nan::ObjectWrap>&)]’:
../../nan/nan_object_wrap.h:65:61:   required from here
/cache/node-gyp/10.21.0/include/node/v8.h:9502:16: warning: cast between incompatible function types from ‘v8::WeakCallbackInfo<Nan::ObjectWrap>::Callback’ {aka ‘void (*)(const v8::WeakCallbackInfo<Nan::ObjectWrap>&)’} to ‘Callback’ {aka ‘void (*)(const v8::WeakCallbackInfo<void>&)’} [-Wcast-function-type]
  SOLINK_MODULE(target) Release/obj.target/unix_dgram.node
  COPY Release/unix_dgram.node
make: Leaving directory '/src/repo/node_modules/unix-dgram/build'

> gc-stats@git+https://github.com/dainis/node-gcstats.git#5be60dfd24293d6cefbc8a459c1537611373fac5 install /src/repo/node_modules/gc-stats
> node-pre-gyp install --fallback-to-build

node-pre-gyp WARN Using request for node-pre-gyp https download 
[gc-stats] Success: "/src/repo/node_modules/gc-stats/build/gcstats/v1.5.0/Release/node-v64-linux-x64/gcstats.node" is installed via remote

> core-js@2.6.12 postinstall /src/repo/node_modules/core-js
> node -e "try{require('./postinstall')}catch(e){}"

Thank you for using core-js ( https://github.com/zloirock/core-js ) for polyfilling JavaScript standard library!

The project needs your help! Please consider supporting of core-js on Open Collective or Patreon: 
> https://opencollective.com/core-js 
> https://www.patreon.com/zloirock 

Also, the author of core-js ( https://github.com/zloirock ) is looking for a good job -)


> core-js@3.9.1 postinstall /src/repo/node_modules/eslint-plugin-compat/node_modules/core-js
> node -e "try{require('./postinstall')}catch(e){}"

added 427 packages from 313 contributors and audited 427 packages in 96.018s

27 packages are looking for funding
  run `npm fund` for details

found 0 vulnerabilities


$ npm update eslint -depth 10

$ ./node_modules/.bin/eslint . --fix

/src/repo/client/client-cluster.js
  44:3  error  Don't use process.exit(); throw an error instead  no-process-exit

/src/repo/client/client.js
   57:6   error    Don't use process.exit(); throw an error instead  no-process-exit
  121:51  error    Don't use process.exit(); throw an error instead  no-process-exit
  144:1   warning  Missing JSDoc @return declaration                 jsdoc/require-returns
  225:3   error    Don't use process.exit(); throw an error instead  no-process-exit
  253:6   error    Don't use process.exit(); throw an error instead  no-process-exit
  259:5   error    Don't use process.exit(); throw an error instead  no-process-exit

/src/repo/server/importJson.js
  22:21  error  "./server.settings.js" is not found               node/no-missing-require
  64:2   error  Don't use process.exit(); throw an error instead  no-process-exit

/src/repo/server/scripts/testdb.info.js
  3:1  error  Use the global form of 'use strict'  strict

/src/repo/server/server.js
   92:2   error  Don't use process.exit(); throw an error instead  no-process-exit
  168:3   error  Don't use process.exit(); throw an error instead  no-process-exit
  728:32  error  'res' is already declared in the upper scope      no-shadow
  736:41  error  'err' is already declared in the upper scope      no-shadow
  736:46  error  'row' is already declared in the upper scope      no-shadow
  761:42  error  'err' is already declared in the upper scope      no-shadow
  761:47  error  'row' is already declared in the upper scope      no-shadow

/src/repo/server/static/js/app.js
  2:1  error  Use the global form of 'use strict'  strict
  3:2  error  Use the global form of 'use strict'  strict

/src/repo/server/static/js/commitList.js
  2:1  error  Use the global form of 'use strict'  strict
  3:2  error  Use the global form of 'use strict'  strict

/src/repo/utils/promise.js
  1:1  error  Use the global form of 'use strict'  strict

✖ 22 problems (21 errors, 1 warning)


$ ./node_modules/.bin/eslint . -f json
[{"filePath":"/src/repo/client/client-cluster.js","messages":[{"ruleId":"no-process-exit","severity":2,"message":"Don't use process.exit(); throw an error instead.","line":44,"column":3,"nodeType":"CallExpression","messageId":"noProcessExit","endLine":44,"endColumn":18}],"errorCount":1,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"source":"#!/usr/bin/env node\n'use strict';\n\nvar cluster = require('cluster');\nvar path = require('path');\n\nvar opts = require('yargs')\n\t.default({\n\t\t// By default, start one client + api server per core.\n\t\tc: require('os').cpus().length,\n\t})\n\t.alias('c', 'children').argv;\n\nif (!module.parent) {\n\tvar numClients = opts.c;\n\n\tcluster.setupMaster({\n\t\texec: path.join(__dirname, 'client.js'),\n\t\targs: opts._,\n\t});\n\n\tconsole.log(\"client-cluster initializing\", numClients, \" testreduce clients\");\n\tfor (var i = 0; i < numClients; i++) {\n\t\tcluster.fork();\n\t}\n\n\tcluster.on('exit', function(worker, code, signal) {\n\t\tif (!worker.suicide) {\n\t\t\tvar exitCode = worker.process.exitCode;\n\t\t\tconsole.log('client', worker.process.pid,\n\t\t\t\t'died (' + exitCode + '), restarting.');\n\t\t\tcluster.fork();\n\t\t}\n\t});\n\n\tvar shutdownCluster = function() {\n\t\tconsole.log('client cluster shutting down, killing all testreduce clients');\n\t\tvar workers = cluster.workers;\n\t\tObject.keys(workers).forEach(function(id) {\n\t\t\tconsole.log('Killing client', id);\n\t\t\tworkers[id].kill('SIGKILL');\n\t\t});\n\t\tconsole.log('Done killing testreduce clients, exiting client-cluster.');\n\t\tprocess.exit(0);\n\t};\n\n\tprocess.on('SIGINT', shutdownCluster);\n\tprocess.on('SIGTERM', shutdownCluster);\n}\n","usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/client/client.js","messages":[{"ruleId":"no-process-exit","severity":2,"message":"Don't use process.exit(); throw an error instead.","line":57,"column":6,"nodeType":"CallExpression","messageId":"noProcessExit","endLine":57,"endColumn":21},{"ruleId":"no-process-exit","severity":2,"message":"Don't use process.exit(); throw an error instead.","line":121,"column":51,"nodeType":"CallExpression","messageId":"noProcessExit","endLine":121,"endColumn":66},{"ruleId":"jsdoc/require-returns","severity":1,"message":"Missing JSDoc @return declaration.","line":144,"column":1,"nodeType":"Block","endLine":148,"endColumn":4},{"ruleId":"no-process-exit","severity":2,"message":"Don't use process.exit(); throw an error instead.","line":225,"column":3,"nodeType":"CallExpression","messageId":"noProcessExit","endLine":225,"endColumn":18},{"ruleId":"no-process-exit","severity":2,"message":"Don't use process.exit(); throw an error instead.","line":253,"column":6,"nodeType":"CallExpression","messageId":"noProcessExit","endLine":253,"endColumn":21},{"ruleId":"no-process-exit","severity":2,"message":"Don't use process.exit(); throw an error instead.","line":259,"column":5,"nodeType":"CallExpression","messageId":"noProcessExit","endLine":259,"endColumn":20}],"errorCount":5,"warningCount":1,"fixableErrorCount":0,"fixableWarningCount":0,"source":"#!/usr/bin/env node\n'use strict';\n\n/**\n * A client for testing round-tripping of articles.\n */\nvar request = require('request');\nvar cluster = require('cluster');\nvar exec = require('child_process').exec;\nvar Utils = require('../utils/Utils.js').Utils;\nvar Promise = require('../utils/promise.js');\n\nvar commit;\nvar ctime;\nvar lastCommit;\nvar lastCommitTime;\nvar lastCommitCheck;\n\nvar config = require(process.argv[2] || './config.js');\n\nvar pidPrefix = '[' + process.pid + ']: ';\n\nvar logger = function(msg) { console.log(pidPrefix + msg); };\n\nvar getTitle = function(cb) {\n\tvar requestOptions = {\n\t\turi: 'http://' + config.server.host + ':' +\n\t\t\tconfig.server.port + '/title?commit=' + commit + '&ctime=' + encodeURIComponent(ctime),\n\t\tmethod: 'GET',\n\t};\n\n\tvar callback = function(error, response, body) {\n\t\tif (error || !response) {\n\t\t\tsetTimeout(function() { cb('start'); }, 15000);\n\t\t\treturn;\n\t\t}\n\n\t\tvar resp;\n\t\tswitch (response.statusCode) {\n\t\t\tcase 200:\n\t\t\t\tresp = JSON.parse(body);\n\t\t\t\tcb('runTest', resp);\n\t\t\t\tbreak;\n\t\t\tcase 404:\n\t\t\t\tlogger('The server does not have any work for us right now, waiting half a minute....');\n\t\t\t\tsetTimeout(function() { cb('start'); }, 30000);\n\t\t\t\tbreak;\n\t\t\tcase 426:\n\t\t\t\tlogger('Update required, exiting.');\n\t\t\t\t// Signal our voluntary suicide to the parent if running as a\n\t\t\t\t// cluster worker, so that it does not restart this client.\n\t\t\t\t// Without this, the code is never actually updated as a newly\n\t\t\t\t// forked client will still run the old code.\n\t\t\t\tif (cluster.worker) {\n\t\t\t\t\tcluster.worker.kill();\n\t\t\t\t} else {\n\t\t\t\t\tprocess.exit(0);\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tlogger('There was some error (' + response.statusCode + '), but that is fine. Waiting 15 seconds to resume....');\n\t\t\t\tsetTimeout(function() { cb('start'); }, 15000);\n\t\t}\n\t};\n\n\tUtils.retryingHTTPRequest(10, requestOptions, callback);\n};\n\nvar runTest = function(cb, test, retryCount) {\n\tif (!config.opts.testTimeout) {\n\t\t// Default: 5 minutes.\n\t\tconfig.opts.testTimeout = 5 * 60 * 1000;\n\t}\n\t// Add a random (max 500ms) shift in case multiple testreduce\n\t// clients fails and they don't all retry in lockstep fashion.\n\tvar timeoutVal = Math.round(Math.random() * 500) + config.opts.testTimeout;\n\n\tconfig.runTest(config.opts, test).then(function(results) {\n\t\tcb('postResult', null, results, test, null);\n\t})\n\t// Abort test if no result is returned within a fixed timeframe\n\t.timeout(timeoutVal)\n\t.catch(function(err) {\n\t\t// Log it to console\n\t\tconsole.error(pidPrefix + 'Error in %s:%s: %s\\n%s', test.prefix, test.title, err, err.stack || '');\n\n\t\t// Can be one of many errors ...\n\t\t// 1. Timeout because of a stuck test\n\t\t//    (ex: phantomjs in visualdiffs)\n\t\t// 2. Other transient retry-able error\n\t\t//    (ex: failed uprightdiff, failed postprocessing in visualdiffs)\n\t\tvar maxRetries = config.opts.maxRetries || 1;\n\t\tif (retryCount === undefined) {\n\t\t\tretryCount = 0;\n\t\t}\n\t\tif (retryCount < maxRetries) {\n\t\t\tconsole.error(pidPrefix + 'Retry # ' + retryCount);\n\t\t\tvar origCb = cb;\n\t\t\t// Replace cb to prevent a delayed response from\n\t\t\t// overwriting results from a later retry.\n\t\t\t// FIXME: Redo this side-effecty crap.\n\t\t\tcb = function() {\n\t\t\t\tlogger('Rejecting delayed result for: ' + test.prefix + ':' + test.title);\n\t\t\t};\n\t\t\trunTest(origCb, test, retryCount + 1);\n\t\t\treturn;\n\t\t}\n\n\t\tconsole.error(pidPrefix + 'No more retries!');\n\n\t\t/*\n\t\t * If you're looking at the line below and thinking \"Why in the\n\t\t * hell would they have done that, it causes unnecessary problems\n\t\t * with the clients crashing\", you're absolutely right. This is\n\t\t * here because we use a supervisor instance to run our test\n\t\t * clients, and we rely on it to restart dead'ns.\n\t\t *\n\t\t * In sum, easier to die than to worry about having to reset any\n\t\t * broken application state.\n\t\t */\n\t\tcb('postResult', err, null, test,  function() { process.exit(1); });\n\t});\n};\n\nvar defaultGitCommitFetch = function(repoPath) {\n\treturn new Promise(function(resolve, reject) {\n\t\texec('git log --max-count=1 --pretty=format:\"%H %ci\"', { cwd: repoPath }, function(err, data) {\n\t\t\tif (err) {\n\t\t\t\treject(err);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tvar cobj = data.match(/^([^ ]+) (.*)$/);\n\t\t\tif (!cobj) {\n\t\t\t\treject(\"Error, couldn't find the current commit\");\n\t\t\t} else {\n\t\t\t\t// convert the timestamp to UTC\n\t\t\t\tresolve([cobj[1], new Date(cobj[2]).toISOString()]);\n\t\t\t}\n\t\t});\n\t});\n};\n\n/**\n * Get the current git commit hash.\n * Returns a fulfillment promise.\n * Checks for updated code every 5 minutes.\n */\nvar getGitCommit = function() {\n\tvar p;\n\tvar now = Date.now();\n\tif (!lastCommitCheck || (now - lastCommitCheck) > (5 * 60 * 1000)) {\n\t\tlastCommitCheck = now;\n\t\tif (config.gitCommitFetch) {\n\t\t\tp = config.gitCommitFetch(config.opts);\n\t\t\t// If we got a fixed string, construct\n\t\t\t// an immediately resolved promise.\n\t\t\tif (typeof p === 'string') {\n\t\t\t\tp = Promise.resolve([p, new Date().toISOString()]);\n\t\t\t}\n\t\t} else {\n\t\t\tp = defaultGitCommitFetch(config.gitRepoPath);\n\t\t}\n\t} else {\n\t\tp = Promise.resolve([lastCommit, lastCommitTime]);\n\t}\n\treturn p;\n};\n\nvar postResult = function(err, result, test, finalCB, cb) {\n\tgetGitCommit().then(function(res) {\n\t\tif (!res[0]) {\n\t\t\tthrow new Error('Could not find the current commit.');\n\t\t}\n\n\t\tif (err) {\n\t\t\tif (config.postJSON) {\n\t\t\t\tresult = {\n\t\t\t\t\terr: { name: err.name, msg: err.toString(), },\n\t\t\t\t};\n\t\t\t} else {\n\t\t\t\tresult =\n\t\t\t\t\t'<error type=\"' + err.name + '\">' +\n\t\t\t\t\terr.toString() +\n\t\t\t\t\t'</error>';\n\t\t\t}\n\t\t}\n\n\t\tvar postOpts = {\n\t\t\turi: 'http://' + config.server.host + \":\" + config.server.port + '/result/' + encodeURIComponent(test.title) + '/' + test.prefix,\n\t\t\tmethod: 'POST',\n\t\t\theaders: {\n\t\t\t\t'Connection': 'close',\n\t\t\t},\n\t\t};\n\n\t\tvar out = {\n\t\t\tresults: result,\n\t\t\tcommit: res[0],\n\t\t\tctime: res[1],\n\t\t\ttest: test,\n\t\t};\n\n\t\tif (config.postJSON) {\n\t\t\tpostOpts.headers['Content-Type'] = 'application/json; charset=utf-8';\n\t\t\tpostOpts.body = JSON.stringify(out);\n\t\t} else {\n\t\t\tpostOpts.headers['Content-Type'] = 'application/x-www-form-urlencoded';\n\t\t\tpostOpts.form = out;\n\t\t}\n\n\t\trequest(postOpts, function(err2) {\n\t\t\tif (err2) {\n\t\t\t\tlogger('Error processing posted result: ' + err2);\n\t\t\t\tlogger('Posted form: ' + JSON.stringify(out));\n\t\t\t}\n\t\t\tif (finalCB) {\n\t\t\t\tfinalCB();\n\t\t\t} else {\n\t\t\t\tcb('start');\n\t\t\t}\n\t\t});\n\t}).catch(function(err3) {\n\t\tlogger('Error: ' + err3 + '; stack: ' + err3.stack);\n\t\tprocess.exit(1);\n\t});\n};\n\nvar callbackOmnibus = function(which) {\n\tvar args = Array.prototype.slice.call(arguments);\n\tvar test;\n\tswitch (args.shift()) {\n\t\tcase 'runTest':\n\t\t\ttest = args[0];\n\t\t\tlogger('Running a test on ' + test.prefix + ':' + test.title + ' ....');\n\t\t\targs.unshift(callbackOmnibus);\n\t\t\trunTest.apply(null, args);\n\t\t\tbreak;\n\n\t\tcase 'postResult':\n\t\t\ttest = args[2];\n\t\t\tlogger('Posting a result for ' + test.prefix + ':' + test.title + ' ....');\n\t\t\targs.push(callbackOmnibus);\n\t\t\tpostResult.apply(null, args);\n\t\t\tbreak;\n\n\t\tcase 'start':\n\t\t\tgetGitCommit().then(function(res) {\n\t\t\t\tif (res[0] !== commit) {\n\t\t\t\t\tlogger('Exiting because the commit hash change. ' +\n\t\t\t\t\t\t'Expected: ' + commit +\n\t\t\t\t\t\t'; Got: ' + res[0]);\n\t\t\t\t\tprocess.exit(0);\n\t\t\t\t}\n\n\t\t\t\tgetTitle(callbackOmnibus);\n\t\t\t}).catch(function(err) {\n\t\t\t\tlogger('Could not find latest commit. ' + err);\n\t\t\t\tprocess.exit(1);\n\t\t\t});\n\t\t\tbreak;\n\n\t\tdefault:\n\t\t\tconsole.assert(false, 'Bad callback argument: ' + which);\n\t}\n};\n\nif (typeof module === 'object') {\n\tmodule.exports.getTitle = getTitle;\n\tmodule.exports.runTest = runTest;\n\tmodule.exports.postResult = postResult;\n}\n\nif (module && !module.parent) {\n\tvar getGitCommitCb = function(commitHash, commitTime) {\n\t\tlastCommit = commit = commitHash;\n\t\tlastCommitTime = ctime = commitTime;\n\t\tcallbackOmnibus('start');\n\t};\n\n\t// Enable heap dumps in /tmp on kill -USR2.\n\t// See https://github.com/bnoordhuis/node-heapdump/\n\t// For node 0.6/0.8: npm install heapdump@0.1.0\n\t// For 0.10: npm install heapdump\n\tprocess.on('SIGUSR2', function() {\n\t\tvar heapdump = require('heapdump');\n\t\tconsole.error('SIGUSR2 received! Writing snapshot.');\n\t\tprocess.chdir('/tmp');\n\t\theapdump.writeSnapshot();\n\t});\n\n\tgetGitCommit().spread(getGitCommitCb).done();\n}\n","usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/client/config.example.js","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/package-lock.json","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/package.json","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/diff.js","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/importJson.js","messages":[{"ruleId":"node/no-missing-require","severity":2,"message":"\"./server.settings.js\" is not found.","line":22,"column":21,"nodeType":"Literal","endLine":22,"endColumn":43},{"ruleId":"no-process-exit","severity":2,"message":"Don't use process.exit(); throw an error instead.","line":64,"column":2,"nodeType":"CallExpression","messageId":"noProcessExit","endLine":64,"endColumn":17}],"errorCount":2,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"source":"#!/usr/bin/env node\n'use strict';\n\n/**\n * A utility for reading in a JSON-y list of articles to the database.\n */\n\nvar yargs = require('yargs');\n\n// Default options\nvar defaults = {\n\t'host':     'localhost',\n\t'port':     3306,\n\t'database': 'testreduce',\n\t'user':     'testreduce',\n\t'password': 'testreduce',\n};\n\n// Settings file\nvar settings;\ntry {\n\tsettings = require('./server.settings.js');\n} catch (e) {\n\tsettings = {};\n}\n\n// Command line options\nvar opts = yargs.usage('Usage: ./importJson.js titles.example.json')\n\t.options('help', {\n\t\tdescription: 'Show this message',\n\t\t'boolean': true,\n\t\t'default': false,\n\t})\n\t.options('prefix', {\n\t\tdescription: 'Which wiki prefix to use; e.g. \"enwiki\" for English wikipedia, \"eswiki\" for Spanish, \"mediawikiwiki\" for mediawiki.org',\n\t\t'boolean': false,\n\t\t'default': 'enwiki',\n\t})\n\t.options('h', {\n\t\talias: 'host',\n\t\tdescribe: 'Hostname of the database server.',\n\t})\n\t.options('P', {\n\t\talias: 'port',\n\t\tdescribe: 'Port number to use for connection.',\n\t})\n\t.options('D', {\n\t\talias: 'database',\n\t\tdescribe: 'Database to use.',\n\t})\n\t.options('u', {\n\t\talias: 'user',\n\t\tdescribe: 'User for login.',\n\t})\n\t.options('p', {\n\t\talias: 'password',\n\t\tdescribe: 'Password.',\n\t})\n\t.demand(1);\nvar argv = opts.argv;\n\nif (argv.help) {\n\topts.showHelp();\n\tprocess.exit(0);\n}\n\nvar getOption = function(opt) {\n\t// Check possible options in this order: command line, settings file, defaults.\n\tif (argv.hasOwnProperty(opt)) {\n\t\treturn argv[ opt ];\n\t} else if (settings.hasOwnProperty(opt)) {\n\t\treturn settings[ opt ];\n\t} else if (defaults.hasOwnProperty(opt)) {\n\t\treturn defaults[ opt ];\n\t} else {\n\t\treturn undefined;\n\t}\n};\n\nvar mysql = require('mysql');\nvar db = mysql.createConnection({\n\thost:               getOption('host'),\n\tport:               getOption('port'),\n\tdatabase:           getOption('database'),\n\tuser:               getOption('user'),\n\tpassword:           getOption('password'),\n\tcharset:            'UTF8_BIN',\n\tmultipleStatements: true,\n});\n\nvar waitingCount = 0.5;\n\nvar dbInsert = 'INSERT IGNORE INTO pages ( title, prefix ) VALUES ( ?, ? )';\n\nvar insertRecord = function(record, prefix) {\n\twaitingCount++;\n\tdb.query(dbInsert, [ record, prefix ], function(err) {\n\t\tif (err) {\n\t\t\tconsole.error(err);\n\t\t} else {\n\t\t\twaitingCount--;\n\n\t\t\tif (waitingCount <= 0) {\n\t\t\t\tconsole.log('Done!');\n\t\t\t}\n\t\t}\n\t});\n};\n\nvar loadJSON = function(json, options) {\n\tvar titles = require(json);\n\n\tdb.query('START TRANSACTION;');\n\n\tfor (var i = 0; i < titles.length; i++) {\n\t\tinsertRecord(titles[i], options.prefix || 'enwiki');\n\t}\n\n\tdb.query('COMMIT;');\n\n\twaitingCount -= 0.5;\n\tif (waitingCount <= 0) {\n\t\tconsole.log('Done!');\n\t}\n};\n\ndb.connect(function(err) {\n\tvar filepath;\n\tif (err) {\n\t\tconsole.error(err);\n\t} else {\n\t\tfilepath = argv._[0];\n\t\tif (!filepath.match(/^\\//)) {\n\t\t\tfilepath = './' + filepath;\n\t\t}\n\t\tloadJSON(filepath, argv);\n\t\tdb.end();\n\t}\n});\n","usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/render.helpers.js","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/scripts/fetch_rc.js","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/scripts/gen_titles.js","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/scripts/jsonify.js","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/scripts/testdb.info.js","messages":[{"ruleId":"strict","severity":2,"message":"Use the global form of 'use strict'.","line":3,"column":1,"nodeType":"Program","messageId":"global","endLine":92,"endColumn":3}],"errorCount":1,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"source":"#!/usr/bin/env node\n\nmodule.exports = {\n\t// How many titles do you want?\n\tsize: 190000,\n\n\t// How many of those do you want from the dumps?\n\t// Rest will come from recent changes stream\n\tdump_percentage: 75,\n\n\twikis: [\n\t\t// 1M+ wikipedias (ordered by depth)\n\t\t{ prefix: 'enwiki',  limit: 60 },\n\t\t{ prefix: 'frwiki',  limit: 20 },\n\t\t{ prefix: 'eswiki',  limit: 20 },\n\t\t{ prefix: 'ruwiki',  limit: 20 },\n\t\t{ prefix: 'itwiki',  limit: 15 },\n\t\t{ prefix: 'dewiki',  limit: 15 },\n\t\t{ prefix: 'jawiki',  limit: 10 },\n\t\t{ prefix: 'viwiki',  limit: 10 },\n\t\t{ prefix: 'plwiki',  limit: 10 },\n\t\t{ prefix: 'nlwiki',  limit: 10 },\n\t\t{ prefix: 'svwiki',  limit: 8 },\n\t\t{ prefix: 'warwiki', limit: 8 },\n\t\t{ prefix: 'cebwiki', limit: 8 },\n\n\t\t// Special wikis\n\t\t{ prefix: 'commonswiki', limit: 6 },\n\t\t{ prefix: 'metawiki', limit: 6 },\n\n\t\t// 100K+ wikipedias (ordered by edits)\n\t\t{ prefix: 'ptwiki',  limit: 6 },\n\t\t{ prefix: 'zhwiki',  limit: 6 },\n\t\t{ prefix: 'shwiki',  limit: 6 },\n\t\t{ prefix: 'arwiki',  limit: 4 },\n\t\t{ prefix: 'hewiki',  limit: 4 },\n\t\t{ prefix: 'kowiki',  limit: 3 },\n\t\t{ prefix: 'ukwiki',  limit: 3 },\n\t\t{ prefix: 'trwiki',  limit: 2 },\n\t\t{ prefix: 'huwiki',  limit: 2 },\n\t\t{ prefix: 'cawiki',  limit: 2 },\n\t\t{ prefix: 'nowiki',  limit: 2 },\n\n\t\t// Other language wikipedias\n\t\t{ prefix: 'zh_yuewiki',  limit: 2 },\n\t\t{ prefix: 'thwiki',  limit: 2 },\n\t\t{ prefix: 'hiwiki',  limit: 2 },\n\t\t{ prefix: 'bnwiki',  limit: 2 },\n\t\t{ prefix: 'mlwiki',  limit: 2 },\n\n\t\t// link prefix languages\n\t\t{ prefix: 'ckbwiki', limit: 1 },\n\t\t{ prefix: 'cuwiki',  limit: 1 },\n\t\t{ prefix: 'cvwiki',  limit: 1 },\n\t\t{ prefix: 'hywiki',  limit: 1 },\n\t\t{ prefix: 'iswiki',  limit: 1 },\n\t\t{ prefix: 'kaawiki', limit: 1 },\n\t\t{ prefix: 'kawiki',  limit: 1 },\n\t\t{ prefix: 'lbewiki', limit: 1 },\n\t\t{ prefix: 'lnwiki',  limit: 1 },\n\t\t{ prefix: 'mznwiki', limit: 1 },\n\t\t{ prefix: 'pnbwiki', limit: 1 },\n\t\t{ prefix: 'uzwiki',  limit: 1 },\n\n\t\t// wiktionary\n\t\t{ prefix: 'enwiktionary', limit: 1 },\n\t\t{ prefix: 'frwiktionary', limit: 1 },\n\t\t{ prefix: 'itwiktionary', limit: 1 },\n\t\t{ prefix: 'eswiktionary', limit: 1 },\n\n\t\t// wikisource\n\t\t{ prefix: 'enwikisource', limit: 1 },\n\t\t{ prefix: 'frwikisource', limit: 1 },\n\t\t{ prefix: 'itwikisource', limit: 1 },\n\t\t{ prefix: 'eswikisource', limit: 1 },\n\n\t\t// wikivoyage\n\t\t{ prefix: 'enwikivoyage', limit: 1 },\n\t\t{ prefix: 'frwikivoyage', limit: 1 },\n\t\t{ prefix: 'itwikivoyage', limit: 1 },\n\t\t{ prefix: 'eswikivoyage', limit: 1 },\n\n\t\t// Talk namespaces from some wikis\n\t\t{ prefix: 'enwiki',  ns: 1, limit: 5 },\n\t\t{ prefix: 'arwiki',  ns: 1, limit: 3 },\n\t\t{ prefix: 'dewiki',  ns: 1, limit: 3 },\n\t\t{ prefix: 'ptwiki',  ns: 1, limit: 2 },\n\t\t{ prefix: 'itwiki',  ns: 1, limit: 2 },\n\t\t{ prefix: 'hewiki',  ns: 1, limit: 1 },\n\t\t{ prefix: 'zhwiki',  ns: 1, limit: 1 },\n\t],\n};\n","usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/server.js","messages":[{"ruleId":"no-process-exit","severity":2,"message":"Don't use process.exit(); throw an error instead.","line":92,"column":2,"nodeType":"CallExpression","messageId":"noProcessExit","endLine":92,"endColumn":17},{"ruleId":"no-process-exit","severity":2,"message":"Don't use process.exit(); throw an error instead.","line":168,"column":3,"nodeType":"CallExpression","messageId":"noProcessExit","endLine":168,"endColumn":18},{"ruleId":"no-shadow","severity":2,"message":"'res' is already declared in the upper scope.","line":728,"column":32,"nodeType":"Identifier","messageId":"noShadow","endLine":728,"endColumn":35},{"ruleId":"no-shadow","severity":2,"message":"'err' is already declared in the upper scope.","line":736,"column":41,"nodeType":"Identifier","messageId":"noShadow","endLine":736,"endColumn":44},{"ruleId":"no-shadow","severity":2,"message":"'row' is already declared in the upper scope.","line":736,"column":46,"nodeType":"Identifier","messageId":"noShadow","endLine":736,"endColumn":49},{"ruleId":"no-shadow","severity":2,"message":"'err' is already declared in the upper scope.","line":761,"column":42,"nodeType":"Identifier","messageId":"noShadow","endLine":761,"endColumn":45},{"ruleId":"no-shadow","severity":2,"message":"'row' is already declared in the upper scope.","line":761,"column":47,"nodeType":"Identifier","messageId":"noShadow","endLine":761,"endColumn":50}],"errorCount":7,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"source":"#!/usr/bin/env node\n\"use strict\";\n\nvar bodyParser = require('body-parser');\nvar busboy = require('connect-busboy');\nvar express = require('express');\nvar yargs = require('yargs');\nvar ehbs = require('express-handlebars');\nvar path = require('path');\nvar Diff = require('./diff.js').Diff;\nvar RH = require('./render.helpers.js').RenderHelpers;\nvar Promise = require('../utils/promise.js');\n\n// Default options\nvar defaults = {\n\t'host':           'localhost',\n\t'port':           3306,\n\t'database':       'testreduce',\n\t'user':           'testreduce',\n\t'password':       'testreduce',\n\t'debug':          false,\n\t'fetches':        6,\n\t'tries':          6,\n\t'cutofftime':     600,\n\t'batch':          50,\n\tgenerateTitleUrl: function(server, prefix, title) {\n\t\treturn server.replace(/\\/$/, '') + \"/_rt/\" + prefix + \"/\" + title;\n\t},\n};\n\n// Command line options\nvar opts = yargs.usage('Usage: $0 [connection parameters]')\n\t.options('help', {\n\t\t'boolean': true,\n\t\t'default': false,\n\t\tdescribe: \"Show usage information.\",\n\t})\n\t.options('config', {\n\t\tdescribe: 'Configuration file for the server',\n\t\t'default': './server.settings.js',\n\t})\n\t.options('s', {\n\t\talias: 'socketPath',\n\t\tdescribe: 'Socket path for the database server (if set, host/port will be ignored).',\n\t})\n\t.options('h', {\n\t\talias: 'host',\n\t\tdescribe: 'Hostname of the database server.',\n\t})\n\t.options('P', {\n\t\talias: 'port',\n\t\tdescribe: 'Port number to use for connection.',\n\t})\n\t.options('D', {\n\t\talias: 'database',\n\t\tdescribe: 'Database to use.',\n\t})\n\t.options('u', {\n\t\talias: 'user',\n\t\tdescribe: 'User for MySQL login.',\n\t})\n\t.options('p', {\n\t\talias: 'password',\n\t\tdescribe: 'Password.',\n\t})\n\t.options('d', {\n\t\talias: 'debug',\n\t\t'boolean': true,\n\t\tdescribe: \"Output MySQL debug data.\",\n\t})\n\t.options('f', {\n\t\talias: 'fetches',\n\t\tdescribe: \"Number of times to try fetching a page.\",\n\t})\n\t.options('t', {\n\t\talias: 'tries',\n\t\tdescribe: \"Number of times an article will be sent for testing \" +\n\t\t\t\"before it's considered an error.\",\n\t})\n\t.options('c', {\n\t\talias: 'cutofftime',\n\t\tdescribe: \"Time in seconds to wait for a test result.\",\n\t})\n\t.options('b', {\n\t\talias: 'batch',\n\t\tdescribe: \"Number of titles to fetch from database in one batch.\",\n\t});\nvar argv = opts.argv;\n\nif (argv.help) {\n\topts.showHelp();\n\tprocess.exit(0);\n}\n\n// Settings file\nvar settings;\ntry {\n\tsettings = require(argv.config);\n} catch (e) {\n\tconsole.error(\"Aborting! Exception reading \" + argv.config + \": \" + e);\n\treturn;\n}\n\n// SSS FIXME: Awkward, but does the job for now.\n// Helpers need settings\nRH.settings = settings;\n\nvar perfConfig = settings.perfConfig;\nvar parsoidRTConfig = settings.parsoidRTConfig;\n\nvar getOption = function(opt) {\n\tvar value;\n\n\t// Check possible options in this order: command line, settings file, defaults.\n\tif (argv.hasOwnProperty(opt)) {\n\t\tvalue = argv[ opt ];\n\t} else if (settings.hasOwnProperty(opt)) {\n\t\tvalue = settings[ opt ];\n\t} else if (defaults.hasOwnProperty(opt)) {\n\t\tvalue = defaults[ opt ];\n\t} else {\n\t\treturn undefined;\n\t}\n\n\t// Check the boolean options, 'false' and 'no' should be treated as false.\n\t// Copied from mediawiki.Util.js.\n\tif (opt === 'debug') {\n\t\tif ((typeof value) === 'string' && /^(no|false)$/i.test(value)) {\n\t\t\treturn false;\n\t\t}\n\t}\n\treturn value;\n};\n\n// The maximum number of tries per article\nvar maxTries = getOption('tries');\n// The maximum number of fetch retries per article\nvar maxFetchRetries = getOption('fetches');\n// The time to wait before considering a test has failed\nvar cutOffTime = getOption('cutofftime');\n// The number of pages to fetch at once\nvar batchSize = getOption('batch');\nvar debug = getOption('debug');\n\nvar mysql = require('mysql');\nvar db = mysql.createConnection({\n\tsocketPath:         getOption('socketPath'), // if set, host:port will be ignored\n\thost:               getOption('host'),\n\tport:               getOption('port'),\n\tdatabase:           getOption('database'),\n\tuser:               getOption('user'),\n\tpassword:           getOption('password'),\n\tmultipleStatements: true,\n\tcharset:            'UTF8_BIN',\n\tdebug:              debug,\n});\n\nvar queues = require('mysql-queues');\nqueues(db, debug);\n\n// Try connecting to the database.\nprocess.on('exit', function() {\n\tdb.end();\n});\ndb.connect(function(err) {\n\tif (err) {\n\t\tconsole.error(\"Unable to connect to database, error: \" + err.toString());\n\t\tprocess.exit(1);\n\t}\n});\n\n// ----------------- The queries --------------\nvar dbGetTitle =\n\t'SELECT * FROM (' +\n\t'  SELECT id, title, prefix, claim_hash, claim_num_tries ' +\n\t'  FROM pages ' +\n\t'  WHERE num_fetch_errors < ? AND ' +\n\t'  ( claim_hash != ? OR ( claim_num_tries < ? AND claim_timestamp < ? ) )' +\n\t'  ORDER BY claim_num_tries DESC, latest_score DESC, ' +\n\t'  claim_timestamp ASC LIMIT 500 ' +\n\t// Stop other transactions from reading until we finish this one.\n\t'  FOR UPDATE' +\n\t') AS titles ORDER BY RAND() LIMIT ?';\n\nvar dbIncrementFetchErrorCount =\n\t'UPDATE pages SET ' +\n\t\t'claim_hash = ?, ' +\n\t\t'num_fetch_errors = num_fetch_errors + 1, ' +\n\t\t'claim_num_tries = 0 ' +\n\t\t'WHERE title = ? AND prefix = ?';\n\nvar dbInsertCommit =\n\t'INSERT IGNORE INTO commits ( hash, timestamp ) ' +\n\t'VALUES ( ?, ? )';\n\nvar dbFindPage =\n\t'SELECT id ' +\n\t'FROM pages ' +\n\t'WHERE title = ? AND prefix = ?';\n\nvar dbUpdatePageClaims =\n\t'UPDATE pages SET claim_hash = ?, claim_timestamp = ?, claim_num_tries = claim_num_tries + 1 ' +\n\t'WHERE id IN ( ? )';\n\nvar dbInsertResult =\n\t'INSERT INTO results ( page_id, commit_hash, result ) ' +\n\t'VALUES ( ?, ?, ? ) ' +\n\t'ON DUPLICATE KEY UPDATE id = LAST_INSERT_ID( id ), ' +\n\t\t'result = VALUES( result )';\n\nvar dbInsertStats =\n\t'INSERT INTO stats ' +\n\t'( skips, fails, errors, selser_errors, score, page_id, commit_hash ) ' +\n\t'VALUES ( ?, ?, ?, ?, ?, ?, ? ) ' +\n\t'ON DUPLICATE KEY UPDATE id = LAST_INSERT_ID( id ), ' +\n\t\t'skips = VALUES( skips ), fails = VALUES( fails ), ' +\n\t\t'errors = VALUES( errors ), selser_errors = VALUES(selser_errors), ' +\n\t\t'score = VALUES( score )';\n\nvar dbUpdatePageLatestResults =\n\t'UPDATE pages ' +\n\t'SET latest_stat = ?, latest_score = ?, latest_result = ?, ' +\n\t'claim_hash = ?, claim_timestamp = NULL, claim_num_tries = 0 ' +\n    'WHERE id = ?';\n\nvar dbUpdateCrashersClearTries =\n\t'UPDATE pages ' +\n\t'SET claim_num_tries = 0 ' +\n\t'WHERE claim_hash != ? AND claim_num_tries >= ?';\n\nvar dbLatestHash = 'SELECT hash FROM commits ORDER BY timestamp DESC LIMIT 1';\nvar dbPreviousHash = 'SELECT hash FROM commits ORDER BY timestamp DESC LIMIT 1 OFFSET 1';\n\nvar dbStatsQuery =\n\t'SELECT ? AS maxhash, ? AS secondhash, ' +\n\t'(SELECT count(*) FROM stats WHERE stats.commit_hash = ?) AS maxresults, ' +\n\t'count(*) AS total, ' +\n\t'count(CASE WHEN stats.errors=0 THEN 1 ELSE NULL END) AS no_errors, ' +\n\t'count(CASE WHEN stats.errors=0 AND stats.fails=0 ' +\n\t\t'then 1 else null end) AS no_fails, ' +\n\t'count(CASE WHEN stats.errors=0 AND stats.fails=0 AND stats.skips=0 ' +\n\t\t'then 1 else null end) AS no_skips, ' +\n\t// get regression count between last two commits\n\t'(SELECT count(*) ' +\n\t'FROM pages p ' +\n\t'JOIN stats AS s1 ON s1.page_id = p.id ' +\n\t'JOIN stats AS s2 ON s2.page_id = p.id ' +\n\t'WHERE s1.commit_hash = ? ' +\n\t'AND s2.commit_hash = ? ' +\n\t'AND s1.score > s2.score ) as numregressions, ' +\n\t// get fix count between last two commits\n\t'(SELECT count(*) ' +\n\t\t'FROM pages ' +\n\t\t'JOIN stats AS s1 ON s1.page_id = pages.id ' +\n\t\t'JOIN stats AS s2 ON s2.page_id = pages.id ' +\n\t\t'WHERE s1.commit_hash = ? ' +\n\t\t'AND s2.commit_hash = ? ' +\n\t\t'AND s1.score < s2.score ) AS numfixes, '  +\n\t// Get latest commit crashers\n\t'(SELECT count(*) ' +\n\t\t'FROM pages ' +\n\t\t'WHERE claim_hash = ? ' +\n\t\t\t'AND claim_num_tries >= ? ' +\n\t\t\t'AND claim_timestamp < ?) AS crashers, ' +\n\t// Get num of rt selser errors\n\t'(SELECT count(*) ' +\n\t\t'FROM pages ' +\n\t\t'JOIN stats ON pages.id = stats.page_id ' +\n\t\t'WHERE stats.commit_hash = ? ' +\n\t\t\t'AND stats.selser_errors > 0) AS rtselsererrors ' +\n\n\t'FROM pages JOIN stats on pages.latest_stat = stats.id';\n\nvar dbPerWikiStatsQuery =\n\t'SELECT ' +\n\t'(select hash from commits order by timestamp desc limit 1) as maxhash, ' +\n\t'(select hash from commits order by timestamp desc limit 1 offset 1) as secondhash, ' +\n\t'(select count(*) from stats join pages on stats.page_id = pages.id ' +\n\t\t'where stats.commit_hash = ' +\n\t\t'(select hash from commits order by timestamp desc limit 1) ' +\n\t\t'and pages.prefix = ?) as maxresults, ' +\n\t'count(*) AS total, ' +\n\t'count(CASE WHEN stats.errors=0 THEN 1 ELSE NULL END) AS no_errors, ' +\n\t'count(CASE WHEN stats.errors=0 AND stats.fails=0 ' +\n\t\t'then 1 else null end) AS no_fails, ' +\n\t'count(CASE WHEN stats.errors=0 AND stats.fails=0 AND stats.skips=0 ' +\n\t\t'then 1 else null end) AS no_skips, ' +\n\t// get regression count between last two commits\n\t'(SELECT count(*) ' +\n\t'FROM pages p ' +\n\t'JOIN stats AS s1 ON s1.page_id = p.id ' +\n\t'JOIN stats AS s2 ON s2.page_id = p.id ' +\n\t'WHERE s1.commit_hash = (SELECT hash ' +\n\t\t'FROM commits ORDER BY timestamp DESC LIMIT 1 ) ' +\n\t\t'AND s2.commit_hash = (SELECT hash ' +\n\t\t'FROM commits ORDER BY timestamp DESC LIMIT 1 OFFSET 1) ' +\n\t\t'AND p.prefix = ? ' +\n\t\t'AND s1.score > s2.score ) as numregressions, ' +\n\t// get fix count between last two commits\n\t'(SELECT count(*) ' +\n\t\t'FROM pages ' +\n\t\t'JOIN stats AS s1 ON s1.page_id = pages.id ' +\n\t\t'JOIN stats AS s2 ON s2.page_id = pages.id ' +\n\t\t'WHERE s1.commit_hash = (SELECT hash FROM commits ORDER BY timestamp DESC LIMIT 1 ) ' +\n\t\t'AND s2.commit_hash = (SELECT hash FROM commits ORDER BY timestamp DESC LIMIT 1 OFFSET 1 ) ' +\n\t\t'AND pages.prefix = ? ' +\n\t\t'AND s1.score < s2.score ) as numfixes, ' +\n\t// Get latest commit crashers\n\t'(SELECT count(*) ' +\n\t\t'FROM pages WHERE prefix = ? ' +\n\t\t\t'AND claim_hash = (SELECT hash FROM commits ORDER BY timestamp DESC LIMIT 1) ' +\n\t\t\t'AND claim_num_tries >= ? ' +\n\t\t\t'AND claim_timestamp < ?) AS crashers, ' +\n\t// Get num of rt selser errors\n\t'(SELECT count(*) ' +\n\t\t'FROM pages ' +\n\t\t'JOIN stats ON pages.id = stats.page_id ' +\n\t\t'WHERE pages.prefix = ? ' +\n\t\t\t'AND stats.commit_hash = (SELECT hash FROM commits ORDER BY timestamp DESC LIMIT 1 ) ' +\n\t\t\t'AND stats.selser_errors > 0) AS rtselsererrors ' +\n\n\t'FROM pages JOIN stats on pages.latest_stat = stats.id WHERE pages.prefix = ?';\n\nvar dbFailsQuery =\n\t'SELECT pages.title, pages.prefix, commits.hash, stats.errors, stats.fails, stats.skips ' +\n\t'FROM stats ' +\n\t'JOIN (' +\n\t'\tSELECT MAX(id) AS most_recent FROM stats GROUP BY page_id' +\n\t') AS s1 ON s1.most_recent = stats.id ' +\n\t'JOIN pages ON stats.page_id = pages.id ' +\n\t'JOIN commits ON stats.commit_hash = commits.hash ' +\n\t'ORDER BY stats.score DESC ' +\n\t'LIMIT 40 OFFSET ?';\n\nvar dbGetOneResult =\n\t'SELECT result FROM results ' +\n\t'JOIN commits ON results.commit_hash = commits.hash ' +\n\t'JOIN pages ON pages.id = results.page_id ' +\n\t'WHERE pages.title = ? AND pages.prefix = ? ' +\n\t'ORDER BY commits.timestamp DESC LIMIT 1';\n\nvar dbGetResultWithCommit =\n    'SELECT result FROM results ' +\n    'JOIN pages ON pages.id = results.page_id ' +\n    'WHERE results.commit_hash = ? AND pages.title = ? AND pages.prefix = ?';\n\nvar dbFailedFetches =\n\t'SELECT title, prefix FROM pages WHERE num_fetch_errors >= ?';\n\nvar dbCrashers =\n\t'SELECT pages.title, pages.prefix, pages.claim_hash, commits.timestamp ' +\n\t\t'FROM pages JOIN commits ON (pages.claim_hash = commits.hash) ' +\n\t\t'WHERE claim_num_tries >= ? ' +\n\t\t'AND claim_timestamp < ? ' +\n\t\t'ORDER BY commits.timestamp DESC';\n\nvar dbFailsDistribution =\n\t'SELECT fails, count(*) AS num_pages ' +\n\t'FROM stats ' +\n\t'JOIN pages ON pages.latest_stat = stats.id ' +\n\t'GROUP by fails';\n\nvar dbSkipsDistribution =\n\t'SELECT skips, count(*) AS num_pages ' +\n\t'FROM stats ' +\n\t'JOIN pages ON pages.latest_stat = stats.id ' +\n\t'GROUP by skips';\n\n// Limit to 100 recent commits\nvar dbCommits =\n\t'SELECT hash, timestamp ' +\n\t/*\n\t// get the number of fixes column\n\t\t'(SELECT count(*) ' +\n\t\t'FROM pages ' +\n\t\t\t'JOIN stats AS s1 ON s1.page_id = pages.id ' +\n\t\t\t'JOIN stats AS s2 ON s2.page_id = pages.id ' +\n\t\t'WHERE s1.commit_hash = (SELECT hash FROM commits c2 where c2.timestamp < c1.timestamp ORDER BY timestamp DESC LIMIT 1 ) ' +\n\t\t\t'AND s2.commit_hash = c1.hash AND s1.score < s2.score) as numfixes, ' +\n\t// get the number of regressions column\n\t\t'(SELECT count(*) ' +\n\t\t'FROM pages ' +\n\t\t\t'JOIN stats AS s1 ON s1.page_id = pages.id ' +\n\t\t\t'JOIN stats AS s2 ON s2.page_id = pages.id ' +\n\t\t'WHERE s1.commit_hash = (SELECT hash FROM commits c2 where c2.timestamp < c1.timestamp ORDER BY timestamp DESC LIMIT 1 ) ' +\n\t\t\t'AND s2.commit_hash = c1.hash AND s1.score > s2.score) as numregressions, ' +\n\n\t// get the number of tests for this commit column\n\t\t'(select count(*) from stats where stats.commit_hash = c1.hash) as numtests ' +\n\t*/\n\t'FROM commits c1 ' +\n\t'ORDER BY timestamp DESC LIMIT 100';\n\nvar dbCommitHashes =\n\t'SELECT hash FROM commits ORDER BY timestamp DESC';\n\nvar dbFixesBetweenRevs =\n\t'SELECT pages.title, pages.prefix, ' +\n\t's1.commit_hash AS new_commit, s1.errors AS errors, s1.fails AS fails, s1.skips AS skips, ' +\n\t's2.commit_hash AS old_commit, s2.errors AS old_errors, s2.fails AS old_fails, s2.skips AS old_skips ' +\n\t'FROM pages ' +\n\t'JOIN stats AS s1 ON s1.page_id = pages.id ' +\n\t'JOIN stats AS s2 ON s2.page_id = pages.id ' +\n\t'WHERE s1.commit_hash = ? AND s2.commit_hash = ? AND s1.score < s2.score ' +\n\t'ORDER BY s1.score - s2.score ASC ' +\n\t'LIMIT 40 OFFSET ?';\n\nvar dbNumFixesBetweenRevs =\n\t'SELECT count(*) as numFixes ' +\n\t'FROM pages ' +\n\t'JOIN stats AS s1 ON s1.page_id = pages.id ' +\n\t'JOIN stats AS s2 ON s2.page_id = pages.id ' +\n\t'WHERE s1.commit_hash = ? AND s2.commit_hash = ? AND s1.score < s2.score ';\n\nvar dbRegressionsBetweenRevs =\n\t'SELECT pages.title, pages.prefix, ' +\n\t's1.commit_hash AS new_commit, s1.errors AS errors, s1.fails AS fails, s1.skips AS skips, ' +\n\t's2.commit_hash AS old_commit, s2.errors AS old_errors, s2.fails AS old_fails, s2.skips AS old_skips ' +\n\t'FROM pages ' +\n\t'JOIN stats AS s1 ON s1.page_id = pages.id ' +\n\t'JOIN stats AS s2 ON s2.page_id = pages.id ' +\n\t'WHERE s1.commit_hash = ? AND s2.commit_hash = ? AND s1.score > s2.score ' +\n\t'ORDER BY s1.score - s2.score DESC ' +\n\t'LIMIT 40 OFFSET ?';\n\nvar dbNumRegressionsBetweenRevs =\n\t'SELECT count(*) as numRegressions ' +\n\t'FROM pages ' +\n\t'JOIN stats AS s1 ON s1.page_id = pages.id ' +\n\t'JOIN stats AS s2 ON s2.page_id = pages.id ' +\n\t'WHERE s1.commit_hash = ? AND s2.commit_hash = ? AND s1.score > s2.score ';\n\nvar dbResultsQuery =\n\t'SELECT result FROM results';\n\nvar dbResultsPerWikiQuery =\n\t'SELECT result FROM results ' +\n\t'JOIN pages ON pages.id = results.page_id ' +\n\t'WHERE pages.prefix = ?';\n\nvar dbGetTwoResults =\n\t'SELECT result FROM results ' +\n\t'JOIN commits ON results.commit_hash = commits.hash ' +\n\t'JOIN pages ON pages.id = results.page_id ' +\n\t'WHERE pages.title = ? AND pages.prefix = ? ' +\n\t'AND (commits.hash = ? OR commits.hash = ?) ' +\n\t'ORDER BY commits.timestamp';\n\nvar transFetchCB = function(msg, trans, failCb, successCb, err, result) {\n\tif (err) {\n\t\ttrans.rollback(function() {\n\t\t\tif (failCb) {\n\t\t\t\tfailCb(msg ? msg + err.toString() : err, result);\n\t\t\t}\n\t\t});\n\t} else if (successCb) {\n\t\tsuccessCb(result);\n\t}\n};\n\nvar fetchPages = function(commitHash, cutOffTimestamp, cb) {\n\tvar trans = db.startTransaction();\n\ttrans.query(dbGetTitle, [maxFetchRetries, commitHash, maxTries, cutOffTimestamp, batchSize], transFetchCB.bind(null, 'Error getting next titles', trans, cb, function(rows) {\n\t\tif (!rows || rows.length === 0) {\n\t\t\ttrans.commit(cb.bind(null, null, rows));\n\t\t} else {\n\t\t\t// Process the rows: Weed out the crashers.\n\t\t\tvar pages = [];\n\t\t\tvar pageIds = [];\n\t\t\tfor (var i = 0; i < rows.length; i++) {\n\t\t\t\tvar row = rows[i];\n\t\t\t\tpageIds.push(row.id);\n\t\t\t\tpages.push({ id: row.id, prefix: row.prefix, title: row.title });\n\t\t\t}\n\t\t\ttrans.query(dbUpdatePageClaims, [commitHash, new Date(), pageIds], transFetchCB.bind(null, 'Error updating claims', trans, cb, function() {\n\t\t\t\ttrans.commit(cb.bind(null, null, pages));\n\t\t\t}));\n\t\t}\n\t})).execute();\n};\n\nvar fetchedPages = [];\nvar lastFetchedCommit = null;\nvar lastFetchedDate = new Date(0);\nvar knownCommits;\n\nvar getTitle = function(req, res) {\n\tvar commitHash = req.query.commit;\n\tvar commitDate = new Date(req.query.ctime);\n\tvar knownCommit = knownCommits && knownCommits[ commitHash ];\n\n\treq.connection.setTimeout(300 * 1000);\n\tres.setHeader('Content-Type', 'text/plain; charset=UTF-8');\n\n\t// Keep track of known commits so we can discard clients still on older\n\t// versions. If we don't know about the commit, then record it\n\t// Use a transaction to make sure we don't start fetching pages until\n\t// we've done this\n\tif (!knownCommit) {\n\t\tvar trans = db.startTransaction();\n\t\tif (!knownCommits) {\n\t\t\tknownCommits = {};\n\t\t\ttrans.query(dbCommitHashes, null, function(err, resCommitHashes) {\n\t\t\t\tif (err) {\n\t\t\t\t\tconsole.log('Error fetching known commits', err);\n\t\t\t\t} else {\n\t\t\t\t\tresCommitHashes.forEach(function(v) {\n\t\t\t\t\t\tknownCommits[v.hash] = commitDate;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\n\t\t// New commit, record it\n\t\tknownCommits[ commitHash ] = commitDate;\n\t\ttrans.query(dbInsertCommit, [ commitHash, new Date() ], function(err, commitInsertResult) {\n\t\t\tif (err) {\n\t\t\t\tconsole.error(\"Error inserting commit \" + commitHash);\n\t\t\t} else if (commitInsertResult.affectedRows > 0) {\n\t\t\t\t// If this is a new commit, we need to clear the number of times a\n\t\t\t\t// crasher page has been sent out so that each title gets retested\n\t\t\t\ttrans.query(dbUpdateCrashersClearTries, [ commitHash, maxTries ]);\n\t\t\t}\n\t\t});\n\n\t\ttrans.commit();\n\t}\n\tif (knownCommit && commitHash !== lastFetchedCommit) {\n\t\t// It's an old commit, tell the client so it can restart.\n\t\t// HTTP status code 426 Update Required\n\t\tres.status(426).send(\"Old commit\");\n\t\treturn;\n\t}\n\n\tvar fetchCb = function(err, pages) {\n\t\tif (err) {\n\t\t\tres.status(500).send(\"Error: \" + err.toString());\n\t\t\treturn;\n\t\t}\n\n\t\tif (pages) {\n\t\t\t// Get the pages that aren't already fetched, to guard against the\n\t\t\t// case of clients not finishing the whole batch in the cutoff time\n\t\t\tvar newPages = pages.filter(function(p) {\n\t\t\t\treturn fetchedPages.every(function(f) {\n\t\t\t\t\treturn f.id !== p.id;\n\t\t\t\t});\n\t\t\t});\n\t\t\t// Append the new pages to the already fetched ones, in case there's\n\t\t\t// a parallel request.\n\t\t\tfetchedPages = fetchedPages.concat(newPages);\n\t\t}\n\t\tif (fetchedPages.length === 0) {\n\t\t\t// Send 404 to indicate no pages available now, clients depend on\n\t\t\t// this.\n\t\t\tres.status(404).send('No available titles that fit the constraints.');\n\t\t} else {\n\t\t\tvar page = fetchedPages.pop();\n\n\t\t\tconsole.log(' ->', page.prefix + ':' + page.title);\n\t\t\tres.status(200).send(page);\n\t\t}\n\t};\n\n\t// Look if there's a title available in the already fetched ones.\n\t// Ensure that we load a batch when the commit has changed.\n\tif (fetchedPages.length === 0 ||\n\t\t\tcommitHash !== lastFetchedCommit ||\n\t\t\t(lastFetchedDate.getTime() + (cutOffTime * 1000)) < Date.now()) {\n\t\t// Select pages that were not claimed in the 10 minutes.\n\t\t// If we didn't get a result from a client 10 minutes after\n\t\t// it got a rt claim on a page, something is wrong with the client\n\t\t// or with parsing the page.\n\t\t//\n\t\t// Hopefully, no page takes longer than 10 minutes to parse. :)\n\n\t\tlastFetchedCommit = commitHash;\n\t\tlastFetchedDate = new Date();\n\t\tfetchPages(commitHash, new Date(Date.now() - (cutOffTime * 1000)), fetchCb);\n\t} else {\n\t\tfetchCb();\n\t}\n};\n\nvar statsScore = function(skipCount, failCount, errorCount) {\n\t// treat <errors,fails,skips> as digits in a base 1000 system\n\t// and use the number as a score which can help sort in topfails.\n\treturn errorCount * 1000000 + failCount * 1000 + skipCount;\n};\n\nvar receiveResults = function(req, res) {\n\treq.connection.setTimeout(300 * 1000);\n\tvar title = req.params[0];\n\tvar prefix = req.params[1];\n\tvar commitHash = req.body.commit;\n\tvar result = req.body.results;\n\tvar skipCount;\n\tvar failCount;\n\tvar errorCount;\n\tvar dneError;\n\n\tvar contentType = req.headers[\"content-type\"];\n\tvar resultString;\n\tif (contentType.match(/application\\/json/i)) {\n\t\t// console.warn(\"application/json\");\n\t\terrorCount = result.err ? 1 : 0;\n\t\tfailCount = parseInt(result.fails || \"0\");\n\t\tskipCount = parseInt(result.skips || \"0\");\n\t\tresultString = JSON.stringify(result);\n\t} else {\n\t\t// console.warn(\"old xml junit style\");\n\t\terrorCount = result.match(/<error/g);\n\t\terrorCount = errorCount ? errorCount.length : 0;\n\t\tskipCount = result.match(/<skipped/g);\n\t\tskipCount = skipCount ? skipCount.length : 0;\n\t\tfailCount = result.match(/<failure/g);\n\t\tfailCount = failCount ? failCount.length : 0;\n\t\tdneError = result.match(/Error: Got status code: 404/g);\n\t\tresultString = result;\n\t}\n\n\t// Find the number of selser errors\n\tvar selserErrorCount = parsoidRTConfig ? parsoidRTConfig.parseSelserStats(result) : 0;\n\n\t// Get perf stats\n\tvar perfstats = perfConfig ? perfConfig.parsePerfStats(result) : null;\n\n\tres.setHeader('Content-Type', 'text/plain; charset=UTF-8');\n\n\tvar trans = db.startTransaction();\n\tvar transUpdateCB = function(type, successCb, err, result2) {\n\t\tif (err) {\n\t\t\ttrans.rollback();\n\t\t\tvar msg = \"Error inserting/updating \" + type + \" for page: \" +  prefix + ':' + title + \" and hash: \" + commitHash;\n\t\t\tconsole.error(msg);\n\t\t\tconsole.error(err);\n\t\t\tif (res) {\n\t\t\t\tres.status(500).send(msg);\n\t\t\t}\n\t\t} else if (successCb) {\n\t\t\tsuccessCb(result2);\n\t\t}\n\t};\n\n\t// console.warn(\"got: \" + JSON.stringify([title, commitHash, result, skipCount, failCount, errorCount]));\n\tif (errorCount > 0 && dneError) {\n\t\t// Page fetch error, increment the fetch error count so, when it goes\n\t\t// over maxFetchRetries, it won't be considered for tests again.\n\t\tconsole.log('XX', prefix + ':' + title);\n\t\ttrans.query(dbIncrementFetchErrorCount, [commitHash, title, prefix],\n\t\t\t\ttransUpdateCB.bind(null, \"page fetch error count\", null))\n\t\t\t.commit(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tconsole.error(\"Error incrementing fetch count: \" + err.toString());\n\t\t\t\t}\n\t\t\t\tres.status(200).send('');\n\t\t\t});\n\n\t} else {\n\t\ttrans.query(dbFindPage, [ title, prefix ], function(err, pages) {\n\t\t\tif (!err && pages.length === 1) {\n\t\t\t\t// Found the correct page, fill the details up\n\t\t\t\tvar page = pages[0];\n\n\t\t\t\tvar score = statsScore(skipCount, failCount, errorCount);\n\t\t\t\tvar latestResultId = 0;\n\t\t\t\tvar latestStatId = 0;\n\t\t\t\t// Insert the result\n\t\t\t\ttrans.query(dbInsertResult, [ page.id, commitHash, resultString ],\n\t\t\t\t\ttransUpdateCB.bind(null, \"result\", function(insertedResult) {\n\t\t\t\t\t\tlatestResultId = insertedResult.insertId;\n\t\t\t\t\t\t// Insert the stats\n\t\t\t\t\t\ttrans.query(dbInsertStats, [ skipCount, failCount, errorCount, selserErrorCount, score, page.id, commitHash ],\n\t\t\t\t\t\t\ttransUpdateCB.bind(null, \"stats\", function(insertedStat) {\n\t\t\t\t\t\t\t\tlatestStatId = insertedStat.insertId;\n\n\t\t\t\t\t\t\t\t// And now update the page with the latest info\n\t\t\t\t\t\t\t\ttrans.query(dbUpdatePageLatestResults, [ latestStatId, score, latestResultId, commitHash, page.id ],\n\t\t\t\t\t\t\t\t\t\ttransUpdateCB.bind(null, \"latest result\", null))\n\t\t\t\t\t\t\t\t\t.commit(function() {\n\t\t\t\t\t\t\t\t\t\tconsole.log('<- ', prefix + ':' + title, ':', skipCount, failCount,\n\t\t\t\t\t\t\t\t\t\t\terrorCount, commitHash.substr(0, 7));\n\n\t\t\t\t\t\t\t\t\t\tif (perfConfig) {\n\t\t\t\t\t\t\t\t\t\t\t// Insert the performance stats, ignoring errors for now\n\t\t\t\t\t\t\t\t\t\t\tperfConfig.insertPerfStats(db, page.id, commitHash, perfstats, function() {});\n\t\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\t\t// Maybe the perfstats aren't committed yet, but it shouldn't be a problem\n\t\t\t\t\t\t\t\t\t\tres.status(200).send('');\n\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}));\n\t\t\t\t\t}));\n\t\t\t} else {\n\t\t\t\ttrans.rollback(function() {\n\t\t\t\t\tif (err) {\n\t\t\t\t\t\tres.status(500).send(err.toString());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tres.status(200).send(\"Did not find claim for title: \" + prefix + ':' + title);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t}\n\t\t}).execute();\n\t}\n};\n\nvar pageListData = [\n\t{ url: 'topfails', title: 'Results by title' },\n\t{ url: 'failedFetches', title: 'Non-existing test pages' },\n\t{ url: 'semanticDiffsDistr', title: 'Histogram of semantic diffs' },\n\t{ url: 'syntacticDiffsDistr', title: 'Histogram of syntactic diffs' },\n\t{ url: 'commits', title: 'List of all tested commits' },\n];\n\nif (perfConfig) {\n\tperfConfig.updateIndexPageUrls(pageListData);\n}\n\nif (parsoidRTConfig) {\n\tparsoidRTConfig.updateIndexPageUrls(pageListData);\n}\n\nvar statsWebInterface = function(req, res) {\n\tvar query, queryParams;\n\tvar cutoffDate = new Date(Date.now() - (cutOffTime * 1000));\n\tvar prefix = req.params[1] || null;\n\n\tvar handleErr = function(err, res) {\n\t\tres.status(500).send(err.toString());\n\t};\n\n\tdb.query(dbLatestHash, [], function(err, row) {\n\t\tif (err) { return handleErr(err, res); }\n\n\t\tvar latestHash = row[0].hash;\n\t\tdb.query(dbPreviousHash, [], function(err, row) {\n\t\t\tif (err) { return handleErr(err, res); }\n\t\t\tvar previousHash = row.length > 0 ? row[0].hash : 'null';\n\n\t\t\t// Switch the query object based on the prefix\n\t\t\tif (prefix !== null) {\n\t\t\t\tquery = dbPerWikiStatsQuery;\n\t\t\t\tqueryParams = [\n\t\t\t\t\tprefix, prefix, prefix, prefix,\n\t\t\t\t\tprefix, prefix, prefix, prefix,\n\t\t\t\t\tmaxTries, cutoffDate, prefix, prefix,\n\t\t\t\t];\n\t\t\t} else {\n\t\t\t\tquery = dbStatsQuery;\n\t\t\t\tqueryParams = [\n\t\t\t\t\tlatestHash, previousHash,\n\t\t\t\t\tlatestHash,\n\t\t\t\t\tlatestHash, previousHash,\n\t\t\t\t\tlatestHash, previousHash,\n\t\t\t\t\tlatestHash, maxTries, cutoffDate,\n\t\t\t\t\tlatestHash\n\t\t\t\t];\n\t\t\t}\n\n\t\t\t// Fetch stats for commit\n\t\t\tdb.query(query, queryParams, function(err, row) {\n\t\t\t\tif (err) { return handleErr(err, res); }\n\n\t\t\t\tres.status(200);\n\n\t\t\t\tvar tests = row[0].total;\n\t\t\t\tvar errorLess = row[0].no_errors;\n\t\t\t\tvar skipLess = row[0].no_skips;\n\t\t\t\tvar numRegressions = row[0].numregressions;\n\t\t\t\tvar numFixes = row[0].numfixes;\n\t\t\t\tvar noErrors = Math.round(100 * 100 * errorLess / (tests || 1)) / 100;\n\t\t\t\tvar perfects = Math.round(100 * 100 * skipLess / (tests || 1)) / 100;\n\t\t\t\tvar syntacticDiffs = Math.round(100 * 100 *\n\t\t\t\t\t(row[0].no_fails / (tests || 1))) / 100;\n\n\t\t\t\tvar width = 800;\n\n\t\t\t\tvar data = {\n\t\t\t\t\tprefix: prefix,\n\t\t\t\t\tresults: {\n\t\t\t\t\t\ttests: tests,\n\t\t\t\t\t\tnoErrors: noErrors,\n\t\t\t\t\t\tsyntacticDiffs: syntacticDiffs,\n\t\t\t\t\t\tperfects: perfects,\n\t\t\t\t\t},\n\t\t\t\t\tgraphWidths: {\n\t\t\t\t\t\tperfect: width * perfects / 100 || 0,\n\t\t\t\t\t\tsyntacticDiff: width * (syntacticDiffs - perfects) / 100 || 0,\n\t\t\t\t\t\tsemanticDiff: width * (100 - syntacticDiffs) / 100 || 0,\n\t\t\t\t\t},\n\t\t\t\t\tlatestRevision: [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tdescription: 'Git SHA1',\n\t\t\t\t\t\t\tvalue: row[0].maxhash,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tdescription: 'Test Results',\n\t\t\t\t\t\t\tvalue: row[0].maxresults,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tdescription: 'Crashers',\n\t\t\t\t\t\t\tvalue: row[0].crashers,\n\t\t\t\t\t\t\turl: 'crashers',\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tdescription: 'Fixes',\n\t\t\t\t\t\t\tvalue: numFixes,\n\t\t\t\t\t\t\turl: 'topfixes/between/' + row[0].secondhash + '/' + row[0].maxhash,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tdescription: 'Regressions',\n\t\t\t\t\t\t\tvalue: numRegressions,\n\t\t\t\t\t\t\turl: 'regressions/between/' + row[0].secondhash + '/' + row[0].maxhash,\n\t\t\t\t\t\t},\n\t\t\t\t\t],\n\t\t\t\t\tpages: pageListData,\n\t\t\t\t};\n\n\t\t\t\tif (perfConfig) {\n\t\t\t\t\tperfConfig.updateIndexData(data, row);\n\t\t\t\t}\n\n\t\t\t\tif (parsoidRTConfig) {\n\t\t\t\t\tparsoidRTConfig.updateIndexData(data, row);\n\t\t\t\t\tdata.parsoidRT = true;\n\t\t\t\t}\n\n\t\t\t\tres.render('index.html', data);\n\t\t\t});\n\t\t});\n\t});\n};\n\nvar makeFailsRow = function(urlPrefix, row) {\n\treturn [\n\t\tRH.pageTitleData(urlPrefix, row),\n\t\tRH.commitLinkData(urlPrefix, row.hash, row.title, row.prefix),\n\t\trow.errors === null ? 0 : row.errors,\n\t\trow.fails,\n\t\trow.skips,\n\t];\n};\n\nvar failsWebInterface = function(req, res) {\n\tvar page = (req.params[0] || 0) - 0;\n\tvar offset = page * 40;\n\tvar relativeUrlPrefix = (req.params[0] ? '../' : '');\n\n\tvar data = {\n\t\tpage: page,\n\t\trelativeUrlPrefix: relativeUrlPrefix,\n\t\turlPrefix: relativeUrlPrefix + 'topfails',\n\t\turlSuffix: '',\n\t\theading: 'Results by title',\n\t\theader: ['Title', 'Commit', 'Errors', 'Semantic diffs', 'Syntactic diffs'],\n\t};\n\tdb.query(dbFailsQuery, [ offset ],\n\t\tRH.displayPageList.bind(null, res, data, makeFailsRow));\n};\n\nvar resultsWebInterface = function(req, res) {\n\tvar query, queryParams;\n\tvar prefix = req.params[1] || null;\n\n\tif (prefix !== null) {\n\t\tquery = dbResultsPerWikiQuery;\n\t\tqueryParams = [ prefix ];\n\t} else {\n\t\tquery = dbResultsQuery;\n\t\tqueryParams = [];\n\t}\n\n\tdb.query(query, queryParams, function(err, rows) {\n\t\tif (err) {\n\t\t\tconsole.error(err);\n\t\t\tres.status(500).send(err.toString());\n\t\t} else {\n\t\t\tres.setHeader('Content-Type', 'text/xml; charset=UTF-8');\n\t\t\tvar body = '<?xml-stylesheet href=\"/static/result.css\"?>\\n';\n\t\t\tbody += '<testsuite>';\n\t\t\tfor (var i = 0; i < rows.length; i++) {\n\t\t\t\tbody += rows[i].result;\n\t\t\t\tbody += '</testsuite>';\n\t\t\t}\n\t\t\tres.status(200).send(body);\n\t\t}\n\t});\n};\n\nvar resultWebCallback = function(req, res, err, row) {\n\tif (err) {\n\t\tconsole.error(err);\n\t\tres.status(500).send(err.toString());\n\t} else if (row && row.length > 0) {\n\t\tif (row[0].result.match(/<testsuite/)) {\n\t\t\tres.setHeader('Content-Type', 'text/xml; charset=UTF-8');\n\t\t\tres.status(200);\n\t\t\tres.write('<?xml-stylesheet href=\"/static/result.css\"?>\\n');\n\t\t}\n\t\tres.end(row[0].result);\n\t} else {\n\t\tres.status(200).send('no results for that page at the requested revision');\n\t}\n};\n\nvar resultWebInterface = function(req, res) {\n\tvar commit = req.params[2] ? req.params[0] : null;\n\tvar title = commit === null ? req.params[1] : req.params[2];\n\tvar prefix = commit === null ? req.params[0] : req.params[1];\n\n\tif (commit !== null) {\n\t\tdb.query(dbGetResultWithCommit, [ commit, title, prefix ], resultWebCallback.bind(null, req, res));\n\t} else {\n\t\tdb.query(dbGetOneResult, [ title, prefix ], resultWebCallback.bind(null, req, res));\n\t}\n};\n\nvar getFailedFetches = function(req, res) {\n\tdb.query(dbFailedFetches, [maxFetchRetries], function(err, rows) {\n\t\tif (err) {\n\t\t\tconsole.error(err);\n\t\t\tres.status(500).send(err.toString());\n\t\t} else {\n\t\t\tres.status(200);\n\t\t\tvar n = rows.length;\n\t\t\tvar pageData = [];\n\t\t\tfor (var i = 0; i < n; i++) {\n\t\t\t\tvar prefix = rows[i].prefix;\n\t\t\t\tvar title = rows[i].title;\n\t\t\t\tvar name = prefix + ':' + title;\n\t\t\t\tpageData.push({\n\t\t\t\t\turl: prefix.replace(/wiki$/, '') + '.wikipedia.org/wiki/' + title,\n\t\t\t\t\tlinkName: name.replace('&', '&amp;'),\n\t\t\t\t});\n\t\t\t}\n\t\t\tvar heading = n === 0 ? 'No titles returning 404!  All\\'s well with the world!' :\n\t\t\t\t'The following ' + n + ' titles return 404';\n\t\t\tvar data = {\n\t\t\t\talt: n === 0,\n\t\t\t\theading: heading,\n\t\t\t\titems: pageData,\n\t\t\t};\n\t\t\tres.render('list.html', data);\n\t\t}\n\t});\n};\n\nvar getCrashers = function(req, res) {\n\tvar cutoffDate = new Date(Date.now() - (cutOffTime * 1000));\n\tdb.query(dbCrashers, [ maxTries, cutoffDate ], function(err, rows) {\n\t\tif (err) {\n\t\t\tconsole.error(err);\n\t\t\tres.status(500).send(err.toString());\n\t\t} else {\n\t\t\tres.status(200);\n\t\t\tvar n = rows.length;\n\t\t\tvar pageData = [];\n\t\t\tfor (var i = 0; i < n; i++) {\n\t\t\t\tvar prefix = rows[i].prefix;\n\t\t\t\tvar title = rows[i].title;\n\t\t\t\tpageData.push({\n\t\t\t\t\tdescription: rows[i].claim_hash,\n\t\t\t\t\turl: prefix.replace(/wiki$/, '') + '.wikipedia.org/wiki/' + title,\n\t\t\t\t\tlinkName: prefix + ':' + title,\n\t\t\t\t});\n\t\t\t}\n\t\t\tvar heading = n === 0 ? 'No titles crash the testers! All\\'s well with the world!' :\n\t\t\t\t'The following ' + n + ' titles crash the testers at least ' +\n\t\t\t\tmaxTries + ' times ';\n\t\t\tvar data = {\n\t\t\t\talt: n === 0,\n\t\t\t\theading: heading,\n\t\t\t\titems: pageData,\n\t\t\t};\n\t\t\tres.render('list.html', data);\n\t\t}\n\t});\n};\n\nvar getFailsDistr = function(req, res) {\n\tdb.query(dbFailsDistribution, null, function(err, rows) {\n\t\tif (err) {\n\t\t\tconsole.error(err);\n\t\t\tres.status(500).send(err.toString());\n\t\t} else {\n\t\t\tres.status(200);\n\t\t\tvar n = rows.length;\n\t\t\tvar intervalData = [];\n\t\t\tfor (var i = 0; i < n; i++) {\n\t\t\t\tvar r = rows[i];\n\t\t\t\tintervalData.push({ errors: r.fails, pages: r.num_pages });\n\t\t\t}\n\t\t\tvar data = {\n\t\t\t\theading: 'Distribution of semantic errors',\n\t\t\t\tinterval: intervalData,\n\t\t\t};\n\t\t\tres.render('histogram.html', data);\n\t\t}\n\t});\n};\n\nvar getSkipsDistr = function(req, res) {\n\tdb.query(dbSkipsDistribution, null, function(err, rows) {\n\t\tif (err) {\n\t\t\tconsole.error(err);\n\t\t\tres.status(500).send(err.toString());\n\t\t} else {\n\t\t\tres.status(200);\n\t\t\tvar n = rows.length;\n\t\t\tvar intervalData = [];\n\t\t\tfor (var i = 0; i < n; i++) {\n\t\t\t\tvar r = rows[i];\n\t\t\t\tintervalData.push({ errors: r.skips, pages: r.num_pages });\n\t\t\t}\n\t\t\tvar data = {\n\t\t\t\theading: 'Distribution of syntactic errors',\n\t\t\t\tinterval: intervalData,\n\t\t\t};\n\t\t\tres.render('histogram.html', data);\n\t\t}\n\t});\n};\n\nvar getRegressions = function(req, res) {\n\tvar r1 = req.params[0];\n\tvar r2 = req.params[1];\n\tvar page = (req.params[2] || 0) - 0;\n\tvar offset = page * 40;\n\tvar relativeUrlPrefix = '../../../';\n\trelativeUrlPrefix = relativeUrlPrefix + (req.params[0] ? '../' : '');\n\tdb.query(dbNumRegressionsBetweenRevs, [ r2, r1 ], function(err, row) {\n\t\tif (err) {\n\t\t\tres.status(500).send(err.toString());\n\t\t} else {\n\t\t\tvar data = {\n\t\t\t\tpage: page,\n\t\t\t\trelativeUrlPrefix: relativeUrlPrefix,\n\t\t\t\turlPrefix: relativeUrlPrefix + 'regressions/between/' + r1 + '/' + r2,\n\t\t\t\turlSuffix: '',\n\t\t\t\theading: \"Total regressions between selected revisions: \" +\n\t\t\t\t\trow[0].numRegressions,\n\t\t\t\theadingLink: [{ url: relativeUrlPrefix + 'topfixes/between/' + r1 + '/' + r2, name: 'topfixes' }],\n\t\t\t\theader: RH.regressionsHeaderData,\n\t\t\t};\n\t\t\tdb.query(dbRegressionsBetweenRevs, [ r2, r1, offset ],\n\t\t\t\tRH.displayPageList.bind(null, res, data, RH.makeRegressionRow));\n\t\t}\n\t});\n};\n\nvar getTopfixes = function(req, res) {\n\tvar r1 = req.params[0];\n\tvar r2 = req.params[1];\n\tvar page = (req.params[2] || 0) - 0;\n\tvar offset = page * 40;\n\tvar relativeUrlPrefix = '../../../';\n\trelativeUrlPrefix = relativeUrlPrefix + (req.params[0] ? '../' : '');\n\tdb.query(dbNumFixesBetweenRevs, [ r2, r1 ], function(err, row) {\n\t\tif (err) {\n\t\t\tres.status(500).send(err.toString());\n\t\t} else {\n\t\t\tvar data = {\n\t\t\t\tpage: page,\n\t\t\t\trelativeUrlPrefix: relativeUrlPrefix,\n\t\t\t\turlPrefix: relativeUrlPrefix + 'topfixes/between/' + r1 + '/' + r2,\n\t\t\t\turlSuffix: '',\n\t\t\t\theading: 'Total fixes between selected revisions: ' + row[0].numFixes,\n\t\t\t\theadingLink: [{ url: relativeUrlPrefix + \"regressions/between/\" + r1 + \"/\" + r2, name: 'regressions' }],\n\t\t\t\theader: RH.regressionsHeaderData,\n\t\t\t};\n\t\t\tdb.query(dbFixesBetweenRevs, [ r2, r1, offset ],\n\t\t\t\tRH.displayPageList.bind(null, res, data, RH.makeRegressionRow));\n\t\t}\n\t});\n};\n\nvar getCommits = function(req, res) {\n\tdb.query(dbCommits, null, function(err, rows) {\n\t\tif (err) {\n\t\t\tconsole.error(err);\n\t\t\tres.status(500).send(err.toString());\n\t\t} else {\n\t\t\tres.status(200);\n\t\t\tvar n = rows.length;\n\t\t\tvar tableRows = [];\n\t\t\tfor (var i = 0; i < n; i++) {\n\t\t\t\tvar row = rows[i];\n\t\t\t\tvar tableRow = { hash: row.hash, timestamp: row.timestamp };\n\t\t\t\tif (i + 1 < n) {\n\t\t\t\t\ttableRow.regUrl = 'regressions/between/' + rows[i + 1].hash + '/' + row.hash;\n\t\t\t\t\ttableRow.fixUrl = 'topfixes/between/' + rows[i + 1].hash + '/' + row.hash;\n\t\t\t\t}\n\t\t\t\ttableRows.push(tableRow);\n\t\t\t}\n\t\t\tvar data = {\n\t\t\t\tnumCommits: n,\n\t\t\t\tlatest: n ? rows[n - 1].timestamp.toString().slice(4, 15) : '',\n\t\t\t\theader: ['Commit hash', 'Timestamp', 'Tests', '-', '+'],\n\t\t\t\trow: tableRows,\n\t\t\t};\n\n\t\t\tres.render('commits.html', data);\n\t\t}\n\t});\n};\n\nvar diffResultWebCallback = function(req, res, flag, err, row) {\n\tif (err) {\n\t\tconsole.error(err);\n\t\tres.status(500).send(err.toString());\n\t} else if (row.length === 2) {\n\t\tvar oldCommit = req.params[0].slice(0, 10);\n\t\tvar newCommit = req.params[1].slice(0, 10);\n\t\tvar oldResult = row[0].result;\n\t\tvar newResult = row[1].result;\n\t\tvar flagResult = Diff.resultFlagged(oldResult, newResult, oldCommit, newCommit, flag);\n\t\tres.setHeader('Content-Type', 'text/xml; charset=UTF-8');\n\t\tres.status(200);\n\t\tres.write('<?xml-stylesheet href=\"/static/result.css\"?>\\n');\n\t\tres.end(flagResult);\n\t} else {\n\t\tvar commit = flag === '+' ? req.params[1] : req.params[0];\n\t\tres.redirect('/result/' + commit + '/' + encodeURIComponent(req.params[2]) + '/' + encodeURIComponent(req.params[3]));\n\t}\n};\n\nvar resultFlagNewWebInterface = function(req, res) {\n\tvar oldCommit = req.params[0];\n\tvar newCommit = req.params[1];\n\tvar prefix = req.params[2];\n\tvar title = req.params[3];\n\n\tdb.query(dbGetTwoResults, [ title, prefix, oldCommit, newCommit ],\n\t\tdiffResultWebCallback.bind(null, req, res, '+'));\n};\n\nvar resultFlagOldWebInterface = function(req, res) {\n\tvar oldCommit = req.params[0];\n\tvar newCommit = req.params[1];\n\tvar prefix = req.params[2];\n\tvar title = req.params[3];\n\n\tdb.query(dbGetTwoResults, [ title, prefix, oldCommit, newCommit ],\n\t\tdiffResultWebCallback.bind(null, req, res, '-'));\n};\n\nvar startCoordApp = Promise.method(function() {\n\t// Make the coordinator app\n\tvar coordApp = express();\n\n\t// application/x-www-form-urlencoded\n\t// multipart/form-data\n\tcoordApp.use(busboy({\n\t\tlimits: {\n\t\t\tfields: 10,\n\t\t\tfieldSize: 1000000,\n\t\t},\n\t}));\n\n\t// application/json\n\tcoordApp.use(bodyParser.json({\n\t\tlimit: 100000,\n\t}));\n\n\tcoordApp.use(function(req, res, next) {\n\t\treq.body = req.body || {};\n\t\tif (!req.busboy) {\n\t\t\treturn next();\n\t\t}\n\t\treq.busboy.on('field', function(field, val) {\n\t\t\treq.body[field] = val;\n\t\t});\n\t\treq.busboy.on('finish', function() {\n\t\t\tnext();\n\t\t});\n\t\treq.pipe(req.busboy);\n\t});\n\n\t// Clients will GET this path if they want to run a test\n\tcoordApp.get(/^\\/title$/, getTitle);\n\n\t// Receive results from clients\n\tcoordApp.post(/^\\/result\\/([^\\/]+)\\/([^\\/]+)/, receiveResults);\n\n\tvar rtResultsServer;\n\treturn new Promise(function(resolve) {\n\t\trtResultsServer = coordApp.listen(settings.coordPort || 8002, process.env.INTERFACE, resolve);\n\t}).then(function() {\n\t\tconsole.log('RT test server listening on: %s', rtResultsServer.address().port);\n\t\treturn rtResultsServer;\n\t});\n});\n\nvar startWebServer = Promise.method(function() {\n\t// Make an app\n\tvar app = express();\n\n\t// Declare static directory\n\tapp.use(\"/static\", express.static(__dirname + \"/static\"));\n\n\t// Add in the bodyParser middleware (because it's pretty standard)\n\tapp.use(bodyParser.json({}));\n\n\t// robots.txt: no indexing.\n\tapp.get(/^\\/robots\\.txt$/, function(req, res) {\n\t\tres.end(\"User-agent: *\\nDisallow: /\\n\");\n\t});\n\n\t// Main interface\n\tapp.get(/^\\/results(\\/([^\\/]+))?$/, resultsWebInterface);\n\n\t// Results for a title (on latest commit)\n\tapp.get(/^\\/latestresult\\/([^\\/]+)\\/(.*)$/, resultWebInterface);\n\n\t// Results for a title on any commit\n\tapp.get(/^\\/result\\/([\\w\\-_]*)\\/([^\\/]+)\\/(.*)$/, resultWebInterface);\n\n\t// List of failures sorted by severity\n\tapp.get(/^\\/topfails\\/(\\d+)$/, failsWebInterface);\n\t// 0th page\n\tapp.get(/^\\/topfails$/, failsWebInterface);\n\n\t// Overview of stats\n\tapp.get(/^\\/$/, statsWebInterface);\n\tapp.get(/^\\/stats(\\/([^\\/]+))?$/, statsWebInterface);\n\n\t// Failed fetches\n\tapp.get(/^\\/failedFetches$/, getFailedFetches);\n\n\t// Crashers\n\tapp.get(/^\\/crashers$/, getCrashers);\n\n\t// Regressions between two revisions.\n\tapp.get(/^\\/regressions\\/between\\/([^\\/]+)\\/([^\\/]+)(?:\\/(\\d+))?$/, getRegressions);\n\n\t// Topfixes between two revisions.\n\tapp.get(/^\\/topfixes\\/between\\/([^\\/]+)\\/([^\\/]+)(?:\\/(\\d+))?$/, getTopfixes);\n\n\t// Results for a title on a commit, flag skips/fails new since older commit\n\tapp.get(/^\\/resultFlagNew\\/([\\w\\-_]*)\\/([\\w\\-_]*)\\/([^\\/]+)\\/(.*)$/, resultFlagNewWebInterface);\n\n\t// Results for a title on a commit, flag skips/fails no longer in newer commit\n\tapp.get(/^\\/resultFlagOld\\/([\\w\\-_]*)\\/([\\w\\-_]*)\\/([^\\/]+)\\/(.*)$/, resultFlagOldWebInterface);\n\n\t// Distribution of fails\n\tapp.get(/^\\/semanticDiffsDistr$/, getFailsDistr);\n\n\t// Distribution of fails\n\tapp.get(/^\\/syntacticDiffsDistr$/, getSkipsDistr);\n\n\t// List of all commits\n\tapp.get('/commits', getCommits);\n\n\t// view engine\n\tvar ve = ehbs.create({\n\t\tdefaultLayout: 'layout',\n\t\tlayoutsDir: path.join(__dirname, '/views'),\n\t\textname: '.html',\n\t\thelpers: {\n\t\t\t// block helper to reference js files in page head.\n\t\t\tjsFiles: function(options) {\n\t\t\t\tthis.javascripts = options.fn(this);\n\t\t\t\treturn null;\n\t\t\t},\n\n\t\t\tformatPerfStat: function(type, value) {\n\t\t\t\tif (type.match(/^time/)) {\n\t\t\t\t\t// Show time in seconds\n\t\t\t\t\tvalue = Math.round((value / 1000) * 100) / 100;\n\t\t\t\t\treturn value.toString() + \"s\";\n\t\t\t\t} else if (type.match(/^size/)) {\n\t\t\t\t\t// Show sizes in KiB\n\t\t\t\t\tvalue = Math.round(value / 1024);\n\t\t\t\t\treturn value.toString() + \"KiB\";\n\t\t\t\t} else {\n\t\t\t\t\t// Other values go as they are\n\t\t\t\t\treturn value.toString();\n\t\t\t\t}\n\t\t\t},\n\n\t\t\t// round numeric data, but ignore others\n\t\t\tround: function(val) {\n\t\t\t\tif (isNaN(val)) {\n\t\t\t\t\treturn val;\n\t\t\t\t} else {\n\t\t\t\t\treturn Math.round(val * 100) / 100;\n\t\t\t\t}\n\t\t\t},\n\n\t\t\tformatHash: function(hash) {\n\t\t\t\treturn hash;\n\t\t\t},\n\n\t\t\tformatDate: function(timestamp) {\n\t\t\t\treturn timestamp.toString().slice(4, 21);\n\t\t\t},\n\n\t\t\tformatUrl: function(url) {\n\t\t\t\treturn 'http://' + encodeURI(url).replace('&', '&amp;');\n\t\t\t},\n\n\t\t\tprevUrl: function(urlPrefix, urlSuffix, page) {\n\t\t\t\treturn (urlPrefix ? urlPrefix + \"/\" : \"\") + (page - 1) + urlSuffix;\n\t\t\t},\n\n\t\t\tnextUrl: function(urlPrefix, urlSuffix, page) {\n\t\t\t\treturn (urlPrefix ? urlPrefix + \"/\" : \"\") + (page + 1) + urlSuffix;\n\t\t\t},\n\t\t},\n\t});\n\n\tapp.set('views', path.join(__dirname, '/views'));\n\tapp.set('view engine', 'html');\n\tapp.engine('html', ve.engine);\n\n\tif (parsoidRTConfig) {\n\t\tparsoidRTConfig.setupEndpoints(settings, app, mysql, db, ve.handlebars);\n\t}\n\n\tif (perfConfig) {\n\t\tperfConfig.setupEndpoints(settings, app, mysql, db);\n\t}\n\n\tvar webServer;\n\treturn new Promise(function(resolve) {\n\t\twebServer = app.listen(settings.webappPort || 8001, process.env.INTERFACE, resolve);\n\t}).then(function() {\n\t\tconsole.log('Testreduce server listening on: %s', webServer.address().port);\n\t\treturn webServer;\n\t});\n});\n\nstartCoordApp().then(startWebServer).catch(function(e) {\n\tconsole.log('Error starting up: ' + e);\n\tconsole.log(e.stack);\n});\n\nmodule.exports = {};\n","usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/server.parsoid_rt.js","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/server.perf_stats.js","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/static/js/app.js","messages":[{"ruleId":"strict","severity":2,"message":"Use the global form of 'use strict'.","line":2,"column":1,"nodeType":"Program","messageId":"global","endLine":11,"endColumn":4},{"ruleId":"strict","severity":2,"message":"Use the global form of 'use strict'.","line":3,"column":2,"nodeType":"ExpressionStatement","messageId":"global","endLine":3,"endColumn":15}],"errorCount":2,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"source":"/* global initialCommitList, updateCommitList, $:false */\n$(function() {\n\t\"use strict\";\n\n\tinitialCommitList();\n\n\t$('.revisions input').on('click', function() {\n\t\tvar name = $(this).attr('name');\n\t\tupdateCommitList.bind(this, name).call();\n\t});\n});\n","usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/static/js/commitList.js","messages":[{"ruleId":"strict","severity":2,"message":"Use the global form of 'use strict'.","line":2,"column":1,"nodeType":"Program","messageId":"global","endLine":55,"endColumn":10},{"ruleId":"strict","severity":2,"message":"Use the global form of 'use strict'.","line":3,"column":2,"nodeType":"ExpressionStatement","messageId":"global","endLine":3,"endColumn":15}],"errorCount":2,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"source":"/* global $:false */\n(function(exports) {\n\t\"use strict\";\n\n\tvar numRows, newHash, oldHash;\n\n\tvar setCompareLinks = function() {\n\t\t$('.compare-reg').attr('href', '/regressions/between/' + oldHash + '/' + newHash);\n\t\t$('.compare-fix').attr('href', '/topfixes/between/' + oldHash + '/' + newHash);\n\t};\n\n\tvar button = function(name, index) {\n\t\treturn $('.revisions tr:eq(' + index + ') .buttons input[name=\"' + name + '\"]');\n\t};\n\n\tvar buttonDisplay = function(name, index, visibility) {\n\t\t$(button(name, index)).css('visibility', visibility);\n\t};\n\n\t// set initial regressions/fixes links, button visibility/checkedness\n\texports.initialCommitList = function() {\n\t\tnumRows = $('.revisions tr').length;\n\t\tnewHash = $('.revisions tr:eq(0) .hash').attr('title');\n\t\toldHash = $('.revisions tr:eq(1) .hash').attr('title');\n\t\tsetCompareLinks();\n\t\tbutton('new', 0).attr('checked', 'checked');\n\t\tbutton('old', 1).attr('checked', 'checked');\n\t\tbuttonDisplay('old', 0, 'hidden');\n\t\tfor (var i = 1; i < numRows; i++) {\n\t\t\tbuttonDisplay('new', i, 'hidden');\n\t\t}\n\t};\n\n\t// button click callback: update regressions/fixes links and button visibility\n\texports.updateCommitList = function(name) {\n\t\tif (name === 'old') {\n\t\t\toldHash = this.value;\n\t\t} else {\n\t\t\tnewHash = this.value;\n\t\t}\n\t\tsetCompareLinks();\n\t\tvar index = $(this).closest('tr').index();\n\t\tfor (var i = 0; i < numRows; i++) {\n\t\t\tif (name === 'old' && i < index) {\n\t\t\t\tbuttonDisplay('new', i, 'visible');\n\t\t\t} else if (name === 'old') {\n\t\t\t\tbuttonDisplay('new', i, 'hidden');\n\t\t\t} else if (name === 'new' && i > index) {\n\t\t\t\tbuttonDisplay('old', i, 'visible');\n\t\t\t} else {\n\t\t\t\tbuttonDisplay('old', i, 'hidden');\n\t\t\t}\n\t\t}\n\t};\n})(this);\n","usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/titles.example.bogus.json","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/titles.example.en.json","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/server/titles.example.es.json","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/utils/Utils.js","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]},{"filePath":"/src/repo/utils/promise.js","messages":[{"ruleId":"strict","severity":2,"message":"Use the global form of 'use strict'.","line":1,"column":1,"nodeType":"Program","messageId":"global","endLine":1,"endColumn":61}],"errorCount":1,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"source":"module.exports = require('prfun/wrap')(require('babybird'));\n","usedDeprecatedRules":[{"ruleId":"no-buffer-constructor","replacedBy":[]},{"ruleId":"no-new-require","replacedBy":[]},{"ruleId":"no-process-exit","replacedBy":[]}]}]

Traceback (most recent call last):
  File "/venv/lib/python3.7/site-packages/libup-0.0.1-py3.7.egg/libup/ng.py", line 1188, in main
    libup.run(args.repo, args.output, args.branch)
  File "/venv/lib/python3.7/site-packages/libup-0.0.1-py3.7.egg/libup/ng.py", line 1130, in run
    self.npm_upgrade(plan)
  File "/venv/lib/python3.7/site-packages/libup-0.0.1-py3.7.egg/libup/ng.py", line 854, in npm_upgrade
    hook(update)
  File "/venv/lib/python3.7/site-packages/libup-0.0.1-py3.7.egg/libup/ng.py", line 963, in _handle_eslint
    eslint_cfg = utils.load_ordered_json('.eslintrc.json')
  File "/venv/lib/python3.7/site-packages/libup-0.0.1-py3.7.egg/libup/utils.py", line 58, in load_ordered_json
    with open(fname) as f:
FileNotFoundError: [Errno 2] No such file or directory: '.eslintrc.json'

npm dependencies

Dependencies
Development dependencies

Logs

Source code is licensed under the AGPL.