From 56fffa6e0d375209ad6783074a17498ae5e1c68c Mon Sep 17 00:00:00 2001
From: Jon Moss
Date: Thu, 4 Jan 2018 16:50:38 -0500
Subject: [PATCH] Revert "coverage: remove folder (#57)"

This reverts commit 5d85de3e054a271a1d2c408a19771d10968a6b1f.

PR-URL: https://github.com/nodejs/testing/pull/58
Reviewed-By: Michael Dawson
---
 coverage/README.md              | 114 +++++++++++++++++++++++++++
 coverage/gcovr-patches.diff     |  23 ++++++
 coverage/generate-index-html.py | 116 ++++++++++++++++++++++++++++
 coverage/patches.diff           | 131 ++++++++++++++++++++++++++++++++
 4 files changed, 384 insertions(+)
 create mode 100644 coverage/README.md
 create mode 100644 coverage/gcovr-patches.diff
 create mode 100755 coverage/generate-index-html.py
 create mode 100644 coverage/patches.diff

diff --git a/coverage/README.md b/coverage/README.md
new file mode 100644
index 0000000..84786d9
--- /dev/null
+++ b/coverage/README.md
@@ -0,0 +1,114 @@
+# Code Coverage Generation
+
+We have nightly code coverage generation so that we can track test coverage
+for Node.js, make the information public, and then use that information
+to improve coverage over time.
+
+At this time we only capture coverage results once a day on Linux x86. We
+believe that coverage will not vary greatly across platforms and that the
+process will be too expensive to run on every commit. We will re-evaluate
+these assumptions based on data once the process has been in place for
+a while.
+
+This doc captures the infrastructure in place to support the generation
+of the coverage information published to https://coverage.nodejs.org.
+
+# Steps
+
+Generation/publication of the code coverage results consists of the following:
+
+* Nightly scheduled job - We have a job in Jenkins which is scheduled to run
+  at 11 EST each night:
+  [node-test-commit-linux-coverage](https://ci.nodejs.org/view/All/job/node-test-commit-linux-coverage/).
+* At the end of the scheduled job it rsyncs the generated data to the
+  benchmarking data machine. We do this so that once the job is complete
+  the data is in a place where we know we can pull it from, and so that
+  pulling that data will not affect any other jobs (for example jobs
+  measuring performance on the benchmark machine).
+* At hourly intervals the data is rsync'd from the benchmarking
+  data machine to the website. This is triggered from the nodejs.org website
+  machine and the data is pulled from the benchmarking data machine. This
+  allows us to minimize who can modify the nodejs.org website, as no
+  additional access is required. A sketch of what this hourly pull might
+  look like follows this list.
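+
+The pull job itself lives outside this repo; the block below is only an
+illustrative sketch. The host alias, web root and exact schedule are
+assumptions, only the direction of the transfer (the website pulls from the
+benchmarking data machine) comes from this doc:
+
+```
+# hypothetical crontab entry on the nodejs.org website machine:
+# pull the staged coverage output from the benchmarking data machine hourly
+0 * * * * rsync -az benchmarkdata:coverage-out/out/ /var/www/coverage.nodejs.org/
+```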
+
+# Coverage Job
+
+The coverage job follows the same pattern as our other build jobs in order
+to check out the version of node to be built/tested. It requires the
+following additions:
+
+1. Build/test with the coverage targets. This is currently:
+
+   ```
+   ./configure --coverage
+   make coverage-clean
+   NODE_TEST_DIR=${HOME}/node-tmp PYTHON=python COVTESTS=test-ci make coverage -j $(getconf _NPROCESSORS_ONLN)
+   ```
+
+2. Generate the HTML summary page and push the results to the benchmarking
+   data machine (an example of the `index.csv` record this produces is shown
+   after this list):
+
+   ```
+   #!/bin/bash
+
+   # copy the coverage results to the directory where we keep them,
+   # generate the summaries and transfer to the benchmarking data
+   # machine from which the website will pull them
+
+   export PATH="$(pwd):$PATH"
+
+   # copy over results
+   COMMIT_ID=$(git rev-parse --short=16 HEAD)
+   mkdir -p "$HOME/coverage-out"
+   OUTDIR="$HOME/coverage-out/out"
+   mkdir -p "$OUTDIR"
+   rm -rf "$OUTDIR/coverage-$COMMIT_ID" || true
+   cp -r coverage "$OUTDIR/coverage-$COMMIT_ID"
+
+   # add entry into the index and generate the html version
+   JSCOVERAGE=$(grep -B1 Lines coverage/index.html | \
+     head -n1 | grep -o '[0-9\.]*')
+   CXXCOVERAGE=$(grep -A3 Lines coverage/cxxcoverage.html | \
+     grep style | grep -o '[0-9]\{1,3\}\.[0-9]\{1,2\}')
+   NOW=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
+
+   echo "$JSCOVERAGE,$CXXCOVERAGE,$NOW,$COMMIT_ID" >> "$OUTDIR/index.csv"
+
+   cd "$OUTDIR/.."
+   "$WORKSPACE/testing/coverage/generate-index-html.py"
+
+   # transfer results to the machine where coverage data is staged
+   rsync -r out coveragedata:coverage-out
+   ```
+
+The current setup depends on past runs being in /home/iojs/coverage-out/out
+on the machine on which the job runs, so that the generated index includes
+both the current and past data. For this and other reasons described in the
+other sections, the job is pegged to run on
+[iojs-softlayer-benchmark](https://ci.nodejs.org/computer/iojs-softlayer-benchmark/).
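+
+Each run appends one comma-separated record to `out/index.csv`, in the order
+the script above echoes the fields: JS coverage, C++ coverage, UTC timestamp,
+short commit id. Purely as an illustration (the values shown are made up):
+
+```
+# peek at the most recent record appended by the publish script above
+tail -n1 "$HOME/coverage-out/out/index.csv"
+# 92.45,85.12,2018-01-04T23:00:00Z,56fffa6e0d375209
+```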
+
+# Transfer to the benchmarking data machine
+
+The rsync from the machine on which the job runs to the benchmarking
+data machine requires an ssh key. Currently we have pegged the job to the
+benchmarking machine
+[iojs-softlayer-benchmark](https://ci.nodejs.org/computer/iojs-softlayer-benchmark/),
+have installed the key there, and have added an entry in
+the `.ssh/config` file for the iojs user so that connections to the
+'coveragedata' host go to the benchmarking data machine and use the correct
+key (this uses the SoftLayer internal network as opposed to the public IP):
+
+```
+Host coveragedata
+  HostName 10.52.6.151
+  User benchmark
+  IdentityFile ~/coverage-out/key/id_rsa
+```
+
+The results are pushed to /home/benchmark/coverage-out/out.
+
+# Transfer to the website
+
+As mentioned earlier, the website will pull updates hourly from
+/home/benchmark/coverage-out/out and put them in the right place to be
+served at coverage.nodejs.org. The key required to do this is already in
+place in order to support the similar process for benchmarking.nodejs.org.
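+
+To sanity-check the transfer path by hand, something like the following can
+be run as the iojs user on the benchmark machine. This is only a sketch; it
+assumes the `coveragedata` alias above is in place and the staging layout
+described in this doc:
+
+```
+# list the most recently staged coverage directories on the
+# benchmarking data machine
+ssh coveragedata 'ls coverage-out/out | tail -n 5'
+
+# preview what a push from a finished job would send, without sending it
+cd "$HOME/coverage-out" && rsync -r --dry-run out coveragedata:coverage-out
+```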
diff --git a/coverage/gcovr-patches.diff b/coverage/gcovr-patches.diff
new file mode 100644
index 0000000..175f6ee
--- /dev/null
+++ b/coverage/gcovr-patches.diff
@@ -0,0 +1,23 @@
+diff --git a/scripts/gcovr b/scripts/gcovr
+index 034779c86d29..e68b239c424f 100755
+--- a/scripts/gcovr
++++ b/scripts/gcovr
+@@ -496,7 +496,7 @@ def process_gcov_data(data_fname, covdata, options):
+     if filtered_fname is None:
+         if options.verbose:
+             sys.stdout.write("  Filtering coverage data for file %s\n" % fname)
+-        return
++        #return
+     #
+     # Return if the filename matches the exclude pattern
+     #
+@@ -2141,6 +2141,9 @@ if options.objdir:
+ for i in range(0, len(options.exclude)):
+     options.exclude[i] = re.compile(options.exclude[i])
+ 
++if options.output is not None:
++    options.output = os.path.abspath(options.output)
++
+ if options.root is not None:
+     if not options.root:
+         sys.stderr.write(
diff --git a/coverage/generate-index-html.py b/coverage/generate-index-html.py
new file mode 100755
index 0000000..695b597
--- /dev/null
+++ b/coverage/generate-index-html.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import datetime
+
+with open('out/index.csv') as index:
+    index_csv = filter(lambda line: line, index.read().split('\n'))
+
+with open('out/index.html', 'w') as out:
+    out.write(
+'''
+<!-- page skeleton (markup not reproduced here): title "Node.js Code
+     Coverage", header "Node.js Nightly Code Coverage", a "Node.js Core"
+     link, and a table header with the columns
+     Date (UTC) | HEAD | JS Coverage | C++ Coverage -->
+''')
+    for line in reversed(index_csv):
+        jscov, cxxcov, date, sha = line.split(',')
+        date = datetime.datetime.strptime(date, '%Y-%m-%dT%H:%M:%S%fZ').strftime("%d/%m/%Y %H:%M")
+        out.write('''
+<!-- one table row (markup not reproduced): {0} = date, {1} = commit sha,
+     {2} = JS coverage %, {3} = C++ coverage % -->
+'''.format(date, sha, float(jscov), float(cxxcov)))
+    out.write('''
+<!-- closing markup for the table and page -->
+''')
diff --git a/coverage/patches.diff b/coverage/patches.diff
new file mode 100644
index 0000000..6b3d41f
--- /dev/null
+++ b/coverage/patches.diff
@@ -0,0 +1,131 @@
+diff --git a/.gitignore b/.gitignore
+index c7361af80c79..e56b7f913845 100644
+--- a/.gitignore
++++ b/.gitignore
+@@ -21,6 +21,8 @@ node_g
+ icu_config.gypi
+ 
+ /out
++/coverage
++/lib_
+ 
+ # various stuff that VC++ produces/uses
+ Debug/
+diff --git a/lib/internal/bootstrap_node.js b/lib/internal/bootstrap_node.js
+index 27f05a4fcf14..ae0fed9e1c00 100644
+--- a/lib/internal/bootstrap_node.js
++++ b/lib/internal/bootstrap_node.js
+@@ -42,6 +42,7 @@
+     NativeModule.require('internal/process/stdio').setup();
+     _process.setupKillAndExit();
+     _process.setupSignalHandlers();
++    NativeModule.require('internal/process/write-coverage').setup();
+ 
+     // Do not initialize channel in debugger agent, it deletes env variable
+     // and the main thread won't see it.
+diff --git a/lib/internal/process/write-coverage.js b/lib/internal/process/write-coverage.js
+new file mode 100644
+index 000000000000..666939bc3389
+--- /dev/null
++++ b/lib/internal/process/write-coverage.js
+@@ -0,0 +1,46 @@
++'use strict';
++const process = require('process');
++const path = require('path');
++const fs = require('fs');
++const mkdirSync = fs.mkdirSync;
++const writeFileSync = fs.writeFileSync;
++
++var isWritingCoverage = false;
++function writeCoverage() {
++  if (isWritingCoverage || !global.__coverage__) {
++    return;
++  }
++  isWritingCoverage = true;
++
++  const dirname = path.join(path.dirname(process.execPath), '.coverage');
++  const filename = `coverage-${process.pid}-${Date.now()}.json`;
++  try {
++    mkdirSync(dirname);
++  } catch (err) {
++    if (err.code !== 'EEXIST') {
++      console.error(err);
++      return;
++    }
++  }
++
++  const target = path.join(dirname, filename);
++  const coverageInfo = JSON.stringify(global.__coverage__);
++  try {
++    writeFileSync(target, coverageInfo);
++  } catch (err) {
++    console.error(err);
++  }
++}
++
++function setup() {
++  var reallyReallyExit = process.reallyExit;
++
++  process.reallyExit = function(code) {
++    writeCoverage();
++    reallyReallyExit(code);
++  };
++
++  process.on('exit', writeCoverage);
++}
++
++exports.setup = setup;
+diff --git a/node.gyp b/node.gyp
+index 2254a6e..2e91bd9 100644
+--- a/node.gyp
++++ b/node.gyp
+@@ -86,6 +86,7 @@
+       'lib/internal/process/promises.js',
+       'lib/internal/process/stdio.js',
+       'lib/internal/process/warning.js',
++      'lib/internal/process/write-coverage.js',
+       'lib/internal/process.js',
+       'lib/internal/readline.js',
+       'lib/internal/repl.js',
+diff --git a/test/common.js b/test/common.js
+index 5aefdc3bcee5..750c134d33ab 100644
+--- a/test/common.js
++++ b/test/common.js
+@@ -258,6 +258,9 @@ exports.platformTimeout = function(ms) {
+   if (process.config.target_defaults.default_configuration === 'Debug')
+     ms = 2 * ms;
+ 
++  if (global.__coverage__)
++    ms = 4 * ms;
++
+   if (exports.isAix)
+     return 2 * ms; // default localhost speed is slower on AIX
+ 
+@@ -348,7 +351,7 @@ function leakedGlobals() {
+     if (-1 === knownGlobals.indexOf(global[val]))
+       leaked.push(val);
+ 
+-  return leaked;
++  return leaked.filter((varname) => !/^__cov/.test(varname));
+ }
+ exports.leakedGlobals = leakedGlobals;
+ 
+diff --git a/test/parallel/test-fs-sync-fd-leak.js b/test/parallel/test-fs-sync-fd-leak.js
+index f7cfd25f4b9b..80ad8cf6b705 100644
+--- a/test/parallel/test-fs-sync-fd-leak.js
++++ b/test/parallel/test-fs-sync-fd-leak.js
+@@ -1,8 +1,13 @@
+ 'use strict';
+-require('../common');
++const common = require('../common');
+ var assert = require('assert');
+ var fs = require('fs');
+ 
++if (global.__coverage__) {
++  common.skip('Not working with coverage');
++  return;
++}
++
+ // ensure that (read|write|append)FileSync() closes the file descriptor
+ fs.openSync = function() {
+   return 42;
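
A quick way to see the effect of write-coverage.js above after an
instrumented test run: each exiting Node.js process serializes
`global.__coverage__` to a JSON file in a `.coverage` directory next to the
binary. The path below assumes a default build tree (the exact location
derives from `path.dirname(process.execPath)`):

```
# sketch: confirm the instrumented binary dropped coverage JSON files
ls out/Release/.coverage/coverage-*.json | wc -l    # one file per exited process
head -c 200 "$(ls -t out/Release/.coverage/coverage-*.json | head -n1)"; echo
```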