diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index 3e3a65364..ea91d6162 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -91,7 +91,7 @@ jobs:
          _numpy_version: ${{ matrix.numpy-version }}
        run: |
          python -um pip install --upgrade --upgrade-strategy eager wheel
-          python -um pip install --upgrade --upgrade-strategy eager coverage numpy$_numpy_version
+          python -um pip install --upgrade --upgrade-strategy eager numpy$_numpy_version
          # Install Nutils from `dist` dir created in job `build-python-package`.
          python -um pip install "$_wheel[import_gmsh,export_mpl]"
      - name: Install Scipy
@@ -103,11 +103,40 @@ jobs:
          python -um pip install --upgrade --upgrade-strategy eager mkl
          python -um devtools.gha.configure_mkl
      - name: Test
-        run: python -um coverage run -m unittest discover -b -q -t . -s tests
-      - name: Post-process coverage
-        run: python -um devtools.gha.coverage_report_xml
-      - name: Upload coverage
-        uses: codecov/codecov-action@v3
+        env:
+          COVERAGE_ID: ${{ matrix.name }}
+        run: python -um devtools.gha.unittest
+      - name: Upload coverage artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: _coverage_${{ matrix.name }}
+          path: target/coverage/
+          if-no-files-found: error
+  process-coverage:
+    if: ${{ always() }}
+    needs: test
+    name: 'Test coverage'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Download coverage artifacts
+        uses: actions/download-artifact@v4
+        with:
+          pattern: _coverage_*
+          path: target/coverage
+          merge-multiple: true
+      - name: Generate summary
+        run: python -um devtools.gha.report_coverage
+      - name: Upload lcov artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: coverage
+          path: target/coverage/coverage.info
+      - name: Delete temporary coverage artifacts
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: python -um devtools.gha.delete_coverage_artifacts
  test-examples:
    needs: build-python-package
    name: 'Test examples ${{ matrix.os }}'
diff
--git a/devtools/gha/api.py b/devtools/gha/api.py
new file mode 100644
index 000000000..490d36e72
--- /dev/null
+++ b/devtools/gha/api.py
@@ -0,0 +1,37 @@
+# Minimal GitHub REST API client used by the devtools.gha scripts.
+# Reads the token from GITHUB_TOKEN, falling back to an interactive prompt.
+from http.client import HTTPSConnection
+import json
+import os
+
+_token = os.environ.get('GITHUB_TOKEN')
+if not _token:
+    import getpass
+    _token = getpass.getpass('GitHub token: ')
+
+repo = os.environ.get('GITHUB_REPOSITORY', 'evalf/nutils')
+
+host = 'api.github.com'
+_conn = HTTPSConnection(host)
+
+def _request(method, url, *, desired_status=200):
+    # Issue a single request on the shared connection and return the raw body;
+    # any status other than `desired_status` is treated as a hard error.
+    _conn.request(
+        method,
+        url,
+        headers={
+            'Host': host,
+            'User-Agent': 'Nutils devtools',
+            'Accept': 'application/vnd.github+json',
+            'Authorization': f'Bearer {_token}',
+            'X-GitHub-Api-Version': '2022-11-28',
+        },
+    )
+    response = _conn.getresponse()
+    if response.status != desired_status:
+        raise RuntimeError(f'ERROR: {method} https://{host}{url} failed: {response.status} {response.reason}')
+    return response.read()
+
+def list_workflow_run_artifacts(run_id: str):
+    # TODO: implement pagination: https://docs.github.com/en/rest/using-the-rest-api/using-pagination-in-the-rest-api?apiVersion=2022-11-28
+    return json.loads(_request('GET', f'/repos/{repo}/actions/runs/{run_id}/artifacts'))['artifacts']
+
+def delete_artifact(artifact_id: str):
+    _request('DELETE', f'/repos/{repo}/actions/artifacts/{artifact_id}', desired_status=204)
diff --git a/devtools/gha/coverage_report_xml.py b/devtools/gha/coverage_report_xml.py
deleted file mode 100644
index 795bdb8c9..000000000
--- a/devtools/gha/coverage_report_xml.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import sys
-import re
-import os.path
-from typing import Sequence
-from xml.etree import ElementTree
-from pathlib import Path
-from coverage import Coverage
-
-paths = []
-for path in sys.path:
-    try:
-        paths.append(str(Path(path).resolve()).lower()+os.path.sep)
-    except FileNotFoundError:
-        pass
-paths = list(sorted(paths, key=len, reverse=True))
-unix_paths = tuple(p.replace('\\', '/') for p in paths)
-packages = tuple(p.replace('/', '.') for p in unix_paths)
-
-dst = Path('coverage.xml')
-
-# Generate `coverage.xml` with absolute file and package names.
-cov = Coverage()
-cov.load()
-cov.xml_report(outfile=str(dst))
-
-# Load the report, remove the largest prefix in `packages` from attribute
-# `name` of element `package`, if any, and similarly the largest prefix in
-# `paths` from attribute `filename` of element `class` and from the content of
-# element `source`. Matching prefixes is case insensitive for case insensitive
-# file systems.
-
-
-def remove_prefix(value: str, prefixes: Sequence[str]) -> str:
-    lvalue = value.lower()
-    for prefix in prefixes:
-        if lvalue.startswith(prefix):
-            return value[len(prefix):]
-    return value
-
-
-root = ElementTree.parse(str(dst))
-for elem in root.iter('package'):
-    for package in packages:
-        name = elem.get('name')
-        if name:
-            elem.set('name', remove_prefix(name, packages))
-    for elem in root.iter('class'):
-        filename = elem.get('filename')
-        if filename:
-            elem.set('filename', remove_prefix(filename, unix_paths))
-    for elem in root.iter('source'):
-        text = elem.text
-        if text:
-            elem.text = remove_prefix(text, paths)
-root.write('coverage.xml')
diff --git a/devtools/gha/delete_coverage_artifacts.py b/devtools/gha/delete_coverage_artifacts.py
new file mode 100644
index 000000000..5f8a5c212
--- /dev/null
+++ b/devtools/gha/delete_coverage_artifacts.py
@@ -0,0 +1,10 @@
+from .
import api
+import os
+run_id = os.environ.get('GITHUB_RUN_ID')
+if not run_id:
+    raise RuntimeError('ERROR: environment variable GITHUB_RUN_ID not set')
+
+for artifact in api.list_workflow_run_artifacts(run_id):
+    if artifact['name'].startswith('_coverage_'):
+        print(f'deleting {artifact["name"]}')
+        api.delete_artifact(artifact['id'])
diff --git a/devtools/gha/report_coverage.py b/devtools/gha/report_coverage.py
new file mode 100644
index 000000000..8cfea3a5f
--- /dev/null
+++ b/devtools/gha/report_coverage.py
@@ -0,0 +1,116 @@
+import array
+import json
+import os
+from pathlib import Path
+import subprocess
+
+cov_dir = Path() / 'target' / 'coverage'
+
+# Load and merge coverage data. Per-line status codes (written by
+# devtools.gha.unittest): 0 = not executable, 1 = executable but not hit,
+# 2 = hit; merging takes the elementwise maximum over all parts.
+coverage = {}
+for part in cov_dir.glob('*.json'):
+    with part.open('r') as f:
+        part = json.load(f)
+    for file_name, part_file_coverage in part.items():
+        coverage.setdefault(file_name, []).append(part_file_coverage)
+coverage = {file_name: array.array('B', list(map(max, *file_coverage)) if len(file_coverage) > 1 else file_coverage[0]) for file_name, file_coverage in coverage.items()}
+
+# Annotate lines without coverage.
+for file_name, file_coverage in sorted(coverage.items()):
+    i = 0
+    while i < len(file_coverage):
+        j = i
+        if file_coverage[i] == 1:
+            # Extend `j` over the whole run of consecutive uncovered lines.
+            while j + 1 < len(file_coverage) and file_coverage[j + 1] == 1:
+                j += 1
+            if i == j:
+                print(f'::warning file={file_name},line={i},endLine={j},title=Uncovered lines::Line {i} is not covered by tests')
+            else:
+                print(f'::warning file={file_name},line={i},endLine={j},title=Uncovered lines::Lines {i}-{j} are not covered by tests')
+        i = j + 1
+
+# Generate lcov.
+with (cov_dir / 'coverage.info').open('w') as f:
+    print('TN:unittest', file=f)
+    for file_name, file_coverage in sorted(coverage.items()):
+        print(f'SF:{file_name}', file=f)
+        print('FNF:0', file=f)
+        print('FNH:0', file=f)
+        print('BRF:0', file=f)
+        print('BRH:0', file=f)
+        # Index 0 is padding (lines are 1-based); DA records hit count 0 or 1.
+        for i, status in enumerate(file_coverage[1:], 1):
+            if status:
+                print(f'DA:{i},{status - 1}', file=f)
+        hit = sum(status == 2 for status in file_coverage)
+        found = sum(status != 0 for status in file_coverage)
+        print(f'LH:{hit}', file=f)
+        print(f'LF:{found}', file=f)
+        print('end_of_record', file=f)
+
+# If this is a PR, build patch coverage data.
+patch_coverage = {}
+if os.environ.get('GITHUB_EVENT_NAME', None) == 'pull_request':
+    base = os.environ.get('GITHUB_BASE_REF')
+    subprocess.run(['git', 'fetch', '--depth=1', 'origin', base], check=True, stdin=subprocess.DEVNULL)
+    patch = iter(subprocess.run(['git', 'diff', '-U0', f'origin/{base}', '--'], check=True, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, text=True).stdout.splitlines())
+    for line in patch:
+        # Skip to a file with coverage.
+        if not line.startswith('+++ b/'):
+            continue
+        file_name = line[6:].rstrip()
+        if (file_coverage := coverage.get(file_name)) is None:
+            continue
+        # Copy the full coverage and mask out unchanged lines.
+        patch_coverage[file_name] = patch_file_coverage = array.array('B', file_coverage)
+        prev_offset = 0
+        for line in patch:
+            if line.startswith('--- '):
+                break
+            if line.startswith('@@ '):
+                # Hunk header: '@@ -a,b +c,d @@'; keep only lines in [c, c+d).
+                chunk = line.split(' ')[2]
+                assert chunk.startswith('+')
+                if ',' in chunk:
+                    offset, count = map(int, chunk[1:].split(','))
+                else:
+                    offset = int(chunk[1:])
+                    count = 1
+                for i in range(prev_offset, offset):
+                    patch_file_coverage[i] = 0
+                prev_offset = offset + count
+        for i in range(prev_offset, len(patch_file_coverage)):
+            patch_file_coverage[i] = 0
+
+# Generate summary.
+header = ['Name', 'Stmts', 'Miss', 'Cover']
+align = ['<', '>', '>', '>']
+if patch_coverage:
+    header += ['Patch stmts', 'Patch miss', 'Patch cover']
+    align += ['>'] * 3
+table = []
+def row_stats(*data):
+    # Aggregate hit/miss counts over one or more coverage arrays and return
+    # the three summary cells: total statements, missed, percent covered.
+    hit = 0
+    miss = 0
+    for file_coverage in data:
+        hit += file_coverage.count(2)
+        miss += file_coverage.count(1)
+    total = hit + miss
+    percentage = 100 * hit / (hit + miss) if hit + miss else 100.
+    return [str(total), str(miss), f'{percentage:.1f}%']
+for file_name, file_coverage in sorted(coverage.items()):
+    row = [f'`{file_name}`'] + row_stats(file_coverage)
+    if (patch_file_coverage := patch_coverage.get(file_name)):
+        row += row_stats(patch_file_coverage)
+    elif patch_coverage:
+        row += [''] * 3
+    table.append(row)
+row = ['TOTAL'] + row_stats(*coverage.values())
+if patch_coverage:
+    row += row_stats(*patch_coverage.values())
+table.append(row)
+with open(os.environ.get('GITHUB_STEP_SUMMARY', None) or cov_dir / 'summary.md', 'w') as f:
+    # Markdown table with per-column widths and alignment markers.
+    width = tuple(max(map(len, columns)) for columns in zip(header, *table))
+    print('| ' + ' | '.join(f'{{:<{w}}}'.format(h) for w, h in zip(width, header)) + ' |', file=f)
+    print('| ' + ' | '.join(':' + '-' * (w - 1) if a == '<' else '-' * (w - 1) + ':' for a, w in zip(align, width)) + ' |', file=f)
+    fmt = '| ' + ' | '.join(f'{{:{a}{w}}}' for a, w in zip(align, width)) + ' |'
+    for row in table:
+        print(fmt.format(*row), file=f)
diff --git a/devtools/gha/unittest.py b/devtools/gha/unittest.py
new file mode 100644
index 000000000..06e1dbbd7
--- /dev/null
+++ b/devtools/gha/unittest.py
@@ -0,0 +1,52 @@
+import importlib.util
+import inspect
+import json
+import os
+from pathlib import Path
+import sys
+import unittest
+
+# Root directory of the installed nutils package (with trailing separator).
+source = importlib.util.find_spec('nutils').origin
+assert source.endswith(os.sep + '__init__.py')
+source = source[:-11]
+coverage = {}
+
+if hasattr(sys, 'monitoring'):
+
+    def start(code, _):
+        if isinstance(code.co_filename, str) and code.co_filename.startswith(source) and not
sys.monitoring.get_local_events(sys.monitoring.COVERAGE_ID, code):
+            # First sighting of this file: size the per-line status array
+            # (index 0 unused; lines are 1-based) and mark every executable
+            # line, as reported by co_lines(), with status 1.
+            if (file_coverage := coverage.get(code.co_filename)) is None:
+                with open(code.co_filename, 'rb') as f:
+                    nlines = sum(1 for _ in f)
+                coverage[code.co_filename] = file_coverage = [0] * (nlines + 1)
+                for _, _, lineno in code.co_lines():
+                    if lineno:
+                        file_coverage[lineno] = 1
+            sys.monitoring.set_local_events(sys.monitoring.COVERAGE_ID, code, sys.monitoring.events.LINE)
+            # Recurse into nested code objects (functions, comprehensions).
+            for obj in code.co_consts:
+                if inspect.iscode(obj):
+                    start(obj, None)
+        return sys.monitoring.DISABLE
+
+    def line(code, line_number):
+        # Mark the line as hit; DISABLE stops further events for this line.
+        coverage[code.co_filename][line_number] = 2
+        return sys.monitoring.DISABLE
+
+    sys.monitoring.register_callback(sys.monitoring.COVERAGE_ID, sys.monitoring.events.PY_START, start)
+    sys.monitoring.register_callback(sys.monitoring.COVERAGE_ID, sys.monitoring.events.LINE, line)
+    sys.monitoring.use_tool_id(sys.monitoring.COVERAGE_ID, 'test')
+    sys.monitoring.set_events(sys.monitoring.COVERAGE_ID, sys.monitoring.events.PY_START)
+
+loader = unittest.TestLoader()
+suite = loader.discover('tests', top_level_dir='.')
+runner = unittest.TextTestRunner(buffer=True)
+result = runner.run(suite)
+
+# Re-key coverage by repo-relative posix path; the slice keeps the trailing
+# 'nutils' + separator (7 chars) of the package root prefix.
+coverage = {file_name[len(source) - 7:].replace('\\', '/'): file_coverage for file_name, file_coverage in coverage.items()}
+cov_dir = (Path() / 'target' / 'coverage')
+cov_dir.mkdir(parents=True, exist_ok=True)
+cov_file = cov_dir / (os.environ.get('COVERAGE_ID', 'coverage') + '.json')
+with cov_file.open('w') as f:
+    json.dump(coverage, f)
+
+sys.exit(0 if result.wasSuccessful() else 1)