GHA: replace coverage with custom framework
Python 3.12 has a significant regression in coverage analysis (related issue: python/cpython#107674). This patch replaces `coverage` with a custom framework built on `sys.monitoring`, which is only available as of Python 3.12.
1 parent fd8670b · commit 9a9fd57 · showing 6 changed files with 252 additions and 61 deletions.
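For context, a minimal sketch of the `sys.monitoring` line-coverage mechanism the new framework builds on (Python 3.12+). The tool name, the `demo` function, and the `on_line` callback are illustrative only, not code from this commit:

```python
import sys

mon = sys.monitoring
hits = set()

def on_line(code, line_number):
    # Record the executed line; returning DISABLE stops further LINE events
    # for this exact code location, so each line is reported at most once.
    hits.add(line_number)
    return mon.DISABLE

def demo(n):
    total = 0
    for i in range(n):
        total += i * i
    return total

# Claim a tool id, register a LINE callback, and enable LINE events only for
# the demo function's code object (the patch does this per monitored code
# object from a PY_START callback).
mon.use_tool_id(mon.COVERAGE_ID, 'example')
mon.register_callback(mon.COVERAGE_ID, mon.events.LINE, on_line)
mon.set_local_events(mon.COVERAGE_ID, demo.__code__, mon.events.LINE)

demo(3)

coverable = sorted({l for _, _, l in demo.__code__.co_lines() if l})
print('coverable:', coverable)
print('executed: ', sorted(hits))
```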
@@ -0,0 +1,38 @@
from http.client import HTTPSConnection
import json
import os
import sys

_token = os.environ.get('GITHUB_TOKEN')
if not _token:
    import getpass
    _token = getpass.getpass('GitHub token: ')

repo = os.environ.get('GITHUB_REPOSITORY', 'evalf/nutils')

host = 'api.github.com'
_conn = HTTPSConnection(host)

def _request(method, url, *, desired_status=200):
    _conn.request(
        method,
        url,
        headers={
            'Host': host,
            'User-Agent': 'Nutils devtools',
            'Accept': 'application/vnd.github+json',
            'Authorization': f'Bearer {_token}',
            'X-GitHub-Api-Version': '2022-11-28',
        },
    )
    response = _conn.getresponse()
    if response.status != desired_status:
        raise RuntimeError(f'ERROR: {method} https://{host}{url} failed: {response.status} {response.reason}')
    return response.read()

def list_workflow_run_artifacts(run_id: str):
    # TODO: implement pagination: https://docs.github.com/en/rest/using-the-rest-api/using-pagination-in-the-rest-api?apiVersion=2022-11-28
    return json.loads(_request('GET', f'/repos/{repo}/actions/runs/{run_id}/artifacts'))['artifacts']

def delete_artifact(artifact_id: str):
    _request('DELETE', f'/repos/{repo}/actions/artifacts/{artifact_id}', desired_status=204)
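As a possible direction for the pagination TODO in `list_workflow_run_artifacts` above, a minimal sketch (not part of this commit): it assumes the standard `per_page`/`page` query parameters of the GitHub REST API and reuses the `_request` helper; the function name is hypothetical.

```python
def list_workflow_run_artifacts_paginated(run_id: str, per_page: int = 100):
    # Hypothetical paginated variant: request pages until one comes back
    # with fewer than per_page entries.
    artifacts = []
    page = 1
    while True:
        batch = json.loads(_request(
            'GET',
            f'/repos/{repo}/actions/runs/{run_id}/artifacts?per_page={per_page}&page={page}',
        ))['artifacts']
        artifacts.extend(batch)
        if len(batch) < per_page:
            return artifacts
        page += 1
```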
This file was deleted.
@@ -0,0 +1,10 @@
import os

from . import api

run_id = os.environ.get('GITHUB_RUN_ID')
if not run_id:
    raise RuntimeError('ERROR: environment variable GITHUB_RUN_ID not set')

for artifact in api.list_workflow_run_artifacts(run_id):
    if artifact['name'].startswith('_coverage_'):
        print(f'deleting {artifact["name"]}')
        api.delete_artifact(artifact['id'])
@@ -0,0 +1,117 @@
import array
import itertools
import json
import os
from pathlib import Path
import subprocess

cov_dir = Path() / 'target' / 'coverage'

# Load and merge coverage data. Each JSON file maps a file name to a list of
# per-line statuses: 0 = not executable, 1 = executable but not hit, 2 = hit.
# Merging takes the element-wise maximum over all runs.
coverage = {}
for part in cov_dir.glob('*.json'):
    with part.open('r') as f:
        part = json.load(f)
    for file_name, part_file_coverage in part.items():
        coverage.setdefault(file_name, []).append(part_file_coverage)
coverage = {file_name: array.array('B', list(map(max, *file_coverage)) if len(file_coverage) > 1 else file_coverage[0]) for file_name, file_coverage in coverage.items()}
# Annotate lines without coverage.
for file_name, file_coverage in sorted(coverage.items()):
    i = 0
    while i < len(file_coverage):
        j = i
        if file_coverage[i] == 1:
            while j + 1 < len(file_coverage) and file_coverage[j + 1] == 1:
                j += 1
            if i == j:
                print(f'::warning file={file_name},line={i},endLine={j},title=Uncovered lines::Line {i} is not covered by tests')
            else:
                print(f'::warning file={file_name},line={i},endLine={j},title=Uncovered lines::Lines {i}-{j} are not covered by tests')
        i = j + 1

# Generate lcov.
with (cov_dir / 'coverage.info').open('w') as f:
    print('TN:unittest', file=f)
    for file_name, file_coverage in sorted(coverage.items()):
        print(f'SF:{file_name}', file=f)
        print('FNF:0', file=f)
        print('FNH:0', file=f)
        print('BRF:0', file=f)
        print('BRH:0', file=f)
        for i, status in enumerate(file_coverage[1:], 1):
            if status:
                print(f'DA:{i},{status - 1}', file=f)
        hit = sum(status == 2 for status in file_coverage)
        found = sum(status != 0 for status in file_coverage)
        print(f'LH:{hit}', file=f)
        print(f'LF:{found}', file=f)
        print('end_of_record', file=f)
# If this is a PR, build patch coverage data.
patch_coverage = {}
if os.environ.get('GITHUB_EVENT_NAME', None) == 'pull_request':
    base = os.environ.get('GITHUB_BASE_REF')
    subprocess.run(['git', 'fetch', '--depth=1', 'origin', base], check=True, stdin=subprocess.DEVNULL)
    patch = iter(subprocess.run(['git', 'diff', '-U0', f'origin/{base}', '--'], check=True, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, text=True).stdout.splitlines())
    for line in patch:
        # Skip to a file with coverage.
        if not line.startswith('+++ b/'):
            continue
        file_name = line[6:].rstrip()
        if (file_coverage := coverage.get(file_name)) is None:
            continue
        # Copy the full coverage and mask out unchanged lines.
        patch_coverage[file_name] = patch_file_coverage = array.array('B', file_coverage)
        prev_offset = 0
        for line in patch:
            if line.startswith('--- '):
                break
            if line.startswith('@@ '):
                chunk = line.split(' ')[2]
                assert chunk.startswith('+')
                if ',' in chunk:
                    offset, count = map(int, chunk[1:].split(','))
                else:
                    offset = int(chunk[1:])
                    count = 1
                for i in range(prev_offset, offset):
                    patch_file_coverage[i] = 0
                prev_offset = offset + count
        for i in range(prev_offset, len(patch_file_coverage)):
            patch_file_coverage[i] = 0
# Generate summary.
header = ['Name', 'Stmts', 'Miss', 'Cover']
align = ['<', '>', '>', '>']
if patch_coverage:
    header += ['Patch stmts', 'Patch miss', 'Patch cover']
    align += ['>'] * 3
table = []

def row_stats(*coverages):
    # Count hit (status 2) and missed (status 1) statements over one or more
    # per-file coverage arrays.
    hit = 0
    miss = 0
    for file_coverage in coverages:
        hit += file_coverage.count(2)
        miss += file_coverage.count(1)
    total = hit + miss
    percentage = 100 * hit / (hit + miss) if hit + miss else 100.
    return [str(total), str(miss), f'{percentage:.1f}%']

for file_name, file_coverage in sorted(coverage.items()):
    row = [f'`{file_name}`'] + row_stats(file_coverage)
    if (patch_file_coverage := patch_coverage.get(file_name)):
        row += row_stats(patch_file_coverage)
    elif patch_coverage:
        row += [''] * 3
    table.append(row)
row = ['TOTAL'] + row_stats(*coverage.values())
if patch_coverage:
    row += row_stats(*patch_coverage.values())
table.append(row)
with open(os.environ.get('GITHUB_STEP_SUMMARY', None) or cov_dir / 'summary.md', 'w') as f:
    width = tuple(max(map(len, columns)) for columns in zip(header, *table))
    print('| ' + ' | '.join(f'{{:<{w}}}'.format(h) for w, h in zip(width, header)) + ' |', file=f)
    print('| ' + ' | '.join(':' + '-' * (w - 1) if a == '<' else '-' * (w - 1) + ':' for a, w in zip(align, width)) + ' |', file=f)
    fmt = '| ' + ' | '.join(f'{{:{a}{w}}}' for a, w in zip(align, width)) + ' |'
    for row in table:
        print(fmt.format(*row), file=f)
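To make the data flow above concrete, a small self-contained illustration with hypothetical values of how per-run status arrays merge and map to lcov records (the encoding, taken from the test runner below, is 0 = not executable, 1 = executable but missed, 2 = hit):

```python
import array

# Two hypothetical runs of the same 5-line file; index 0 is unused padding,
# line numbers are 1-based.
run_a = [0, 2, 1, 0, 1, 2]
run_b = [0, 1, 2, 0, 1, 2]

# Element-wise maximum merges the runs: a line counts as hit if any run hit it.
merged = array.array('B', map(max, run_a, run_b))
assert list(merged) == [0, 2, 2, 0, 1, 2]

# lcov DA records carry an execution count of status - 1, i.e. 0 or 1.
for lineno, status in enumerate(merged[1:], 1):
    if status:
        print(f'DA:{lineno},{status - 1}')
print('LH:', sum(s == 2 for s in merged))  # lines hit
print('LF:', sum(s != 0 for s in merged))  # lines found (executable)
```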
@@ -0,0 +1,52 @@
import importlib.util
import inspect
import json
import os
from pathlib import Path
import sys
import unittest

# Locate the directory containing the nutils package; coverage is collected
# only for code under this directory.
source = importlib.util.find_spec('nutils').origin
assert source.endswith(os.sep + '__init__.py')
source = source[:-11]  # strip '__init__.py', keeping the trailing separator
coverage = {}

if hasattr(sys, 'monitoring'):

    def start(code, _):
        # On the first PY_START event for a code object under the nutils
        # source tree, seed its coverable lines with status 1, enable
        # per-code-object LINE events, and recurse into nested code objects.
        if isinstance(code.co_filename, str) and code.co_filename.startswith(source) and not sys.monitoring.get_local_events(sys.monitoring.COVERAGE_ID, code):
            if (file_coverage := coverage.get(code.co_filename)) is None:
                with open(code.co_filename, 'rb') as f:
                    nlines = sum(1 for _ in f)
                coverage[code.co_filename] = file_coverage = [0] * (nlines + 1)
            for _, _, l in code.co_lines():
                if l:
                    file_coverage[l] = 1
            sys.monitoring.set_local_events(sys.monitoring.COVERAGE_ID, code, sys.monitoring.events.LINE)
            for obj in code.co_consts:
                if inspect.iscode(obj):
                    start(obj, None)
        return sys.monitoring.DISABLE

    def line(code, line_number):
        # Mark the line as hit and disable further LINE events for this location.
        coverage[code.co_filename][line_number] = 2
        return sys.monitoring.DISABLE

    sys.monitoring.register_callback(sys.monitoring.COVERAGE_ID, sys.monitoring.events.PY_START, start)
    sys.monitoring.register_callback(sys.monitoring.COVERAGE_ID, sys.monitoring.events.LINE, line)
    sys.monitoring.use_tool_id(sys.monitoring.COVERAGE_ID, 'test')
    sys.monitoring.set_events(sys.monitoring.COVERAGE_ID, sys.monitoring.events.PY_START)

loader = unittest.TestLoader()
suite = loader.discover('tests', top_level_dir='.')
runner = unittest.TextTestRunner(buffer=True)
result = runner.run(suite)

# Make file names repository-relative by keeping only the trailing
# 'nutils/...' part, then write the per-run data.
coverage = {file_name[len(source) - 7:].replace('\\', '/'): file_coverage for file_name, file_coverage in coverage.items()}
cov_dir = Path() / 'target' / 'coverage'
cov_dir.mkdir(parents=True, exist_ok=True)
cov_file = cov_dir / (os.environ.get('COVERAGE_ID', 'coverage') + '.json')
with cov_file.open('w') as f:
    json.dump(coverage, f)

sys.exit(0 if result.wasSuccessful() else 1)
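For reference, the seeding step in the start callback above relies on code.co_lines() to enumerate a code object's coverable lines before any of them execute. A tiny standalone illustration of that step (the function f is arbitrary, and the real script counts physical lines of the source file rather than using the maximum line number):

```python
def f(x):
    if x > 0:
        return x
    return -x

# co_lines() yields (start_offset, end_offset, lineno) tuples; lineno can be
# None for bytecode ranges without a line, hence the truthiness check.
coverable = sorted({l for _, _, l in f.__code__.co_lines() if l})
file_coverage = [0] * (max(coverable) + 1)
for l in coverable:
    file_coverage[l] = 1  # executable, not yet hit; a LINE event flips it to 2
print(coverable, file_coverage)
```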