diff --git a/tests/perf/benchmark.py b/tests/perf/benchmark.py
index c1ee69c4a39..3f9bd04f857 100644
--- a/tests/perf/benchmark.py
+++ b/tests/perf/benchmark.py
@@ -79,14 +79,18 @@ def __call__(self, result_entry: pd.Series, target_entry: pd.Series) -> None:
             return
         if self.compare == "==":
             print(
-                f"[Check] abs({result_entry[self.name]=} - {target_entry[self.name]=}) < {target_entry[self.name]=} * {self.margin=}",
+                f"[Check] abs({self.name}:{result_entry[self.name]} - {self.name}:{target_entry[self.name]}) < {self.name}:{target_entry[self.name]} * {self.margin}",
             )
             assert abs(result_entry[self.name] - target_entry[self.name]) < target_entry[self.name] * self.margin
         elif self.compare == "<":
-            print(f"[Check] {result_entry[self.name]=} < {target_entry[self.name]=} * (1.0 + {self.margin=})")
+            print(
+                f"[Check] {self.name}:{result_entry[self.name]} < {self.name}:{target_entry[self.name]} * (1.0 + {self.margin})",
+            )
             assert result_entry[self.name] < target_entry[self.name] * (1.0 + self.margin)
         elif self.compare == ">":
-            print(f"[Check] {result_entry[self.name]=} > {target_entry[self.name]=} * (1.0 - {self.margin=})")
+            print(
+                f"[Check] {self.name}:{result_entry[self.name]} > {self.name}:{target_entry[self.name]} * (1.0 - {self.margin})",
+            )
             assert result_entry[self.name] > target_entry[self.name] * (1.0 - self.margin)
 
     def __init__(
@@ -287,6 +291,8 @@ def run(
         gc.collect()
 
         result = self.load_result(work_dir)
+        if result is None:
+            return None
         result = summary.average(result, keys=["task", "model", "data_group", "data"])  # Average out seeds
         return result.set_index(["task", "model", "data_group", "data"])
 
diff --git a/tests/perf/conftest.py b/tests/perf/conftest.py
index 8a990ba27a5..933cdfd4460 100644
--- a/tests/perf/conftest.py
+++ b/tests/perf/conftest.py
@@ -296,7 +296,10 @@ def fxt_model(request: pytest.FixtureRequest, fxt_model_category) -> Benchmark.M
     model: Benchmark.Model = request.param
    if fxt_model_category == "all":
         return model
-    if (fxt_model_category == "default" and model.category == "other") or fxt_model_category != model.category:
+    if fxt_model_category == "default":
+        if model.category == "other":
+            pytest.skip(f"{model.category} category model")
+    elif fxt_model_category != model.category:
         pytest.skip(f"{model.category} category model")
     return model
 
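
A minimal sketch, not part of the patch, of the relative-margin checks behind the reworded [Check] messages in the first hunk. The "==", "<", ">" operators and the margin arithmetic mirror the diff; `within_margin` and its parameter names are illustrative, not repo API.

def within_margin(result: float, target: float, compare: str, margin: float) -> bool:
    """Return True when `result` satisfies `compare` against `target` within a relative `margin`."""
    if compare == "==":
        # Symmetric tolerance: the absolute gap must stay below margin * target.
        return abs(result - target) < target * margin
    if compare == "<":
        # Upper bound: result may overshoot target by at most a relative margin.
        return result < target * (1.0 + margin)
    if compare == ">":
        # Lower bound: result may undershoot target by at most a relative margin.
        return result > target * (1.0 - margin)
    msg = f"Unsupported comparison: {compare}"
    raise ValueError(msg)

# With a 10% margin, a result of 1.05 passes a "<" check against a target of 1.0,
# while 1.15 fails, matching the asserts in the __call__ hunk above.
assert within_margin(1.05, 1.0, "<", 0.1)
assert not within_margin(1.15, 1.0, "<", 0.1)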