
Commit

restart
dweindl committed Sep 30, 2024
1 parent e2a9c33 commit 066df33
Showing 2 changed files with 11 additions and 5 deletions.
@@ -374,7 +374,7 @@ int SuiteSparse_divcomplex
 // be done via the SUITESPARSE_TIME macro, defined below:
 #define SUITESPARSE_TIMER_ENABLED
 #define SUITESPARSE_HAVE_CLOCK_GETTIME
-#define SUITESPARSE_CONFIG_TIMER omp_get_wtime
+#define SUITESPARSE_CONFIG_TIMER clock_gettime
 #if defined ( SUITESPARSE_TIMER_ENABLED )
 #if defined ( _OPENMP )
 // Avoid indirection through the library if the compilation unit
14 changes: 10 additions & 4 deletions tests/benchmark-models/test_petab_benchmark.py
@@ -46,8 +46,8 @@ class GradientCheckSettings:
     atol_sim: float = 1e-12
     rtol_sim: float = 1e-12
     # Absolute and relative tolerances for finite difference gradient checks.
-    atol_check: float = 1e-9
-    rtol_check: float = 1e-8
+    atol_check: float = 1e-3
+    rtol_check: float = 1e-2
     # Step sizes for finite difference gradient checks.
     step_sizes = [
         1e-1,
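
The hunk above relaxes the default check tolerances (atol_check 1e-9 to 1e-3, rtol_check 1e-8 to 1e-2), which are later passed to the finite difference gradient check. As a rough sketch of how such tolerances are typically combined, assuming numpy.isclose-style semantics (the actual check object is not shown in this diff, and gradients_close, grad_fd, grad_amici are hypothetical names):

    import numpy as np

    def gradients_close(grad_fd, grad_amici, rtol=1e-2, atol=1e-3):
        # Element-wise |a - b| <= atol + rtol * |b|, as numpy.isclose does;
        # the defaults here mirror the relaxed values from this commit.
        return bool(np.all(np.abs(grad_fd - grad_amici) <= atol + rtol * np.abs(grad_amici)))

Under this kind of rule, the relative tolerance dominates for large gradient entries and the absolute tolerance for entries near zero.
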
@@ -80,7 +80,12 @@ class GradientCheckSettings:
     "ignore:Importing amici.petab_objective is deprecated.:DeprecationWarning",
     r"ignore:.*petab\.v1.*instead.*:DeprecationWarning",
 )
-@pytest.mark.filterwarnings("ignore:divide by zero encountered in log10")
+@pytest.mark.filterwarnings(
+    "ignore:divide by zero encountered in log",
+    # https://github.com/AMICI-dev/AMICI/issues/18
+    "ignore:Adjoint sensitivity analysis for models with discontinuous "
+    "right hand sides .*:UserWarning",
+)
 @pytest.mark.parametrize("scale", (True, False), ids=["scaled", "unscaled"])
 @pytest.mark.parametrize(
     "sensitivity_method",
@@ -184,9 +189,10 @@ def test_benchmark_gradient(model, scale, sensitivity_method):
         expectation=expected_derivative,
         point=point,
     )
-    success = check(
+    check_result = check(
         rtol=cur_settings.rtol_check, atol=cur_settings.atol_check
     )
+    success = check_result.success
 
     if debug:
         df = pd.DataFrame(
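
The last hunk stops treating the return value of check(...) as a plain boolean and instead reads a .success attribute from the returned object. The concrete return type is not visible in this diff; a minimal sketch of the pattern with hypothetical names (DerivativeCheckResult below is an illustration, not the actual class used by the test):

    from dataclasses import dataclass

    import pandas as pd

    @dataclass
    class DerivativeCheckResult:
        # Hypothetical stand-in for what check(...) might return: an overall
        # pass/fail flag plus per-entry details that are useful for debugging.
        success: bool
        details: pd.DataFrame

    def summarize(result: DerivativeCheckResult) -> None:
        if not result.success:
            # Inspect which gradient entries violated the tolerances.
            print(result.details)

Returning a structured result instead of a bare boolean keeps per-entry information available, which fits the debug branch in the hunk above that builds a pandas DataFrame.
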
