cleanup
dweindl committed Oct 8, 2024
1 parent fb28761 commit 76811db
Showing 1 changed file with 11 additions and 15 deletions.
26 changes: 11 additions & 15 deletions tests/benchmark-models/test_petab_benchmark.py
@@ -72,6 +72,7 @@ class GradientCheckSettings:
     ss_sensitivity_mode: amici.SteadyStateSensitivityMode = (
         amici.SteadyStateSensitivityMode.integrateIfNewtonFails
     )
+    noise_level: float = 0.05


 settings = defaultdict(GradientCheckSettings)
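
The new `noise_level` field replaces the hard-coded `noise_level = 0.05` local that is removed in the last hunk below. As a minimal sketch (not part of the diff; the model names are hypothetical and only this one field is shown), the default interacts with the `defaultdict` of per-model settings like this:

    from collections import defaultdict
    from dataclasses import dataclass

    @dataclass
    class GradientCheckSettings:
        noise_level: float = 0.05  # default; matches the previously hard-coded value

    settings = defaultdict(GradientCheckSettings)
    settings["SomeModel"] = GradientCheckSettings(noise_level=0.01)  # per-model override

    # models without an explicit entry fall back to the dataclass defaults
    assert settings["AnyOtherModel"].noise_level == 0.05
    assert settings["SomeModel"].noise_level == 0.01
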
@@ -105,7 +106,7 @@ class GradientCheckSettings:
     atol_check=1e-5,
     rtol_check=1e-4,
 )
-# NOTE: Newton method fails badly for this model
+# NOTE: Newton method fails badly with ASA for this model
 settings["Blasi_CellSystems2016"] = GradientCheckSettings(
     atol_check=1e-12,
     rtol_check=1e-4,
@@ -193,10 +194,7 @@ def test_benchmark_gradient(model, scale, sensitivity_method, request):
         petab.flatten_timepoint_specific_output_overrides(petab_problem)

     # Only compute gradient for estimated parameters.
-    parameter_df_free = petab_problem.parameter_df.loc[
-        petab_problem.x_free_ids
-    ]
-    parameter_ids = list(parameter_df_free.index)
+    parameter_ids = petab_problem.x_free_ids
     cur_settings = settings[model]

     # Setup AMICI objects.
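
The intent of this hunk is that `petab_problem.x_free_ids` already provides the IDs of the estimated parameters, so the DataFrame round-trip is unnecessary. A hedged sketch of the assumed equivalence (the YAML path is hypothetical):

    import petab

    petab_problem = petab.Problem.from_yaml("path/to/problem.yaml")  # hypothetical problem

    # concise form used after this commit
    parameter_ids = petab_problem.x_free_ids

    # removed form, reproduced only for comparison
    parameter_df_free = petab_problem.parameter_df.loc[petab_problem.x_free_ids]
    assert list(parameter_df_free.index) == list(parameter_ids)
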
@@ -209,6 +207,7 @@ def test_benchmark_gradient(model, scale, sensitivity_method, request):
     amici_solver.setRelativeTolerance(cur_settings.rtol_sim)
     amici_solver.setMaxSteps(int(1e5))
     amici_solver.setSensitivityMethod(sensitivity_method)
+    # TODO: we should probably test all sensitivity modes
     amici_model.setSteadyStateSensitivityMode(cur_settings.ss_sensitivity_mode)

     amici_function, amici_derivative = simulate_petab_to_cached_functions(
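
Regarding the TODO: assuming it refers to the steady-state sensitivity modes set on the preceding line, a parametrization could iterate over the AMICI enum members (names as assumed from recent AMICI versions):

    import amici

    # candidate steady-state sensitivity modes to parameterize over
    candidate_modes = [
        amici.SteadyStateSensitivityMode.newtonOnly,
        amici.SteadyStateSensitivityMode.integrationOnly,
        amici.SteadyStateSensitivityMode.integrateIfNewtonFails,
    ]
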
@@ -224,23 +223,20 @@ def test_benchmark_gradient(model, scale, sensitivity_method, request):
         # cache=not debug,
         cache=False,
     )
-    noise_level = 0.05
     np.random.seed(cur_settings.rng_seed)

     # find a point where the derivative can be computed
     for _ in range(5):
         if scale:
-            point = np.asarray(
-                list(
-                    petab_problem.scale_parameters(
-                        dict(parameter_df_free.nominalValue)
-                    ).values()
-                )
+            point = petab_problem.x_nominal_free_scaled
+            point_noise = (
+                np.random.randn(len(point)) * cur_settings.noise_level
             )
-            point_noise = np.random.randn(len(point)) * noise_level
         else:
-            point = parameter_df_free.nominalValue.values
-            point_noise = np.random.randn(len(point)) * point * noise_level
+            point = petab_problem.x_nominal_free_unscaled
+            point_noise = (
+                np.random.randn(len(point)) * point * cur_settings.noise_level
+            )
         point += point_noise  # avoid small gradients at nominal value

         try:
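
A self-contained sketch of the perturbation applied above, under the same conventions: additive Gaussian noise on parameter scale, noise proportional to the nominal values on linear scale (the numeric values are made up):

    import numpy as np

    noise_level = 0.05  # mirrors GradientCheckSettings.noise_level
    np.random.seed(0)   # stands in for cur_settings.rng_seed

    # parameter scale: additive noise with standard deviation noise_level
    point_scaled = np.array([-1.0, 0.5, 2.0])  # e.g. log10-transformed nominal values
    point_scaled = point_scaled + np.random.randn(len(point_scaled)) * noise_level

    # linear scale: noise proportional to the (unscaled) nominal values
    point_linear = np.array([0.1, 3.0, 100.0])
    point_linear = point_linear + np.random.randn(len(point_linear)) * point_linear * noise_level
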
