Merge branch 'develop' into dae_sensitivity
dweindl authored May 25, 2023
2 parents e9881ac + fd1e22e · commit afdc731
Showing 21 changed files with 573 additions and 544 deletions.
7 changes: 5 additions & 2 deletions .github/workflows/test_petab_test_suite.yml
@@ -43,8 +43,6 @@ jobs:
libatlas-base-dev \
python3-venv
- run: pip3 install pysb petab

- name: Build BNGL
run: |
scripts/buildBNGL.sh
@@ -59,6 +57,11 @@ jobs:
run: |
scripts/installAmiciSource.sh
- name: Install petab
run: |
source ./build/venv/bin/activate \
&& pip3 install wheel pytest shyaml pytest-cov pysb
# retrieve test models
- name: Download and install PEtab test suite
run: |
20 changes: 14 additions & 6 deletions CMakeLists.txt
@@ -95,6 +95,8 @@ endif()

if(DEFINED ENV{SWIG})
message(STATUS "Setting SWIG_EXECUTABLE to $ENV{SWIG} ($SWIG)")
unset(SWIG_VERSION CACHE)
unset(SWIG_DIR CACHE)
set(SWIG_EXECUTABLE $ENV{SWIG})
endif()

@@ -151,12 +153,18 @@ if(${BLAS} STREQUAL "MKL" OR DEFINED ENV{MKLROOT})
CACHE STRING "")
endif()
elseif(NOT DEFINED ENV{BLAS_LIBS} AND NOT DEFINED ENV{BLAS_CFLAGS})
set(BLAS_INCLUDE_DIRS
""
CACHE STRING "")
set(BLAS_LIBRARIES
-lcblas
CACHE STRING "")
# if nothing is specified via environment variables, let's try FindBLAS
find_package(BLAS)
if(NOT BLAS_FOUND)
# Nothing specified by the user and FindBLAS didn't find anything; let's try
# if cblas is available on the system paths.
set(BLAS_INCLUDE_DIRS
""
CACHE STRING "")
set(BLAS_LIBRARIES
-lcblas
CACHE STRING "")
endif()
endif()
add_compile_definitions(AMICI_BLAS_${BLAS})

2 changes: 1 addition & 1 deletion cmake/version.cmake
@@ -3,7 +3,7 @@ if(Git_FOUND)
execute_process(
COMMAND
sh -c
"${GIT_EXECUTABLE} describe --abbrev=4 --dirty=-dirty --always --tags | cut -c 2- | tr -d '\n' | sed s/-/./"
"'${GIT_EXECUTABLE}' describe --abbrev=4 --dirty=-dirty --always --tags | cut -c 2- | tr -d '\n' | sed s/-/./"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
OUTPUT_VARIABLE PROJECT_VERSION_GIT)
endif()
2 changes: 1 addition & 1 deletion documentation/conf.py
@@ -86,7 +86,7 @@ def install_mtocpp():

def install_doxygen():
"""Get a more recent doxygen"""
version = "1.9.6"
version = "1.9.7"
doxygen_exe = os.path.join(
amici_dir, "ThirdParty", f"doxygen-{version}", "bin", "doxygen"
)
2 changes: 1 addition & 1 deletion documentation/rtd_requirements.txt
@@ -4,7 +4,7 @@ mock>=5.0.2
setuptools==67.7.2
pysb>=1.11.0
matplotlib==3.7.1
nbsphinx>=0.9.1
nbsphinx==0.9.1
nbformat==5.8.0
recommonmark>=0.7.1
sphinx_rtd_theme>=1.2.0
@@ -105,7 +105,7 @@
"source": [
"# If running as a Github action, just do the minimal amount of work required to check whether the code is working\n",
"if os.getenv('GITHUB_ACTIONS') is not None:\n",
" n_starts = 10\n",
" n_starts = 15\n",
" pypesto_optimizer = pypesto.optimize.FidesOptimizer(verbose=logging.WARNING, options=dict(maxiter=10))\n",
" pypesto_engine = pypesto.engine.SingleCoreEngine()"
]
@@ -362,7 +362,7 @@
"### Maximum Likelihood estimation\n",
"Using pyPESTO we can optimize for the parameter vector that maximizes the probability of observing the experimental data (maximum likelihood estimation).\n",
"\n",
"A multistart method with local gradient-based optimization is used and the results of each multistart can be visualized in a waterfall plot. "
"A multistart method with local gradient-based optimization is used and the results of each multistart can be visualized in a waterfall plot."
]
},
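A minimal sketch of the multistart scheme described in the cell above, assuming pypesto_problem and n_starts are defined in earlier notebook cells (as they are elsewhere in this notebook):

import logging

import pypesto.engine
import pypesto.optimize
import pypesto.visualize

# Local gradient-based optimizer; the CI cell above uses the same class.
optimizer = pypesto.optimize.FidesOptimizer(verbose=logging.WARNING)

# Run n_starts independent local optimizations from random start points.
result = pypesto.optimize.minimize(
    problem=pypesto_problem,  # assumed: built from the PEtab problem earlier
    optimizer=optimizer,
    n_starts=n_starts,        # assumed: set in the setup cell
    engine=pypesto.engine.SingleCoreEngine(),
)

# Waterfall plot: one marker per start, sorted by final objective value;
# plateaus indicate the same optimum was found repeatedly.
pypesto.visualize.waterfall(result)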
{
@@ -908,15 +908,15 @@
" np.log10(regstrength) # parameter is specified as log10 scale in PEtab\n",
" )\n",
" regproblem = copy.deepcopy(pypesto_problem)\n",
" \n",
"\n",
" # Load existing results if available\n",
" if os.path.exists(f'{name}.h5'):\n",
" regresult = pypesto.store.read_result(f'{name}.h5', problem=regproblem)\n",
" else:\n",
" regresult = None\n",
" # Overwrite\n",
" # regresult = None\n",
" \n",
"\n",
" # Parallel multistart optimization with pyPESTO and FIDES\n",
" if n_starts > 0:\n",
" if regresult is None:\n",
@@ -935,10 +935,10 @@
" regresult.optimize_result.sort()\n",
" if regresult.optimize_result.x[0] is None:\n",
" raise Exception(\"All multistarts failed (n_starts is probably too small)! If this error occurred during CI, just run the workflow again.\")\n",
" \n",
"\n",
" # Save results to disk\n",
" # pypesto.store.write_result(regresult, f'{name}.h5', overwrite=True)\n",
" \n",
"\n",
" # Store result\n",
" regproblems[regstrength] = regproblem\n",
" regresults[regstrength] = regresult"
@@ -1469,15 +1469,15 @@
" np.log10(regstrength) # parameter is specified as log10 scale in PEtab\n",
" )\n",
" regproblem = copy.deepcopy(pypesto_problem)\n",
" \n",
"\n",
" # Load existing results if available\n",
" if os.path.exists(f'{name}.h5'):\n",
" regresult = pypesto.store.read_result(f'{name}.h5', problem=regproblem)\n",
" else:\n",
" regresult = None\n",
" # Overwrite\n",
" # regresult = None\n",
" \n",
"\n",
" # Parallel multistart optimization with pyPESTO and FIDES\n",
" if n_starts > 0:\n",
" if regresult is None:\n",
@@ -1496,10 +1496,10 @@
" regresult.optimize_result.sort()\n",
" if regresult.optimize_result.x[0] is None:\n",
" raise Exception(\"All multistarts failed (n_starts is probably too small)! If this error occurred during CI, just run the workflow again.\")\n",
" \n",
"\n",
" # Save results to disk\n",
" # pypesto.store.write_result(regresult, f'{name}.h5', overwrite=True)\n",
" \n",
"\n",
" # Store result\n",
" regproblems[regstrength] = regproblem\n",
" regresults[regstrength] = regresult"
@@ -1934,7 +1934,7 @@
"## Bibliography\n",
"Schelker, M. et al. (2012). “Comprehensive estimation of input signals and dynamics in biochemical reaction networks”. In: Bioinformatics 28.18, pp. i529–i534. doi: [10.1093/bioinformatics/bts393](https://doi.org/10.1093/bioinformatics/bts393).\n",
"\n",
"Swameye, I. et al. (2003). “Identification of nucleocytoplasmic cycling as a remote sensor in cellular signaling by databased modeling”. In: Proceedings of the National Academy of Sciences 100.3, pp. 1028–1033. doi: [10.1073/pnas.0237333100](https://doi.org/10.1073/pnas.0237333100)."
"Swameye, I. et al. (2003). “Identification of nucleocytoplasmic cycling as a remote sensor in cellular signaling by databased modeling”. In: Proceedings of the National Academy of Sciences 100.3, pp. 1028–1033. doi: [10.1073/pnas.0237333100](https://doi.org/10.1073/pnas.0237333100).\n"
]
}
],
5 changes: 3 additions & 2 deletions python/sdist/amici/de_export.py
@@ -1999,9 +1999,10 @@ def _compute_equation(self, name: str) -> None:

# need to check if equations are zero since we are using
# symbols
if not smart_is_zero_matrix(self.eq("stau")[ie]):
if not smart_is_zero_matrix(self.eq("stau")[ie]) \
and not smart_is_zero_matrix(self.eq("xdot")):
tmp_eq += smart_multiply(
(self.sym("xdot_old") - self.sym("xdot")),
self.sym("xdot_old") - self.sym("xdot"),
self.sym("stau").T,
)

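The added condition only builds the stau correction term when neither stau nor xdot is an all-zero symbolic matrix. A small self-contained SymPy sketch of that kind of guard, using a plain stand-in rather than AMICI's actual smart_is_zero_matrix/smart_multiply helpers:

import sympy as sp

def is_zero_matrix(m: sp.Matrix) -> bool:
    # Stand-in for smart_is_zero_matrix: True only if every entry is
    # provably zero.
    return all(e.is_zero is True for e in m)

# Toy right-hand sides and event-time sensitivity; shapes are illustrative.
xdot_old = sp.Matrix([sp.Symbol("xdot_old0"), sp.Symbol("xdot_old1")])
xdot = sp.Matrix([sp.Symbol("xdot0"), sp.Symbol("xdot1")])
stau = sp.Matrix([sp.Symbol("stau0")])

tmp_eq = sp.zeros(2, 1)
if not is_zero_matrix(stau) and not is_zero_matrix(xdot):
    # Mirror the added guard: skip the (potentially expensive) symbolic
    # product unless both stau and xdot are non-zero matrices.
    tmp_eq += (xdot_old - xdot) * stau.T

print(tmp_eq)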
3 changes: 3 additions & 0 deletions python/sdist/amici/parameter_mapping.py
@@ -248,6 +248,9 @@ def _get_par(model_par, value, mapping):
# condition table overrides must have been handled already,
# e.g. by the PEtab parameter mapping, but parameters from
# InitialAssignments may still be present.
if mapping[value] == model_par:
# prevent infinite recursion
raise
return _get_par(value, mapping[value], mapping)
if model_par in problem_parameters:
# user-provided
Expand Down