From 7f569a9e856efd9d2fda6c1cd7906666b26b5652 Mon Sep 17 00:00:00 2001
From: Daniel Weindl
Date: Mon, 28 Jun 2021 09:20:15 +0200
Subject: [PATCH 01/10] SuperMUC build script: skip boost

---
 buildSuperMUC.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/buildSuperMUC.sh b/buildSuperMUC.sh
index 5e683726..1b551d62 100755
--- a/buildSuperMUC.sh
+++ b/buildSuperMUC.sh
@@ -66,7 +66,7 @@ build_boost() {
 
 build_3rd_party_deps() {
     # build dependencies
-    build_boost
+    # build_boost
     "${parpe_root}/ThirdParty/installIpopt.sh"
     #./installCeres.sh
     #./installCpputest.sh
@@ -89,8 +89,8 @@ build_parpe() {
     mkdir -p build && cd build
     ipopt_root=${parpe_root}/ThirdParty/Ipopt-releases-3.13.3/
+    #BOOST_ROOT=${boost_install_dir} \
     HDF5_ROOT=${HDF5_BASE} \
-    BOOST_ROOT=${boost_install_dir} \
     MPI_HOME=${MPI_BASE} \
     PKG_CONFIG_PATH=${PKG_CONFIG_PATH:-}:${ipopt_root}/install/lib/pkgconfig/:${ipopt_root}/ThirdParty-HSL/install/lib/pkgconfig/ \
     cmake -S .. \

From eb33a10abde3faf2835ce846482817df06afcfe1 Mon Sep 17 00:00:00 2001
From: Daniel Weindl
Date: Mon, 28 Jun 2021 22:14:58 +0200
Subject: [PATCH 02/10] Fix race condition with parallel HDF5 calls (#345)

* Fix race condition with parallel HDF5 calls

  Add some locks, in particular for H5Location::nameExists.
  Also remove some unnecessary H5File::getId calls.

* Safely close HDF5 files

* Check pthread_create retval

* cleanup
---
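A note on the locking scheme these changes rely on: the HDF5 C library keeps
global state and is not thread-safe unless built with thread-safety enabled,
so parPE serializes all HDF5 calls through a single process-wide mutex that is
handed out as an RAII lock. A minimal sketch of the pattern, simplified from
src/parpecommon/hdf5Misc.{h,cpp}; the mutexHdfType alias is assumed here to be
std::recursive_mutex, so that helpers that already hold the lock may call each
other:

    #include <mutex>

    using mutexHdfType = std::recursive_mutex;
    static mutexHdfType mutexHdf;

    // Returns an RAII lock on the global HDF5 mutex; the mutex is released
    // automatically when the lock goes out of scope, also on exceptions.
    std::unique_lock<mutexHdfType> hdf5MutexGetLock()
    {
        return std::unique_lock<mutexHdfType>(mutexHdf);
    }

    void someHdf5Consumer()
    {
        // [[maybe_unused]] silences warnings: the lock is only held, never read.
        [[maybe_unused]] auto lock = hdf5MutexGetLock();
        // ... any H5* calls are serialized while `lock` is alive ...
    }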
 ...lOptimizationAnalyticalParameterProvider.h |  6 +-
 .../parpeamici/multiConditionDataProvider.h   |  2 +-
 include/parpeamici/simulationResultWriter.h   |  2 +
 .../optimizationResultWriter.h                |  2 +-
 ...ptimizationAnalyticalParameterProvider.cpp | 12 ++-
 src/parpeamici/multiConditionDataProvider.cpp | 49 +++++++----
 src/parpeamici/multiConditionProblem.cpp      | 16 ++--
 src/parpeamici/optimizationApplication.cpp    |  1 +
 src/parpeamici/simulationResultWriter.cpp     |  6 ++
 src/parpeamici/standaloneSimulator.cpp        | 15 ++--
 src/parpecommon/hdf5Misc.cpp                  | 81 ++++++++++---------
 .../multiStartOptimization.cpp                | 21 +++--
 src/parpeoptimization/optimizationOptions.cpp | 10 ++-
 .../optimizationResultWriter.cpp              | 41 ++++++----
 14 files changed, 165 insertions(+), 99 deletions(-)

diff --git a/include/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.h b/include/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.h
index 28c49569..40ff62f7 100644
--- a/include/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.h
+++ b/include/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.h
@@ -124,6 +124,8 @@ class AnalyticalParameterHdf5Reader : public AnalyticalParameterProvider
      */
     std::vector<int> getOptimizationParameterIndices() const override;
 
+    ~AnalyticalParameterHdf5Reader() override;
+
   private:
     /**
      * @brief Get number of analytically computed parameters
@@ -142,9 +144,9 @@ class AnalyticalParameterHdf5Reader : public AnalyticalParameterProvider
      *   - observableIdx: index of model output
      */
     void readParameterConditionObservableMappingFromFile();
-    std::vector<int> readRawMap(H5::DataSet& dataset,
+    std::vector<int> readRawMap(const H5::DataSet &dataset,
                                 hsize_t& nRows,
-                                hsize_t& nCols);
+                                hsize_t& nCols) const;
 
     H5::H5File file;
     std::string rootPath;

diff --git a/include/parpeamici/multiConditionDataProvider.h b/include/parpeamici/multiConditionDataProvider.h
index cedca925..f3ae58b2 100644
--- a/include/parpeamici/multiConditionDataProvider.h
+++ b/include/parpeamici/multiConditionDataProvider.h
@@ -249,7 +249,7 @@ class MultiConditionDataProviderHDF5 : public MultiConditionDataProvider
 
     MultiConditionDataProviderHDF5(MultiConditionDataProviderHDF5 const&) = delete;
 
-    virtual ~MultiConditionDataProviderHDF5() override = default;
+    virtual ~MultiConditionDataProviderHDF5() override;
 
     /**
      * @brief Get the number of simulations required for objective function

diff --git a/include/parpeamici/simulationResultWriter.h b/include/parpeamici/simulationResultWriter.h
index 53a18dac..7fccdaef 100644
--- a/include/parpeamici/simulationResultWriter.h
+++ b/include/parpeamici/simulationResultWriter.h
@@ -51,6 +51,8 @@ class SimulationResultWriter {
     // Implement me
     SimulationResultWriter(SimulationResultWriter const&) = delete;
 
+    ~SimulationResultWriter();
+
     /**
      * @brief Create results datasets.
      *

diff --git a/include/parpeoptimization/optimizationResultWriter.h b/include/parpeoptimization/optimizationResultWriter.h
index d9133866..abb5c26f 100644
--- a/include/parpeoptimization/optimizationResultWriter.h
+++ b/include/parpeoptimization/optimizationResultWriter.h
@@ -39,7 +39,7 @@ class OptimizationResultWriter {
 
     OptimizationResultWriter(OptimizationResultWriter const& other);
 
-    virtual ~OptimizationResultWriter() = default;
+    virtual ~OptimizationResultWriter();
 
     /**
      * @brief Function to be called after each objective function f(x) or

diff --git a/src/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.cpp b/src/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.cpp
index a817777f..76fd3887 100644
--- a/src/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.cpp
+++ b/src/parpeamici/hierarchicalOptimizationAnalyticalParameterProvider.cpp
@@ -83,6 +83,12 @@ AnalyticalParameterHdf5Reader::getOptimizationParameterIndices() const
     return analyticalParameterIndices;
 }
 
+AnalyticalParameterHdf5Reader::~AnalyticalParameterHdf5Reader()
+{
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+    file.close();
+}
+
 int
 AnalyticalParameterHdf5Reader::getNumAnalyticalParameters() const
 {
@@ -136,10 +142,12 @@ AnalyticalParameterHdf5Reader::readParameterConditionObservableMappingFromFile()
 }
 
 std::vector<int>
-AnalyticalParameterHdf5Reader::readRawMap(H5::DataSet& dataset,
+AnalyticalParameterHdf5Reader::readRawMap(H5::DataSet const& dataset,
                                           hsize_t& nRows,
-                                          hsize_t& nCols)
+                                          hsize_t& nCols) const
 {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     auto dataspace = dataset.getSpace();
     auto ndims = dataspace.getSimpleExtentNdims();
     if (ndims != 2)
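The destructors added above all follow the same pattern: H5::H5File::close()
manipulates the library's global open-object table, so it must not race with
HDF5 calls from other threads. A sketch of the idiom, assuming the
hdf5MutexGetLock() helper shown earlier:

    #include <H5Cpp.h>
    #include <mutex>

    std::unique_lock<std::recursive_mutex> hdf5MutexGetLock();  // see earlier sketch

    class Hdf5Consumer {
      public:
        ~Hdf5Consumer()
        {
            // Take the global HDF5 lock before releasing the file handle, so
            // that closing cannot race with HDF5 calls from other threads.
            [[maybe_unused]] auto lock = hdf5MutexGetLock();
            file_.close();
        }

      private:
        H5::H5File file_;
    };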
diff --git a/src/parpeamici/multiConditionDataProvider.cpp b/src/parpeamici/multiConditionDataProvider.cpp
index 7075a4a4..3557c57d 100644
--- a/src/parpeamici/multiConditionDataProvider.cpp
+++ b/src/parpeamici/multiConditionDataProvider.cpp
@@ -21,8 +21,13 @@ MultiConditionDataProviderHDF5::MultiConditionDataProviderHDF5(
   : MultiConditionDataProviderHDF5(std::move(model), hdf5Filename, "")
 {}
 
+MultiConditionDataProviderHDF5::~MultiConditionDataProviderHDF5() {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+    file_.close();
+}
+
 MultiConditionDataProviderHDF5::MultiConditionDataProviderHDF5(
   std::unique_ptr<amici::Model> model,
   std::string const& hdf5Filename,
   std::string const& rootPath)
   : model_(std::move(model))
@@ -70,8 +75,8 @@ MultiConditionDataProviderHDF5::getNumberOfSimulationConditions() const
     [[maybe_unused]] auto lock = hdf5MutexGetLock();
 
     int d1, d2;
-    hdf5GetDatasetDimensions(
-      file_.getId(), hdf5_reference_condition_path_.c_str(), 2, &d1, &d2);
+    hdf5GetDatasetDimensions(file_, hdf5_reference_condition_path_,
+                             2, &d1, &d2);
 
     return d1;
 }
@@ -82,6 +87,8 @@ MultiConditionDataProviderHDF5::getSimulationToOptimizationParameterMapping(
 {
     std::string path = hdf5_simulation_to_optimization_parameter_mapping_path_;
 
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     if (file_.nameExists(path)) {
         return hdf5Read2DIntegerHyperslab(
           file_, path, model_->np(), 1, 0, conditionIdx);
@@ -129,15 +136,20 @@ MultiConditionDataProviderHDF5::mapAndSetOptimizationToSimulationVariables(
     auto mapping = getSimulationToOptimizationParameterMapping(conditionIdx);
 
     std::vector<double> overrides;
-    if (file_.nameExists(hdf5_parameter_overrides_path)) {
-        overrides.resize(model_->np());
-        hdf5Read2DDoubleHyperslab(file_,
-                                  hdf5_parameter_overrides_path,
-                                  model_->np(),
-                                  1,
-                                  0,
-                                  conditionIdx,
-                                  overrides);
+
+    {
+        [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
+        if (file_.nameExists(hdf5_parameter_overrides_path)) {
+            overrides.resize(model_->np());
+            hdf5Read2DDoubleHyperslab(file_,
+                                      hdf5_parameter_overrides_path,
+                                      model_->np(),
+                                      1,
+                                      0,
+                                      conditionIdx,
+                                      overrides);
+        }
     }
 
     for (int i = 0; i < model_->np(); ++i) {
@@ -260,8 +272,8 @@ MultiConditionDataProviderHDF5::readFixedSimulationParameters(
 
     H5_SAVE_ERROR_HANDLER;
 
-    hdf5Read2DDoubleHyperslab(file_.getId(),
-                              hdf5_condition_path_.c_str(),
+    hdf5Read2DDoubleHyperslab(file_,
+                              hdf5_condition_path_,
                               model_->nk(),
                               1,
                               0,
@@ -392,7 +404,7 @@ MultiConditionDataProviderHDF5::getNumOptimizationParameters() const
 {
     std::string path = root_path_ + "/parameters/parameterNames";
     int size = 0;
-    hdf5GetDatasetDimensions(file_.getId(), path.c_str(), 1, &size);
+    hdf5GetDatasetDimensions(file_, path, 1, &size);
     return size;
 }
 
@@ -436,6 +448,7 @@ MultiConditionDataProviderHDF5::updateSimulationParametersAndScale(
 void
 MultiConditionDataProviderHDF5::copyInputData(H5::H5File const& target)
 {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
 
     H5Ocopy(file_.getId(),
             "/",
@@ -515,6 +528,7 @@ H5::H5File MultiConditionDataProviderHDF5::getHdf5File() const
 void
 MultiConditionDataProviderHDF5::checkDataIntegrity() const
 {
+
     // check matching IDs
     std::string modelParameterIdsPath = root_path_ + "/model/parameterIds";
     auto dataParameterIds =
@@ -565,7 +579,7 @@ MultiConditionDataProviderHDF5::checkDataIntegrity() const
 
     if (model_->nk()) {
         parpe::hdf5GetDatasetDimensions(
-          file_.getId(), hdf5_condition_path_.c_str(), 2, &d1, &d2);
+          file_, hdf5_condition_path_, 2, &d1, &d2);
         Expects(d1 == model_->nk());
     }
 }
@@ -678,6 +692,8 @@ std::vector<std::vector<double>>
 MultiConditionDataProviderDefault::getAllMeasurements() const
 {
     std::vector<std::vector<double>> measurements;
+    measurements.reserve(edata_.size());
+
     for (const auto& e : edata_) {
         measurements.push_back(e.getObservedData());
     }
@@ -688,6 +704,7 @@ std::vector<std::vector<double>>
 MultiConditionDataProviderDefault::getAllSigmas() const
 {
     std::vector<std::vector<double>> sigmas;
+    sigmas.reserve(edata_.size());
     for (const auto& e : edata_) {
         sigmas.push_back(e.getObservedDataStdDev());
     }
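Where only part of a function touches HDF5, as in
mapAndSetOptimizationToSimulationVariables above, the lock is confined to an
extra block so that it is released before the non-HDF5 work starts. The shape
of that idiom, as a self-contained sketch (the hypothetical readOverrides is
illustrative only; hdf5MutexGetLock() is assumed from the earlier sketch):

    #include <H5Cpp.h>
    #include <mutex>
    #include <string>
    #include <vector>

    std::unique_lock<std::recursive_mutex> hdf5MutexGetLock();  // see earlier sketch

    std::vector<double> readOverrides(H5::H5File const& file,
                                      std::string const& path)
    {
        std::vector<double> overrides;
        {
            // Hold the global HDF5 lock only while touching the file ...
            [[maybe_unused]] auto lock = hdf5MutexGetLock();
            if (file.nameExists(path)) {
                // ... resize `overrides` and read the dataset here ...
            }
        }  // ... and release it before any computation that follows.
        return overrides;
    }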
diff --git a/src/parpeamici/multiConditionProblem.cpp b/src/parpeamici/multiConditionProblem.cpp
index 3e5c8439..08657a88 100644
--- a/src/parpeamici/multiConditionProblem.cpp
+++ b/src/parpeamici/multiConditionProblem.cpp
@@ -258,31 +258,31 @@ void saveSimulation(const H5::H5File &file, std::string const& pathStr,
     [[maybe_unused]] auto lock = hdf5MutexGetLock();
 
     hdf5CreateOrExtendAndWriteToDouble2DArray(
-      file.getId(), fullGroupPath, "simulationLogLikelihood",
+      file, fullGroupPath, "simulationLogLikelihood",
       gsl::make_span(&llh, 1));
 
     hdf5CreateOrExtendAndWriteToInt2DArray(
-      file.getId(), fullGroupPath, "jobId",
+      file, fullGroupPath, "jobId",
       gsl::make_span(&jobId, 1));
 
     if (!gradient.empty()) {
         hdf5CreateOrExtendAndWriteToDouble2DArray(
-          file.getId(), fullGroupPath, "simulationLogLikelihoodGradient",
+          file, fullGroupPath, "simulationLogLikelihoodGradient",
           gradient);
     } else if(!parameters.empty()) {
         double dummyGradient[parameters.size()];
         std::fill_n(dummyGradient, parameters.size(), NAN);
         hdf5CreateOrExtendAndWriteToDouble2DArray(
-          file.getId(), fullGroupPath, "simulationLogLikelihoodGradient",
+          file, fullGroupPath, "simulationLogLikelihoodGradient",
           gsl::make_span(dummyGradient, parameters.size()));
     }
 
     if (!parameters.empty())
         hdf5CreateOrExtendAndWriteToDouble2DArray(
-          file.getId(), fullGroupPath, "simulationParameters", parameters);
+          file, fullGroupPath, "simulationParameters", parameters);
 
     hdf5CreateOrExtendAndWriteToDouble2DArray(
-      file.getId(), fullGroupPath, "simulationWallTimeInSec",
+      file, fullGroupPath, "simulationWallTimeInSec",
       gsl::make_span(&timeElapsedInSeconds, 1));
 
     // TODO: This was broken by allowing different numbers of timepoints
@@ -302,10 +302,10 @@ void saveSimulation(const H5::H5File &file, std::string const& pathStr,
     //  stateSensi.size() / parameters.size(), parameters.size());
 
     hdf5CreateOrExtendAndWriteToInt2DArray(
-      file.getId(), fullGroupPath, "simulationStatus",
+      file, fullGroupPath, "simulationStatus",
       gsl::make_span(&status, 1));
 
-    hdf5CreateOrExtendAndWriteToString1DArray(file.getId(), fullGroupPath,
+    hdf5CreateOrExtendAndWriteToString1DArray(file, fullGroupPath,
                                               "simulationLabel", label);
 
     file.flush(H5F_SCOPE_LOCAL);

diff --git a/src/parpeamici/optimizationApplication.cpp b/src/parpeamici/optimizationApplication.cpp
index 0e472448..29ed2aa0 100644
--- a/src/parpeamici/optimizationApplication.cpp
+++ b/src/parpeamici/optimizationApplication.cpp
@@ -345,6 +345,7 @@ bool OptimizationApplication::isWorker() { return getMpiRank() > 0; }
 OptimizationApplication::~OptimizationApplication() {
     // objects must be destroyed before MPI_Finalize is called
     // and Hdf5 mutex is destroyed
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
     h5File.close();
     problem.reset(nullptr);
 #ifdef PARPE_ENABLE_MPI

diff --git a/src/parpeamici/simulationResultWriter.cpp b/src/parpeamici/simulationResultWriter.cpp
index 84484b15..75f633aa 100644
--- a/src/parpeamici/simulationResultWriter.cpp
+++ b/src/parpeamici/simulationResultWriter.cpp
@@ -18,6 +18,12 @@ SimulationResultWriter::SimulationResultWriter(H5::H5File const& file,
     updatePaths();
 }
 
+SimulationResultWriter::~SimulationResultWriter()
+{
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+    file.close();
+}
+
 SimulationResultWriter::SimulationResultWriter(const std::string &hdf5FileName,
                                                std::string rootPath)

diff --git a/src/parpeamici/standaloneSimulator.cpp b/src/parpeamici/standaloneSimulator.cpp
index 2aeddbe1..9f279a14 100644
--- a/src/parpeamici/standaloneSimulator.cpp
+++ b/src/parpeamici/standaloneSimulator.cpp
@@ -520,6 +520,8 @@ std::pair<int, double>
 getFunctionEvaluationWithMinimalCost(std::string const& datasetPath,
                                      H5::H5File const& file)
 {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     H5::DataSet dataset = file.openDataSet(datasetPath);
     H5::DataSpace filespace = dataset.getSpace();
@@ -534,7 +536,7 @@ getFunctionEvaluationWithMinimalCost(std::string const& datasetPath,
     std::vector<double> cost(numFunctionEvalations, INFINITY);
 
     parpe::hdf5Read2DDoubleHyperslab(
-      file.getId(), datasetPath.c_str(), 1, numFunctionEvalations, 0, 0, cost);
+      file, datasetPath, 1, numFunctionEvalations, 0, 0, cost);
 
     int minIndex = std::min_element(cost.begin(), cost.end()) - cost.begin();
     return { minIndex, cost[minIndex] };
 }
@@ -561,8 +563,8 @@ getParameterTrajectory(std::string const& startIndex, H5::H5File const& file)
 
     for (int iter = 0; iter < numIter; ++iter) {
         parameters[iter] = std::vector<double>(numParam);
-        parpe::hdf5Read2DDoubleHyperslab(file.getId(),
-                                         parameterPath.c_str(),
+        parpe::hdf5Read2DDoubleHyperslab(file,
+                                         parameterPath,
                                          numParam,
                                          1,
                                          0,
@@ -577,7 +579,7 @@ int
 getNumStarts(H5::H5File const& file, std::string const& rootPath)
 {
     auto o = parpe::OptimizationOptions::fromHDF5(
-      file.getId(), rootPath + "/optimizationOptions");
+      file, rootPath + "/optimizationOptions");
     return o->numStarts;
 }
 
@@ -722,7 +724,6 @@ runNominalParameters(StandaloneSimulator& sim,
     [[maybe_unused]] auto lock = hdf5MutexGetLock();
     H5::H5File parameterFile(parameterFileName, H5F_ACC_RDONLY);
     H5::H5File conditionFile(conditionFileName, H5F_ACC_RDONLY);
-    lock.unlock();
 
     int errors = 0;
 
@@ -731,9 +732,11 @@ runNominalParameters(StandaloneSimulator& sim,
     std::vector<std::vector<double>> parameters;
 
     for(int i = 0; i < static_cast<int>(parameterValues.size()); ++i)
diff --git a/src/parpecommon/hdf5Misc.cpp b/src/parpecommon/hdf5Misc.cpp
index 65173246..ee3c3074 100644
--- a/src/parpecommon/hdf5Misc.cpp
+++ b/src/parpecommon/hdf5Misc.cpp
@@ -43,6 +43,8 @@ namespace parpe {
 static mutexHdfType mutexHdf;
 
 void initHDF5Mutex() {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     // TODO: check if still required
     H5dont_atexit();
 }
@@ -55,8 +57,9 @@ std::unique_lock<mutexHdfType> hdf5MutexGetLock()
 herr_t hdf5ErrorStackWalker_cb(unsigned int n, const H5E_error_t *err_desc,
                                void* /*client_data*/) {
     Ensures(err_desc != nullptr);
-    const int indent = 2;
+    constexpr int indent = 2;
 
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
     std::unique_ptr<char, decltype(&std::free)>
       maj_str { H5Eget_major(err_desc->maj_num), &std::free };
     std::unique_ptr<char, decltype(&std::free)>
      min_str { H5Eget_minor(err_desc->min_num), &std::free };
@@ -108,8 +111,8 @@ void hdf5CreateGroup(const H5::H5File &file, const std::string &groupPath, bool
     try {
         auto group = file.createGroup(groupPath.c_str(), groupCreationPropertyList);
     } catch (H5::Exception const&) {
-        throw(HDF5Exception("Failed to create group in hdf5CreateGroup:" +
-                            groupPath));
+        throw HDF5Exception("Failed to create group in hdf5CreateGroup:" +
+                            groupPath);
     }
 }
 
@@ -118,15 +121,15 @@ void hdf5CreateExtendableDouble2DArray(const H5::H5File &file,
                                        hsize_t stride)
 {
     constexpr int rank = 2;
-    hsize_t initialDimensions[2] = {stride, 0};
-    hsize_t maximumDimensions[2] = {stride, H5S_UNLIMITED};
+    hsize_t initialDimensions[2] {stride, 0};
+    hsize_t maximumDimensions[2] {stride, H5S_UNLIMITED};
 
     std::lock_guard<mutexHdfType> lock(mutexHdf);
 
     H5::DataSpace dataspace(rank, initialDimensions, maximumDimensions);
 
     // need chunking for extendable dataset
-    hsize_t chunkDimensions[2] = {stride, 1};
+    hsize_t chunkDimensions[2] {stride, 1};
     H5::DSetCreatPropList dSetCreatPropList;
     dSetCreatPropList.setChunk(rank, chunkDimensions);
 
@@ -156,13 +159,12 @@ void hdf5Extend2ndDimensionAndWriteToDouble2DArray(const H5::H5File &file, const
 
     Expects(buffer.size() == currentDimensions[0]);
 
-    hsize_t newDimensions[2] = {currentDimensions[0],
-                                currentDimensions[1] + 1};
+    hsize_t newDimensions[2] {currentDimensions[0], currentDimensions[1] + 1};
     dataset.extend(newDimensions);
 
     filespace = dataset.getSpace();
-    hsize_t offset[2] = {0, currentDimensions[1]};
-    hsize_t slabsize[2] = {currentDimensions[0], 1};
+    hsize_t offset[2] {0, currentDimensions[1]};
+    hsize_t slabsize[2] {currentDimensions[0], 1};
 
     filespace.selectHyperslab(H5S_SELECT_SET, slabsize, offset);
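For readers unfamiliar with the HDF5 C++ API used here: datasets can only grow
along a dimension declared unlimited at creation time, and unlimited dimensions
require a chunked storage layout, which is why every create function above sets
a chunk size. A condensed, hedged sketch of the create-then-append pattern
(the appendColumn helper is hypothetical; locking and error handling omitted):

    #include <H5Cpp.h>
    #include <string>
    #include <vector>

    void appendColumn(H5::H5File const& file, std::string const& name,
                      std::vector<double> const& column)
    {
        hsize_t const stride = column.size();

        if (!file.nameExists(name)) {
            // Create a (stride x 0) dataset that may grow without bound
            // along the 2nd dimension; extendable datasets must be chunked.
            hsize_t initial[2] {stride, 0};
            hsize_t maximum[2] {stride, H5S_UNLIMITED};
            H5::DataSpace space(2, initial, maximum);
            H5::DSetCreatPropList props;
            hsize_t chunk[2] {stride, 1};
            props.setChunk(2, chunk);
            file.createDataSet(name, H5::PredType::NATIVE_DOUBLE, space, props);
        }

        auto dataset = file.openDataSet(name);
        hsize_t dims[2];
        dataset.getSpace().getSimpleExtentDims(dims);

        // Grow by one column, then write only into the new hyperslab.
        hsize_t newDims[2] {dims[0], dims[1] + 1};
        dataset.extend(newDims);
        auto filespace = dataset.getSpace();
        hsize_t offset[2] {0, dims[1]};
        hsize_t slab[2] {dims[0], 1};
        filespace.selectHyperslab(H5S_SELECT_SET, slab, offset);
        H5::DataSpace memspace(2, slab);
        dataset.write(column.data(), H5::PredType::NATIVE_DOUBLE,
                      memspace, filespace);
    }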
@@ -186,18 +188,18 @@ void hdf5Extend3rdDimensionAndWriteToDouble3DArray(const H5::H5File &file,
     hsize_t currentDimensions[3];
     filespace.getSimpleExtentDims(currentDimensions);
 
-    hsize_t newDimensions[3] = {currentDimensions[0],
-                                currentDimensions[1],
-                                currentDimensions[2] + 1};
+    hsize_t newDimensions[3] {currentDimensions[0],
+                              currentDimensions[1],
+                              currentDimensions[2] + 1};
     dataset.extend(newDimensions);
 
     filespace = dataset.getSpace();
-    hsize_t offset[3] = {0, 0, currentDimensions[2]};
-    hsize_t slabsize[3] = {currentDimensions[0], currentDimensions[1], 1};
+    hsize_t offset[3] {0, 0, currentDimensions[2]};
+    hsize_t slabsize[3] {currentDimensions[0], currentDimensions[1], 1};
 
     filespace.selectHyperslab(H5S_SELECT_SET, slabsize, offset);
 
-    auto memspace = H5::DataSpace(rank, slabsize);
+    H5::DataSpace memspace(rank, slabsize);
 
     dataset.write(buffer.data(), H5::PredType::NATIVE_DOUBLE, memspace, filespace);
 }
@@ -207,9 +209,11 @@ void hdf5CreateOrExtendAndWriteToDouble2DArray(const H5::H5File &file,
                                                const std::string &datasetName,
                                                gsl::span<const double> buffer)
 {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     hdf5EnsureGroupExists(file, parentPath);
 
-    std::string fullDatasetPath = std::string(parentPath) + "/" + datasetName;
+    auto fullDatasetPath = parentPath + "/" + datasetName;
 
     if (!file.nameExists(fullDatasetPath.c_str())) {
         hdf5CreateExtendableDouble2DArray(
@@ -226,9 +230,11 @@ void hdf5CreateOrExtendAndWriteToDouble3DArray(const H5::H5File &file,
                                                gsl::span<const double> buffer,
                                                hsize_t stride1,
                                                hsize_t stride2)
 {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     hdf5EnsureGroupExists(file, parentPath);
 
-    std::string fullDatasetPath = std::string(parentPath) + "/" + datasetName;
+    std::string fullDatasetPath = parentPath + "/" + datasetName;
 
     if (!file.nameExists(fullDatasetPath.c_str())) {
         hdf5CreateExtendableDouble3DArray(
@@ -250,7 +256,7 @@ void hdf5CreateOrExtendAndWriteToInt2DArray(const H5::H5File &file,
 
     hdf5EnsureGroupExists(file, parentPath);
 
-    auto fullDatasetPath = std::string(parentPath) + "/" + datasetName;
+    auto fullDatasetPath = parentPath + "/" + datasetName;
 
     if (!file.nameExists(fullDatasetPath.c_str())) {
         hdf5CreateExtendableInt2DArray(
@@ -279,13 +285,12 @@ void hdf5Extend2ndDimensionAndWriteToInt2DArray(const H5::H5File &file,
     filespace.getSimpleExtentDims(currentDimensions);
     Expects(buffer.size() == currentDimensions[0]);
 
-    hsize_t newDimensions[2] = {currentDimensions[0],
-                                currentDimensions[1] + 1};
+    hsize_t newDimensions[2] {currentDimensions[0], currentDimensions[1] + 1};
     dataset.extend(newDimensions);
 
     filespace = dataset.getSpace();
-    hsize_t offset[2] = {0, currentDimensions[1]};
-    hsize_t slabsize[2] = {currentDimensions[0], 1};
+    hsize_t offset[2] {0, currentDimensions[1]};
+    hsize_t slabsize[2] {currentDimensions[0], 1};
 
     filespace.selectHyperslab(H5S_SELECT_SET, slabsize, offset);
 
@@ -299,14 +304,14 @@ void hdf5CreateExtendableInt2DArray(const H5::H5File &file,
 {
     std::lock_guard<mutexHdfType> lock(mutexHdf);
 
-    int rank = 2;
-    hsize_t initialDimensions[2] = {stride, 0};
-    hsize_t maximumDimensions[2] = {stride, H5S_UNLIMITED};
+    constexpr int rank = 2;
+    hsize_t initialDimensions[rank] {stride, 0};
+    hsize_t maximumDimensions[rank] {stride, H5S_UNLIMITED};
 
     H5::DataSpace dataspace(rank, initialDimensions, maximumDimensions);
 
     // need chunking for extendable dataset
-    hsize_t chunkDimensions[2] = {stride, 1};
+    hsize_t chunkDimensions[2] {stride, 1};
     H5::DSetCreatPropList datasetCreationProperty;
     datasetCreationProperty.setChunk(rank, chunkDimensions);
 
@@ -322,16 +327,16 @@ void hdf5CreateExtendableDouble3DArray(const H5::H5File &file,
                                        hsize_t stride2)
 {
 
-    int rank = 3;
-    hsize_t initialDimensions[3] = {stride1, stride2, 0};
-    hsize_t maximumDimensions[3] = {stride1, stride2, H5S_UNLIMITED};
+    constexpr int rank = 3;
+    hsize_t initialDimensions[rank] {stride1, stride2, 0};
+    hsize_t maximumDimensions[rank] {stride1, stride2, H5S_UNLIMITED};
 
     std::lock_guard<mutexHdfType> lock(mutexHdf);
 
     H5::DataSpace dataspace(rank, initialDimensions, maximumDimensions);
 
     // need chunking for extendable dataset
-    hsize_t chunkDimensions[3] = {stride1, stride2, 1};
+    hsize_t chunkDimensions[rank] = {stride1, stride2, 1};
     H5::DSetCreatPropList datasetCreationProperty;
     datasetCreationProperty.setChunk(rank, chunkDimensions);
 
@@ -353,8 +358,8 @@ void hdf5Read2DDoubleHyperslab(const H5::H5File &file,
     auto dataset = file.openDataSet(path.c_str());
     auto dataspace = dataset.getSpace();
 
-    hsize_t offset[] = {offset0, offset1};
-    hsize_t count[] = {size0, size1};
+    hsize_t offset[] {offset0, offset1};
+    hsize_t count[] {size0, size1};
 
     const int ndims = dataspace.getSimpleExtentNdims();
     RELEASE_ASSERT(ndims == 2 && "Only works for 2D arrays!", "");
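The read path in hdf5Read2DDoubleHyperslab above is the mirror image of the
write path: select a rectangular hyperslab in the file dataspace, then read it
into a matching in-memory dataspace. A trimmed sketch of that pattern (the
read2DHyperslab helper is hypothetical; locking and rank checks omitted):

    #include <H5Cpp.h>
    #include <string>
    #include <vector>

    std::vector<double> read2DHyperslab(H5::H5File const& file,
                                        std::string const& path,
                                        hsize_t size0, hsize_t size1,
                                        hsize_t offset0, hsize_t offset1)
    {
        auto dataset = file.openDataSet(path);
        auto filespace = dataset.getSpace();

        // Select the (size0 x size1) rectangle starting at (offset0, offset1).
        hsize_t offset[] {offset0, offset1};
        hsize_t count[] {size0, size1};
        filespace.selectHyperslab(H5S_SELECT_SET, count, offset);

        // The memory dataspace describes the shape of the receiving buffer.
        H5::DataSpace memspace(2, count);
        std::vector<double> buffer(size0 * size1);
        dataset.read(buffer.data(), H5::PredType::NATIVE_DOUBLE,
                     memspace, filespace);
        return buffer;
    }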
@@ -574,9 +579,9 @@ void hdf5GetDatasetDimensions(const H5::H5File &file,
     const int nDimsActual = dataspace.getSimpleExtentNdims();
     if(nDimsActual != (signed)nDimsExpected)
-        throw(HDF5Exception("Dataset rank (" + std::to_string(nDimsActual)
+        throw HDF5Exception("Dataset rank (" + std::to_string(nDimsActual)
                             + ") does not match nDims argument ("
-                            + std::to_string(nDimsExpected) + ")"));
+                            + std::to_string(nDimsExpected) + ")");
 
     hsize_t dims[nDimsExpected];
     dataspace.getSimpleExtentDims(dims);
@@ -673,9 +678,11 @@ void hdf5CreateOrExtendAndWriteToString1DArray(const H5::H5File &file,
                                                const std::string &datasetName,
                                                const std::string &buffer)
 {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     hdf5EnsureGroupExists(file, parentPath);
 
-    std::string fullDatasetPath = std::string(parentPath) + "/" + datasetName;
+    std::string fullDatasetPath = parentPath + "/" + datasetName;
 
     if (!file.nameExists(fullDatasetPath.c_str())) {
         hdf5CreateExtendableString1DArray(file, fullDatasetPath);
@@ -701,7 +708,7 @@ H5::H5File hdf5OpenForReading(const std::string &hdf5Filename)
         H5Ewalk2(H5E_DEFAULT, H5E_WALK_DOWNWARD, hdf5ErrorStackWalker_cb,
                  nullptr);
         H5_RESTORE_ERROR_HANDLER;
-        throw(HDF5Exception("Unable to open HDF5 file " + hdf5Filename));
+        throw HDF5Exception("Unable to open HDF5 file " + hdf5Filename);
     }
 }
diff --git a/src/parpeoptimization/multiStartOptimization.cpp b/src/parpeoptimization/multiStartOptimization.cpp
index 43617357..4eb92c56 100644
--- a/src/parpeoptimization/multiStartOptimization.cpp
+++ b/src/parpeoptimization/multiStartOptimization.cpp
@@ -59,8 +59,14 @@ void MultiStartOptimization::runMultiThreaded()
                        "Spawning thread for local optimization #%d (%d)",
                        lastStartIdx, ms);
 
-        pthread_create(&localOptimizationThreads.at(ms), &threadAttr,
-                       getLocalOptimumThreadWrapper,
-                       static_cast<void *>(localProblems[ms]));
+        auto ret = pthread_create(
+            &localOptimizationThreads.at(ms), &threadAttr,
+            getLocalOptimumThreadWrapper,
+            static_cast<void *>(localProblems[ms]));
+        if(ret) {
+            throw ParPEException("Failure during thread creation: "
+                                 + std::to_string(ret));
+        }
     }
 
     int numCompleted = 0;
@@ -109,9 +115,14 @@ void MultiStartOptimization::runMultiThreaded()
                 LOGLVL_DEBUG,
                 "Spawning thread for local optimization #%d (%d)",
                 lastStartIdx, ms);
 
-            pthread_create(&localOptimizationThreads[ms], &threadAttr,
-                           getLocalOptimumThreadWrapper,
-                           static_cast<void *>(localProblems[ms]));
+            auto ret = pthread_create(
+                &localOptimizationThreads[ms], &threadAttr,
+                getLocalOptimumThreadWrapper,
+                static_cast<void *>(localProblems[ms]));
+            if(ret) {
+                throw ParPEException("Failure during thread creation: "
+                                     + std::to_string(ret));
+            }
         }
 #endif
         delete threadStatus;
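Background for the change above: pthread_create() does not set errno; it
returns zero on success and an errno-style code otherwise (typically EAGAIN
when thread or memory limits are exhausted), so the result must be checked
explicitly. A hypothetical helper, not part of the patch, that would make the
new exception message human-readable:

    #include <cstring>
    #include <string>

    // Translate a pthread_create() return code into a readable message, e.g.
    // "Failure during thread creation: Resource temporarily unavailable".
    std::string describeThreadCreationFailure(int ret)
    {
        return std::string("Failure during thread creation: ")
               + std::strerror(ret);
    }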
diff --git a/src/parpeoptimization/optimizationOptions.cpp b/src/parpeoptimization/optimizationOptions.cpp
index 455284a2..2775c675 100644
--- a/src/parpeoptimization/optimizationOptions.cpp
+++ b/src/parpeoptimization/optimizationOptions.cpp
@@ -50,13 +50,15 @@ template <typename T> std::string to_string(const T &n) {
 }
 } // namespace patch
 
-void optimizationOptionsFromAttribute(H5::H5Object& loc,
+void optimizationOptionsFromAttribute(H5::H5Object & loc,
                                       const H5std_string attr_name,
                                       void *op_data) {
     // iterate over attributes and add to OptimizationOptions
     auto *o = static_cast<OptimizationOptions *>(op_data);
 
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     auto a = loc.openAttribute(attr_name);
     auto type = a.getDataType();
     auto typeClass = a.getTypeClass();
@@ -94,9 +96,9 @@ std::unique_ptr<OptimizationOptions> OptimizationOptions::fromHDF5(const std::st
 std::unique_ptr<OptimizationOptions> OptimizationOptions::fromHDF5(const H5::H5File &file,
                                                                    std::string const& path) {
     auto o = std::make_unique<OptimizationOptions>();
-    const char *hdf5path = path.c_str();
-    auto fileId = file.getId();
+    auto hdf5path = path.c_str();
     [[maybe_unused]] auto lock = hdf5MutexGetLock();
+    auto fileId = file.getId();
 
     if (hdf5AttributeExists(file, path, "optimizer")) {
         int buffer;
@@ -171,7 +173,7 @@ std::vector<double> OptimizationOptions::getStartingPoint(H5::H5File const& file,
                                                           int index) {
     std::vector<double> startingPoint;
 
-    const char *path = "/optimizationOptions/randomStarts";
+    auto path = "/optimizationOptions/randomStarts";
 
     [[maybe_unused]] auto lock = hdf5MutexGetLock();

diff --git a/src/parpeoptimization/optimizationResultWriter.cpp b/src/parpeoptimization/optimizationResultWriter.cpp
index adff1379..85be40c9 100644
--- a/src/parpeoptimization/optimizationResultWriter.cpp
+++ b/src/parpeoptimization/optimizationResultWriter.cpp
@@ -28,7 +28,7 @@ OptimizationResultWriter::OptimizationResultWriter(const std::string &filename,
     rootPath(std::move(rootPath)) {
 
     logmessage(LOGLVL_DEBUG, "Writing results to %s.", filename.c_str());
-    file = hdf5CreateFile(filename.c_str(), overwrite);
+    file = hdf5CreateFile(filename, overwrite);
 
     hdf5EnsureGroupExists(file, this->rootPath);
 }
@@ -41,6 +41,11 @@ OptimizationResultWriter::OptimizationResultWriter(
     hdf5EnsureGroupExists(file, rootPath);
 }
 
+OptimizationResultWriter::~OptimizationResultWriter() {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+    file.close();
+}
+
 const std::string &OptimizationResultWriter::getRootPath() const {
     return rootPath;
 }
@@ -60,24 +65,25 @@ void OptimizationResultWriter::logObjectiveFunctionEvaluation(
     int numFunctionCalls,
     double timeElapsedInSeconds)
 {
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
     std::string pathStr = getIterationPath(numIterations);
     const char *fullGroupPath = pathStr.c_str();
 
     hdf5CreateOrExtendAndWriteToDouble2DArray(
-      file.getId(), fullGroupPath, "costFunCost",
+      file, fullGroupPath, "costFunCost",
       gsl::make_span(&objectiveFunctionValue, 1));
 
     if (logGradientEachFunctionEvaluation) {
         if (!objectiveFunctionGradient.empty()) {
             hdf5CreateOrExtendAndWriteToDouble2DArray(
-              file.getId(), fullGroupPath, "costFunGradient",
+              file, fullGroupPath, "costFunGradient",
               objectiveFunctionGradient);
         } else if (!parameters.empty()) {
             double dummyGradient[parameters.size()];
             std::fill_n(dummyGradient, parameters.size(), NAN);
             hdf5CreateOrExtendAndWriteToDouble2DArray(
-              file.getId(), fullGroupPath, "costFunGradient",
+              file, fullGroupPath, "costFunGradient",
               gsl::make_span(dummyGradient, parameters.size()));
         }
     }
 
@@ -86,15 +92,15 @@ void OptimizationResultWriter::logObjectiveFunctionEvaluation(
     if (logParametersEachFunctionEvaluation)
         if (!parameters.empty())
             hdf5CreateOrExtendAndWriteToDouble2DArray(
-              file.getId(), fullGroupPath, "costFunParameters",
+              file, fullGroupPath, "costFunParameters",
               parameters);
 
     hdf5CreateOrExtendAndWriteToDouble2DArray(
-      file.getId(), fullGroupPath, "costFunWallTimeInSec",
+      file, fullGroupPath, "costFunWallTimeInSec",
       gsl::make_span(&timeElapsedInSeconds, 1));
 
     hdf5CreateOrExtendAndWriteToInt2DArray(
-      file.getId(), fullGroupPath, "costFunCallIndex",
+      file, fullGroupPath, "costFunCallIndex",
       gsl::make_span(&numFunctionCalls, 1));
 
     flushResultWriter();
@@ -111,39 +117,41 @@ void OptimizationResultWriter::logOptimizerIteration(
     std::string const& pathStr = getRootPath();
     const char *fullGroupPath = pathStr.c_str();
 
+    [[maybe_unused]] auto lock = hdf5MutexGetLock();
+
     hdf5CreateOrExtendAndWriteToDouble2DArray(
-      file.getId(), fullGroupPath, "iterCostFunCost",
+      file, fullGroupPath, "iterCostFunCost",
      gsl::make_span(&objectiveFunctionValue, 1));
 
     if (logGradientEachIteration) {
         if (!gradient.empty()) {
             hdf5CreateOrExtendAndWriteToDouble2DArray(
-              file.getId(), fullGroupPath, "iterCostFunGradient",
+              file, fullGroupPath, "iterCostFunGradient",
               gradient);
         } else if (!parameters.empty()) {
             std::vector<double> nanGradient(parameters.size(), NAN);
             hdf5CreateOrExtendAndWriteToDouble2DArray(
-              file.getId(), fullGroupPath, "iterCostFunGradient",
+              file, fullGroupPath, "iterCostFunGradient",
               nanGradient);
         }
     }
 
     if (!parameters.empty()) {
         hdf5CreateOrExtendAndWriteToDouble2DArray(
-          file.getId(), fullGroupPath, "iterCostFunParameters",
+          file, fullGroupPath, "iterCostFunParameters",
           parameters);
     }
 
     hdf5CreateOrExtendAndWriteToDouble2DArray(
-      file.getId(), fullGroupPath, "iterCostFunWallSec",
+      file, fullGroupPath, "iterCostFunWallSec",
       gsl::make_span(&wallSeconds, 1));
 
     hdf5CreateOrExtendAndWriteToDouble2DArray(
-      file.getId(), fullGroupPath, "iterCostFunCpuSec",
+      file, fullGroupPath, "iterCostFunCpuSec",
       gsl::make_span(&cpuSeconds, 1));
 
     hdf5CreateOrExtendAndWriteToInt2DArray(
-      file.getId(), fullGroupPath, "iterIndex",
+      file, fullGroupPath, "iterIndex",
       gsl::make_span(&numIterations, 1));
 
     flushResultWriter();
@@ -165,11 +173,10 @@ void OptimizationResultWriter::setLoggingEachFunctionEvaluation(
 void OptimizationResultWriter::starting(
     gsl::span<const double> initialParameters) {
     if (!initialParameters.empty()) {
-        std::string const& pathStr = getRootPath();
-        const char *fullGroupPath = pathStr.c_str();
+        const auto& root_path = getRootPath();
 
         hdf5CreateOrExtendAndWriteToDouble2DArray(
-          file.getId(), fullGroupPath, "initialParameters",
+          file, root_path, "initialParameters",
           initialParameters);
 
         flushResultWriter();
     }
 }

From 72f29d96537ae0e8bb19df9fb262a173d82319ae Mon Sep 17 00:00:00 2001
From: Daniel Weindl
Date: Mon, 28 Jun 2021 19:06:10 +0200
Subject: [PATCH 03/10] Fix HDF5 generation for state reinitialization with
 estimated initial conditions (#344)

---
 python/parpe/hdf5_pe_input.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/python/parpe/hdf5_pe_input.py b/python/parpe/hdf5_pe_input.py
index c24e57bb..ef1c83fd 100644
--- a/python/parpe/hdf5_pe_input.py
+++ b/python/parpe/hdf5_pe_input.py
@@ -418,6 +418,18 @@ def _set_initial_concentration(condition_id, species_id,
                             species_idx = state_id_to_idx[species_id]
                             state_idxs_for_reinitialization_cur.append(
                                 species_idx)
+                        else:
+                            #
reinitialized, this parameter should never be + # used. We can set it to the same value as for + # preequilibration, to avoid issues with AMICI, + # where we cannot provide different values for + # dynamic parameter for preequilibration and + # simulation. + condition_map_sim[init_par_id] = \ + condition_map_preeq[init_par_id] + condition_scale_map_sim[init_par_id] = \ + condition_scale_map_preeq[init_par_id] # for simulation init_par_id = f'initial_{species_id}_sim' From 46589e5592cebcd6296c476c5caac3c69b61fc19 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Thu, 1 Jul 2021 19:52:18 +0200 Subject: [PATCH 04/10] Cleanup PEtab import (#346) --- python/parpe/hdf5_pe_input.py | 41 +++++++++++++++++------------------ 1 file changed, 20 insertions(+), 21 deletions(-) diff --git a/python/parpe/hdf5_pe_input.py b/python/parpe/hdf5_pe_input.py index ef1c83fd..3f6471c1 100644 --- a/python/parpe/hdf5_pe_input.py +++ b/python/parpe/hdf5_pe_input.py @@ -389,24 +389,22 @@ def _set_initial_concentration(condition_id, species_id, # for preequilibration init_par_id = f'initial_{species_id}_preeq' - # need to set dummy value for preeq parameter anyways, as it - # is expected below (set to 0, not nan, because will be - # multiplied with indicator variable in initial assignment) + # preeq initial parameter is always added during PEtab + # import, independently of whether preeq is used. Need to + # set dummy value for preeq parameter anyways, as + # it is expected in parameter mapping below + # (set to 0, not nan, because will be multiplied with + # indicator variable in initial assignment) condition_map_sim[init_par_id] = 0.0 condition_scale_map_sim[init_par_id] = ptc.LIN if preeq_cond_idx != NO_PREEQ_CONDITION_IDX: - value = petab.to_float_if_float( - self.petab_problem.condition_df.loc[ - preeq_cond_id, species_id]) - - if isinstance(value, Number): - condition_map_sim[init_par_id] = value _set_initial_concentration( preeq_cond_id, species_id, init_par_id, condition_map_preeq, condition_scale_map_preeq) + # Check if reinitialization is requested value_sim = petab.to_float_if_float( self.petab_problem.condition_df.loc[ sim_cond_id, species_id]) @@ -418,18 +416,19 @@ def _set_initial_concentration(condition_id, species_id, species_idx = state_id_to_idx[species_id] state_idxs_for_reinitialization_cur.append( species_idx) - else: - # If the state for the current species is not - # reinitialized, this parameter should never be - # used. We can set it to the same value as for - # preequilibration, to avoid issues with AMICI, - # where we cannot provide different values for - # dynamic parameter for preequilibration and - # simulation. - condition_map_sim[init_par_id] = \ - condition_map_preeq[init_par_id] - condition_scale_map_sim[init_par_id] = \ - condition_scale_map_preeq[init_par_id] + + # Set the preequilibration value also for simulation. + # Either it will be overwritten in the next step, + # or it will not be used anywhere. + # Setting it to the same value as for + # preequilibration avoids issues with AMICI, + # where we cannot provide different values for + # dynamic parameter for preequilibration and + # simulation. 
+ condition_map_sim[init_par_id] = \ + condition_map_preeq[init_par_id] + condition_scale_map_sim[init_par_id] = \ + condition_scale_map_preeq[init_par_id] # for simulation init_par_id = f'initial_{species_id}_sim' From e5e4fb3444ef8590aa0117b97d64c99f5a6ba2a3 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Thu, 1 Jul 2021 20:31:50 +0200 Subject: [PATCH 05/10] Update AMICI to develop@8ef53c88 (#347) git subrepo clone --branch=develop --force git@github.com:AMICI-dev/AMICI.git deps/AMICI subrepo: subdir: "deps/AMICI" merged: "8ef53c88" upstream: origin: "git@github.com:AMICI-dev/AMICI.git" branch: "develop" commit: "8ef53c88" git-subrepo: version: "0.4.1" origin: "https://github.com/ingydotnet/git-subrepo" commit: "a04d8c2" --- .../.github/workflows/deploy_release.yml | 25 +++++++++++++++ .../workflows/release_biosimulators.yml | 32 ------------------- deps/AMICI/.github/workflows/test_windows.yml | 2 +- deps/AMICI/.gitrepo | 6 ++-- deps/AMICI/include/amici/exception.h | 23 ++++++++++++- deps/AMICI/python/amici/__init__.py | 15 +++++++-- deps/AMICI/python/amici/petab_import.py | 7 +++- deps/AMICI/python/amici/petab_objective.py | 21 +++++++++--- deps/AMICI/src/CMakeLists.template.cmake | 7 ++-- deps/AMICI/src/exception.cpp | 25 +++++++++++++-- 10 files changed, 113 insertions(+), 50 deletions(-) delete mode 100644 deps/AMICI/.github/workflows/release_biosimulators.yml diff --git a/deps/AMICI/.github/workflows/deploy_release.yml b/deps/AMICI/.github/workflows/deploy_release.yml index 57eacbe9..543ad17c 100644 --- a/deps/AMICI/.github/workflows/deploy_release.yml +++ b/deps/AMICI/.github/workflows/deploy_release.yml @@ -33,3 +33,28 @@ jobs: password: ${{ secrets.pypi_password }} packages_dir: python/sdist/dist + bioSimulatorsUpdateCliAndDockerImage: + name: Release to BioSimulators + needs: pypi + runs-on: ubuntu-latest + env: + # Owner/repository-id for the GitHub repository for the downstream command-line interface and Docker image + DOWNSTREAM_REPOSITORY: biosimulators/Biosimulators_AMICI + + # Username/token to use the GitHub API to trigger an action on the GitHub repository for the downstream + # command-line interface and Docker image. Tokens can be generated at https://github.com/settings/tokens. 
+ # The token should have the scope `repo` + GH_ISSUE_USERNAME: ${{ secrets.BIOSIMULATORS_USERNAME }} + GH_ISSUE_TOKEN: ${{ secrets.BIOSIMULATORS_TOKEN }} + steps: + - name: Trigger GitHub action that will build and release the downstream command-line interface and Docker image + run: | + PACKAGE_VERSION="${GITHUB_REF/refs\/tags\/v/}" + WORKFLOW_FILE=ci.yml + + curl \ + -X POST \ + -u ${GH_ISSUE_USERNAME}:${GH_ISSUE_TOKEN} \ + -H "Accept: application/vnd.github.v3+json" \ + https://api.github.com/repos/${DOWNSTREAM_REPOSITORY}/actions/workflows/${WORKFLOW_FILE}/dispatches \ + -d "{\"ref\": \"dev\", \"inputs\": {\"simulatorVersion\": \"${PACKAGE_VERSION}\", \"simulatorVersionLatest\": \"true\"}}" diff --git a/deps/AMICI/.github/workflows/release_biosimulators.yml b/deps/AMICI/.github/workflows/release_biosimulators.yml deleted file mode 100644 index 53d5a555..00000000 --- a/deps/AMICI/.github/workflows/release_biosimulators.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Release to BioSimulators - -on: - release: - types: - - published - -jobs: - updateCliAndDockerImage: - name: Build and release downstream command-line interface and Docker image - runs-on: ubuntu-latest - env: - # Owner/repository-id for the GitHub repository for the downstream command-line interface and Docker image - DOWNSTREAM_REPOSITORY: biosimulators/Biosimulators_AMICI - - # Username/token to use the GitHub API to trigger an action on the GitHub repository for the downstream - # command-line interface and Docker image. Tokens can be generated at https://github.com/settings/tokens. - # The token should have the scope `repo` - GH_ISSUE_USERNAME: ${{ secrets.BIOSIMULATORS_USERNAME }} - GH_ISSUE_TOKEN: ${{ secrets.BIOSIMULATORS_TOKEN }} - steps: - - name: Trigger GitHub action that will build and release the downstream command-line interface and Docker image - run: | - PACKAGE_VERSION="${GITHUB_REF/refs\/tags\/v/}" - WORKFLOW_FILE=ci.yml - - curl \ - -X POST \ - -u ${GH_ISSUE_USERNAME}:${GH_ISSUE_TOKEN} \ - -H "Accept: application/vnd.github.v3+json" \ - https://api.github.com/repos/${DOWNSTREAM_REPOSITORY}/actions/workflows/${WORKFLOW_FILE}/dispatches \ - -d "{\"ref\": \"dev\", \"inputs\": {\"simulatorVersion\": \"${PACKAGE_VERSION}\", \"simulatorVersionLatest\": \"true\"}}" diff --git a/deps/AMICI/.github/workflows/test_windows.yml b/deps/AMICI/.github/workflows/test_windows.yml index 50a2cd1c..bf6c8d80 100644 --- a/deps/AMICI/.github/workflows/test_windows.yml +++ b/deps/AMICI/.github/workflows/test_windows.yml @@ -33,7 +33,7 @@ jobs: python -m pip install --upgrade pip \ && pip install pytest petab \ && choco install -y ninja \ - && choco install -y swig --version=4.0.1 + && choco install -y swig - name: Install OpenBLAS shell: powershell diff --git a/deps/AMICI/.gitrepo b/deps/AMICI/.gitrepo index 20b7889c..2cefac44 100644 --- a/deps/AMICI/.gitrepo +++ b/deps/AMICI/.gitrepo @@ -5,8 +5,8 @@ ; [subrepo] remote = git@github.com:ICB-DCM/AMICI.git - branch = v0.11.17 - commit = c0377065207b68c881143d203de1f56fd52878b9 - parent = d0bd8c19b9bb5426affdcd8386d893eee7940e80 + branch = develop + commit = 8ef53c8808437cafc171d6ab9545a59d61bb83af + parent = eb33a10abde3faf2835ce846482817df06afcfe1 cmdver = 0.4.1 method = merge diff --git a/deps/AMICI/include/amici/exception.h b/deps/AMICI/include/amici/exception.h index 6556e5fc..5d767fd4 100644 --- a/deps/AMICI/include/amici/exception.h +++ b/deps/AMICI/include/amici/exception.h @@ -15,6 +15,13 @@ namespace amici { */ class AmiException : public std::exception { public: + /** + * 
@brief Default constructor.
+     *
+     * Stores only the backtrace; no message is set.
+     */
+    AmiException();
+
     /**
      * @brief Constructor with printf style interface
      * @param fmt error message with printf format
      * @param ... printf formatting variables
      */
@@ -40,6 +47,14 @@ class AmiException : public std::exception {
      */
     void storeBacktrace(int nMaxFrames);
 
+  protected:
+    /**
+     * @brief Store the provided message
+     * @param fmt error message with printf format
+     * @param argptr pointer to variadic argument list
+     */
+    void storeMessage(const char *fmt, va_list argptr);
+
   private:
     std::array<char, 500> msg_;
    std::array<char, 500> trace_;
@@ -131,7 +146,13 @@ class IntegrationFailureB : public AmiException {
  */
 class SetupFailure : public AmiException {
   public:
-    using AmiException::AmiException;
+    /**
+     * @brief Constructor with printf style interface
+     * @param fmt error message with printf format
+     * @param ... printf formatting variables
+     */
+    explicit SetupFailure(char const* fmt, ...);
+
 };

diff --git a/deps/AMICI/python/amici/__init__.py b/deps/AMICI/python/amici/__init__.py
index bb474d89..be8ad826 100644
--- a/deps/AMICI/python/amici/__init__.py
+++ b/deps/AMICI/python/amici/__init__.py
@@ -28,7 +28,7 @@ import re
 import sys
 from contextlib import suppress
-from types import ModuleType
+from types import ModuleType as ModelModule
 from typing import Optional, Union, Sequence, List
 
 
@@ -134,6 +134,17 @@ def _imported_from_setup() -> bool:
     from .sbml_import import SbmlImporter, assignmentRules2observables
     from .ode_export import ODEModel, ODEExporter
 
+    try:
+        # Requires Python>=3.8
+        from typing import Protocol
+
+        class ModelModule(Protocol):
+            """Enable Python static type checking for AMICI-generated model modules"""
+            def getModel(self) -> amici.Model:
+                pass
+    except ImportError:
+        pass
+
 
 hdf5_enabled = 'readSolverSettingsFromHDF5' in dir()
 
@@ -283,7 +294,7 @@ def __exit__(self, exc_type, exc_value, traceback):
 
 
 def import_model_module(module_name: str,
-                        module_path: Optional[str] = None) -> ModuleType:
+                        module_path: Optional[str] = None) -> ModelModule:
     """
     Import Python module of an AMICI model

diff --git a/deps/AMICI/python/amici/petab_import.py b/deps/AMICI/python/amici/petab_import.py
index bfbbf744..4f8ad93c 100644
--- a/deps/AMICI/python/amici/petab_import.py
+++ b/deps/AMICI/python/amici/petab_import.py
@@ -378,7 +378,7 @@ def import_model_sbml(
         model_output_dir: Optional[str] = None,
         verbose: Optional[Union[bool, int]] = True,
         allow_reinit_fixpar_initcond: bool = True,
-        **kwargs) -> None:
+        **kwargs) -> amici.SbmlImporter:
     """
     Create AMICI model from PEtab problem
 
@@ -415,6 +415,9 @@ def import_model_sbml(
     :param kwargs:
         Additional keyword arguments to be passed to
        :meth:`amici.sbml_import.SbmlImporter.sbml2amici`.
+
+    :return:
+        The created :class:`amici.sbml_import.SbmlImporter` instance.
     """
 
     set_log_level(logger, verbose)
@@ -584,6 +587,8 @@ def import_model_sbml(
                            verbose=verbose,
                            **kwargs)
 
+    return sbml_importer
+
 
 # for backwards compatibility
 import_model = import_model_sbml

diff --git a/deps/AMICI/python/amici/petab_objective.py b/deps/AMICI/python/amici/petab_objective.py
index 6ae8ab0c..aadcbd49 100644
--- a/deps/AMICI/python/amici/petab_objective.py
+++ b/deps/AMICI/python/amici/petab_objective.py
@@ -17,6 +17,7 @@
 import numpy as np
 import pandas as pd
 import petab
+import sympy as sp
 from petab.C import *  # noqa: F403
 
 from .
import AmiciModel, AmiciExpData @@ -346,11 +347,21 @@ def _set_initial_concentration(condition_id, species_id, init_par_id, value = petab.to_float_if_float( petab_problem.condition_df.loc[condition_id, species_id]) if pd.isna(value): - value = float( - get_species_initial( - petab_problem.sbml_model.getSpecies(species_id) - ) + value = get_species_initial( + petab_problem.sbml_model.getSpecies(species_id) ) + try: + value = float(value) + except ValueError: + if sp.nsimplify(value).is_Atom: + # Get rid of multiplication with one + value = sp.nsimplify(value) + else: + raise NotImplementedError( + "Cannot handle non-trivial expressions for " + f"species initial for {species_id}: {value}") + # this should be a parameter ID + value = str(value) logger.debug(f'The species {species_id} has no initial value ' f'defined for the condition {condition_id} in ' 'the PEtab conditions table. The initial value is ' @@ -728,4 +739,4 @@ def rdatas_to_simulation_df( df = rdatas_to_measurement_df(rdatas=rdatas, model=model, measurement_df=measurement_df) - return df.rename(columns={MEASUREMENT: SIMULATION}) \ No newline at end of file + return df.rename(columns={MEASUREMENT: SIMULATION}) diff --git a/deps/AMICI/src/CMakeLists.template.cmake b/deps/AMICI/src/CMakeLists.template.cmake index a89fc829..6aa13400 100644 --- a/deps/AMICI/src/CMakeLists.template.cmake +++ b/deps/AMICI/src/CMakeLists.template.cmake @@ -18,10 +18,13 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_POSITION_INDEPENDENT_CODE ON) include(CheckCXXCompilerFlag) -set(MY_CXX_FLAGS -Wall -Wno-unused-function -Wno-unused-variable -Wno-unused-but-set-variable) +set(MY_CXX_FLAGS -Wall -Wno-unused-function -Wno-unused-variable) +if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + list(APPEND MY_CXX_FLAGS -Wno-unused-but-set-variable) +endif() foreach(FLAG ${MY_CXX_FLAGS}) unset(CUR_FLAG_SUPPORTED CACHE) - CHECK_CXX_COMPILER_FLAG(${FLAG} CUR_FLAG_SUPPORTED) + CHECK_CXX_COMPILER_FLAG(-Werror ${FLAG} CUR_FLAG_SUPPORTED) if(${CUR_FLAG_SUPPORTED}) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAG}") endif() diff --git a/deps/AMICI/src/exception.cpp b/deps/AMICI/src/exception.cpp index 97676bea..2d1f72ec 100644 --- a/deps/AMICI/src/exception.cpp +++ b/deps/AMICI/src/exception.cpp @@ -8,12 +8,18 @@ namespace amici { -AmiException::AmiException(const char *fmt, ...) { +AmiException::AmiException() +{ + storeBacktrace(12); +} + +AmiException::AmiException(const char *fmt, ...) + : AmiException() +{ va_list ap; va_start(ap, fmt); - vsnprintf(msg_.data(), msg_.size(), fmt, ap); + storeMessage(fmt, ap); va_end(ap); - storeBacktrace(12); } const char *AmiException::what() const noexcept { @@ -29,6 +35,11 @@ void AmiException::storeBacktrace(const int nMaxFrames) { backtraceString(nMaxFrames).c_str()); } +void AmiException::storeMessage(const char *fmt, va_list argptr) +{ + vsnprintf(msg_.data(), msg_.size(), fmt, argptr); +} + CvodeException::CvodeException(const int error_code, const char *function) : AmiException("Cvode routine %s failed with error code %i",function,error_code){} @@ -48,4 +59,12 @@ NewtonFailure::NewtonFailure(int code, const char *function) : error_code = code; } +SetupFailure::SetupFailure(const char *fmt, ...) 
+{ + va_list ap; + va_start(ap, fmt); + storeMessage(fmt, ap); + va_end(ap); +} + } // namespace amici From 98a0746a708be9e43c7daeb54ec38bcb6709f7d9 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Thu, 1 Jul 2021 22:29:15 +0200 Subject: [PATCH 06/10] Fixup AMICI https://github.com/AMICI-dev/AMICI/pull/1522 --- deps/AMICI/python/amici/petab_objective.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/AMICI/python/amici/petab_objective.py b/deps/AMICI/python/amici/petab_objective.py index aadcbd49..13e33d1f 100644 --- a/deps/AMICI/python/amici/petab_objective.py +++ b/deps/AMICI/python/amici/petab_objective.py @@ -352,7 +352,7 @@ def _set_initial_concentration(condition_id, species_id, init_par_id, ) try: value = float(value) - except ValueError: + except (ValueError, TypeError): if sp.nsimplify(value).is_Atom: # Get rid of multiplication with one value = sp.nsimplify(value) From 38589b2e0ddb150f0bcd83a4294ea9d7c8643ee1 Mon Sep 17 00:00:00 2001 From: PaulJonasJost Date: Tue, 13 Jul 2021 16:58:18 +0200 Subject: [PATCH 07/10] added parPE to pyPESTO optimization + history converter. --- misc/parpe_to_pypesto.py | 72 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 misc/parpe_to_pypesto.py diff --git a/misc/parpe_to_pypesto.py b/misc/parpe_to_pypesto.py new file mode 100644 index 00000000..b2a9f8a3 --- /dev/null +++ b/misc/parpe_to_pypesto.py @@ -0,0 +1,72 @@ +import h5py +import pypesto.optimize +import pypesto.objective +from pypesto.store.save_to_hdf5 import get_or_create_group +from pypesto.result import Result +from pypesto.store import write_result + + +def write_pypesto_history(fn_parpe, + fn_pypesto, + i_ms): + """ + Write the History from parPE file directly to pypesto file. + """ + f = h5py.File(fn_parpe, 'r') + g = h5py.File(fn_pypesto, 'w') + + history_grp = get_or_create_group(g, 'history') + ms_grp = get_or_create_group(history_grp, str(i_ms)) + trace_grp = get_or_create_group(ms_grp, 'trace') + iterations = len(f[f'multistarts/{i_ms}/iteration']) + + for i_iter in range(iterations): + iteration = get_or_create_group(trace_grp, str(i_iter)) + iteration['fval'] = f[f'multistarts/{i_ms}/iterCostFunCost'][:, i_iter] + iteration['grad'] = f[f'multistarts/{i_ms}/iterCostFunGradient'][:, i_iter] + iteration['x'] = f[f'multistarts/{i_ms}/iterCostFunParameters'][:, i_iter] + iteration['time'] = f[f'multistarts/{i_ms}/iterCostFunWallSec'][:, i_iter] + + f.close() + g.close() + + +def get_optimizer_result_from_parPE(fn_parpe, + i_ms): + """ + Fill in a `OptimizerResult` object from parpe history data. + """ + + result = pypesto.optimize.OptimizerResult() + + with h5py.File(fn_parpe, 'r') as f: + result.id = str(i_ms) + result.x = f[f'multistarts/{i_ms}/finalParameters'][()] + result.grad = f[f'multistarts/{i_ms}/iterCostFunGradient'][:, -1] + result.fval = f[f'multistarts/{i_ms}/finalCost'][()] + result.x0 = f[f'multistarts/{i_ms}/initialParameters'][()] + result.fval0 = f[f'multistarts/{i_ms}/iterCostFunParameters'][0, :] + result.n_fval = 0 + result.n_grad = 0 + for i_iter in f[f'multistarts/{i_ms}/iteration']: + result.n_grad += f[f'multistarts/{i_ms}/iteration/{i_iter}/costFunGradient'].shape[1] + result.n_fval += f[f'multistarts/{i_ms}/iteration/{i_iter}/costFunCost'].shape[1] + result.exitflag = f[f'multistarts/{i_ms}/exitStatus'][()] + + return result + + +def parpe_to_pypesto_history(fn_parpe, + fn_pypesto): + """ + Convert a parPE history file to a pypesto History file. 
+ """ + + with h5py.File(fn_parpe, 'r') as f: + result = Result() + for i_ms in f['multistarts']: + write_pypesto_history(fn_parpe=fn_parpe, + fn_pypesto=fn_pypesto, + i_ms=i_ms) + result.optimize_result.append(get_optimizer_result_from_parPE(fn_parpe, i_ms)) + write_result(result, fn_pypesto, problem=False, sample=False, profile=False) From 614fc0537c893e81f1b0651683cd580b99f01211 Mon Sep 17 00:00:00 2001 From: PaulJonasJost Date: Tue, 13 Jul 2021 17:03:33 +0200 Subject: [PATCH 08/10] updated to pep8 standard --- misc/parpe_to_pypesto.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/misc/parpe_to_pypesto.py b/misc/parpe_to_pypesto.py index b2a9f8a3..f2f96830 100644 --- a/misc/parpe_to_pypesto.py +++ b/misc/parpe_to_pypesto.py @@ -22,10 +22,14 @@ def write_pypesto_history(fn_parpe, for i_iter in range(iterations): iteration = get_or_create_group(trace_grp, str(i_iter)) - iteration['fval'] = f[f'multistarts/{i_ms}/iterCostFunCost'][:, i_iter] - iteration['grad'] = f[f'multistarts/{i_ms}/iterCostFunGradient'][:, i_iter] - iteration['x'] = f[f'multistarts/{i_ms}/iterCostFunParameters'][:, i_iter] - iteration['time'] = f[f'multistarts/{i_ms}/iterCostFunWallSec'][:, i_iter] + iteration['fval'] = \ + f[f'multistarts/{i_ms}/iterCostFunCost'][:, i_iter] + iteration['grad'] = \ + f[f'multistarts/{i_ms}/iterCostFunGradient'][:, i_iter] + iteration['x'] = \ + f[f'multistarts/{i_ms}/iterCostFunParameters'][:, i_iter] + iteration['time'] = \ + f[f'multistarts/{i_ms}/iterCostFunWallSec'][:, i_iter] f.close() g.close() @@ -49,8 +53,10 @@ def get_optimizer_result_from_parPE(fn_parpe, result.n_fval = 0 result.n_grad = 0 for i_iter in f[f'multistarts/{i_ms}/iteration']: - result.n_grad += f[f'multistarts/{i_ms}/iteration/{i_iter}/costFunGradient'].shape[1] - result.n_fval += f[f'multistarts/{i_ms}/iteration/{i_iter}/costFunCost'].shape[1] + result.n_grad += f[f'multistarts/{i_ms}/' + f'iteration/{i_iter}/costFunGradient'].shape[1] + result.n_fval += f[f'multistarts/{i_ms}/' + f'iteration/{i_iter}/costFunCost'].shape[1] result.exitflag = f[f'multistarts/{i_ms}/exitStatus'][()] return result @@ -68,5 +74,7 @@ def parpe_to_pypesto_history(fn_parpe, write_pypesto_history(fn_parpe=fn_parpe, fn_pypesto=fn_pypesto, i_ms=i_ms) - result.optimize_result.append(get_optimizer_result_from_parPE(fn_parpe, i_ms)) - write_result(result, fn_pypesto, problem=False, sample=False, profile=False) + result.optimize_result.append( + get_optimizer_result_from_parPE(fn_parpe, i_ms)) + write_result(result, + fn_pypesto, problem=False, sample=False, profile=False) From e504813d95ab1b9f8efe409dd53f114b2cd8dc50 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Thu, 8 Sep 2022 19:17:46 +0200 Subject: [PATCH 09/10] Cleanup; proper command line interface --- misc/parpe_to_pypesto.py | 157 +++++++++++++++++++++++++-------------- 1 file changed, 100 insertions(+), 57 deletions(-) mode change 100644 => 100755 misc/parpe_to_pypesto.py diff --git a/misc/parpe_to_pypesto.py b/misc/parpe_to_pypesto.py old mode 100644 new mode 100755 index f2f96830..0cad6c98 --- a/misc/parpe_to_pypesto.py +++ b/misc/parpe_to_pypesto.py @@ -1,80 +1,123 @@ +#!/usr/bin/env python3 +import argparse +from pathlib import Path +from typing import Union + import h5py -import pypesto.optimize -import pypesto.objective -from pypesto.store.save_to_hdf5 import get_or_create_group -from pypesto.result import Result -from pypesto.store import write_result +from pypesto.result.optimize import OptimizerResult as 
PypestoOptimizerResult +from pypesto.result.result import Result as PypestoResult +from pypesto.store import write_result as write_pypesto_result -def write_pypesto_history(fn_parpe, - fn_pypesto, - i_ms): +def write_pypesto_history( + fn_parpe: Union[str, Path], + fn_pypesto: Union[str, Path], + i_ms: Union[int, str] +): """ Write the History from parPE file directly to pypesto file. """ - f = h5py.File(fn_parpe, 'r') - g = h5py.File(fn_pypesto, 'w') - - history_grp = get_or_create_group(g, 'history') - ms_grp = get_or_create_group(history_grp, str(i_ms)) - trace_grp = get_or_create_group(ms_grp, 'trace') - iterations = len(f[f'multistarts/{i_ms}/iteration']) - - for i_iter in range(iterations): - iteration = get_or_create_group(trace_grp, str(i_iter)) - iteration['fval'] = \ - f[f'multistarts/{i_ms}/iterCostFunCost'][:, i_iter] - iteration['grad'] = \ - f[f'multistarts/{i_ms}/iterCostFunGradient'][:, i_iter] - iteration['x'] = \ - f[f'multistarts/{i_ms}/iterCostFunParameters'][:, i_iter] - iteration['time'] = \ - f[f'multistarts/{i_ms}/iterCostFunWallSec'][:, i_iter] - - f.close() - g.close() - - -def get_optimizer_result_from_parPE(fn_parpe, - i_ms): + with h5py.File(fn_parpe, 'r') as f: + with h5py.File(fn_pypesto, 'a') as g: + trace_grp = g.require_group(f'/history/{i_ms}/trace') + + iterations = len(f[f'multistarts/{i_ms}/iteration']) + + for i_iter in range(iterations): + start_group_in = f[f'multistarts/{i_ms}'] + iteration_grp = trace_grp.require_group(str(i_iter)) + + iteration_grp['fval'] = \ + start_group_in['iterCostFunCost'][:, i_iter] + iteration_grp['grad'] = \ + start_group_in['iterCostFunGradient'][:, i_iter] + iteration_grp['x'] = \ + start_group_in['iterCostFunParameters'][:, i_iter] + iteration_grp['time'] = \ + start_group_in['iterCostFunWallSec'][:, i_iter] + + +def pypesto_optimizer_result_from_parpe( + fn_parpe: Union[Path, str], + i_ms: Union[int, str] +) -> PypestoOptimizerResult: """ - Fill in a `OptimizerResult` object from parpe history data. + Fill in a `pypesto.OptimizerResult` object from parpe history data. 
""" - - result = pypesto.optimize.OptimizerResult() + result = PypestoOptimizerResult() with h5py.File(fn_parpe, 'r') as f: result.id = str(i_ms) - result.x = f[f'multistarts/{i_ms}/finalParameters'][()] - result.grad = f[f'multistarts/{i_ms}/iterCostFunGradient'][:, -1] - result.fval = f[f'multistarts/{i_ms}/finalCost'][()] - result.x0 = f[f'multistarts/{i_ms}/initialParameters'][()] - result.fval0 = f[f'multistarts/{i_ms}/iterCostFunParameters'][0, :] + ms_grp = f[f'multistarts/{i_ms}/'] + result.x = ms_grp['finalParameters'][()] + result.grad = ms_grp['iterCostFunGradient'][:, -1] + result.fval = ms_grp['finalCost'][()] + result.x0 = ms_grp['initialParameters'][()] + result.fval0 = ms_grp['iterCostFunParameters'][0, :] + result.exitflag = ms_grp['exitStatus'][()] result.n_fval = 0 result.n_grad = 0 - for i_iter in f[f'multistarts/{i_ms}/iteration']: - result.n_grad += f[f'multistarts/{i_ms}/' - f'iteration/{i_iter}/costFunGradient'].shape[1] - result.n_fval += f[f'multistarts/{i_ms}/' - f'iteration/{i_iter}/costFunCost'].shape[1] - result.exitflag = f[f'multistarts/{i_ms}/exitStatus'][()] + for i_iter in ms_grp['iteration']: + iter_grp = ms_grp[f'iteration/{i_iter}'] + result.n_grad += iter_grp['costFunGradient'].shape[1] + result.n_fval += iter_grp['costFunCost'].shape[1] return result -def parpe_to_pypesto_history(fn_parpe, - fn_pypesto): +def parpe_to_pypesto_history( + fn_parpe: Union[str, Path], + fn_pypesto: Union[str, Path], +): """ Convert a parPE history file to a pypesto History file. """ - + result = PypestoResult() with h5py.File(fn_parpe, 'r') as f: - result = Result() for i_ms in f['multistarts']: - write_pypesto_history(fn_parpe=fn_parpe, - fn_pypesto=fn_pypesto, - i_ms=i_ms) + write_pypesto_history( + fn_parpe=fn_parpe, + fn_pypesto=fn_pypesto, + i_ms=i_ms + ) result.optimize_result.append( - get_optimizer_result_from_parPE(fn_parpe, i_ms)) - write_result(result, - fn_pypesto, problem=False, sample=False, profile=False) + pypesto_optimizer_result_from_parpe(fn_parpe, i_ms) + ) + write_pypesto_result( + result, fn_pypesto, + optimize=True, problem=False, sample=False, profile=False + ) + + +def parse_args(): + """ + Parse command line arguments + + :return: + Parsed CLI arguments from :mod:`argparse`. 
+ """ + parser = argparse.ArgumentParser( + description='Convert parPE result file to pypesto HDF5 result file') + + parser.add_argument( + '-i', dest='parpe_file', + help='parPE result file to convert', + required=True + ) + parser.add_argument( + '-o', dest='pypesto_file', + help='pypesto file to write', + required=True + ) + + return parser.parse_args() + + +def main(): + args = parse_args() + parpe_to_pypesto_history(fn_pypesto=args.pypesto_file, + fn_parpe=args.parpe_file) + + +if __name__ == '__main__': + main() From 4199f387d17018ebbcab2eace69594cc5d1edd69 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Thu, 8 Sep 2022 22:46:06 +0200 Subject: [PATCH 10/10] fixup,cleanup --- misc/parpe_to_pypesto.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/misc/parpe_to_pypesto.py b/misc/parpe_to_pypesto.py index 0cad6c98..b8960ec3 100755 --- a/misc/parpe_to_pypesto.py +++ b/misc/parpe_to_pypesto.py @@ -7,6 +7,7 @@ from pypesto.result.optimize import OptimizerResult as PypestoOptimizerResult from pypesto.result.result import Result as PypestoResult from pypesto.store import write_result as write_pypesto_result +from pypesto.C import HISTORY, TRACE, N_ITERATIONS def write_pypesto_history( @@ -19,7 +20,7 @@ def write_pypesto_history( """ with h5py.File(fn_parpe, 'r') as f: with h5py.File(fn_pypesto, 'a') as g: - trace_grp = g.require_group(f'/history/{i_ms}/trace') + trace_grp = g.require_group(f'/{HISTORY}/{i_ms}/{TRACE}') iterations = len(f[f'multistarts/{i_ms}/iteration']) @@ -34,7 +35,9 @@ def write_pypesto_history( iteration_grp['x'] = \ start_group_in['iterCostFunParameters'][:, i_iter] iteration_grp['time'] = \ - start_group_in['iterCostFunWallSec'][:, i_iter] + start_group_in['iterCostFunWallSec'][:, i_iter].squeeze() + + trace_grp.attrs[N_ITERATIONS] = iterations def pypesto_optimizer_result_from_parpe(