From 3797996e8904373079cdde60d0624453883984d5 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 28 Sep 2024 10:48:51 -0700 Subject: [PATCH 01/18] Store the values --- gtsam/hybrid/HybridGaussianFactor.cpp | 68 ++++++++++--------- gtsam/hybrid/HybridGaussianFactor.h | 15 ++-- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 4 +- gtsam/hybrid/HybridNonlinearFactor.cpp | 4 +- .../hybrid/tests/testSerializationHybrid.cpp | 6 +- 5 files changed, 49 insertions(+), 48 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index b04db4977e..d3b26d4ef8 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -27,14 +27,16 @@ #include #include +#include "gtsam/base/types.h" + namespace gtsam { /* *******************************************************************************/ -HybridGaussianFactor::Factors HybridGaussianFactor::augment( +HybridGaussianFactor::FactorValuePairs HybridGaussianFactor::augment( const FactorValuePairs &factors) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. - Factors gaussianFactors; + DecisionTree gaussianFactors; AlgebraicDecisionTree valueTree; std::tie(gaussianFactors, valueTree) = unzip(factors); @@ -42,16 +44,16 @@ HybridGaussianFactor::Factors HybridGaussianFactor::augment( double min_value = valueTree.min(); // Finally, update the [A|b] matrices. - auto update = [&min_value](const GaussianFactorValuePair &gfv) { + auto update = [&min_value](const auto &gfv) -> GaussianFactorValuePair { auto [gf, value] = gfv; auto jf = std::dynamic_pointer_cast(gf); - if (!jf) return gf; + if (!jf) return {gf, 0.0}; // should this be zero or infinite? 
double normalized_value = value - min_value; // If the value is 0, do nothing - if (normalized_value == 0.0) return gf; + if (normalized_value == 0.0) return {gf, 0.0}; GaussianFactorGraph gfg; gfg.push_back(jf); @@ -62,18 +64,16 @@ HybridGaussianFactor::Factors HybridGaussianFactor::augment( auto constantFactor = std::make_shared(c); gfg.push_back(constantFactor); - return std::dynamic_pointer_cast( - std::make_shared(gfg)); + return {std::make_shared(gfg), normalized_value}; }; - return Factors(factors, update); + return FactorValuePairs(factors, update); } /* *******************************************************************************/ struct HybridGaussianFactor::ConstructorHelper { KeyVector continuousKeys; // Continuous keys extracted from factors DiscreteKeys discreteKeys; // Discrete keys provided to the constructors - FactorValuePairs pairs; // Used only if factorsTree is empty - Factors factorsTree; + FactorValuePairs pairs; // The decision tree with factors and scalars ConstructorHelper(const DiscreteKey &discreteKey, const std::vector &factors) @@ -85,9 +85,10 @@ struct HybridGaussianFactor::ConstructorHelper { break; } } - - // Build the DecisionTree from the factor vector - factorsTree = Factors(discreteKeys, factors); + // Build the FactorValuePairs DecisionTree + pairs = FactorValuePairs( + DecisionTree(discreteKeys, factors), + [](const auto &f) { return std::pair{f, 0.0}; }); } ConstructorHelper(const DiscreteKey &discreteKey, @@ -109,6 +110,7 @@ struct HybridGaussianFactor::ConstructorHelper { const FactorValuePairs &factorPairs) : discreteKeys(discreteKeys) { // Extract continuous keys from the first non-null factor + // TODO: just stop after first non-null factor factorPairs.visit([&](const GaussianFactorValuePair &pair) { if (pair.first && continuousKeys.empty()) { continuousKeys = pair.first->keys(); @@ -123,14 +125,13 @@ struct HybridGaussianFactor::ConstructorHelper { /* 
*******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper &helper) : Base(helper.continuousKeys, helper.discreteKeys), - factors_(helper.factorsTree.empty() ? augment(helper.pairs) - : helper.factorsTree) {} + factors_(augment(helper.pairs)) {} /* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor( const DiscreteKey &discreteKey, - const std::vector &factors) - : HybridGaussianFactor(ConstructorHelper(discreteKey, factors)) {} + const std::vector &factorPairs) + : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {} /* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor( @@ -140,8 +141,8 @@ HybridGaussianFactor::HybridGaussianFactor( /* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys, - const FactorValuePairs &factors) - : HybridGaussianFactor(ConstructorHelper(discreteKeys, factors)) {} + const FactorValuePairs &factorPairs) + : HybridGaussianFactor(ConstructorHelper(discreteKeys, factorPairs)) {} /* *******************************************************************************/ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { @@ -153,10 +154,12 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { if (factors_.empty() ^ e->factors_.empty()) return false; // Check the base and the factors: - return Base::equals(*e, tol) && - factors_.equals(e->factors_, [tol](const auto &f1, const auto &f2) { - return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol)); - }); + auto compareFunc = [tol](const auto &pair1, const auto &pair2) { + auto f1 = pair1.first, f2 = pair2.first; + bool match = (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol)); + return match && 
gtsam::equal(pair1.second, pair2.second, tol); + }; + return Base::equals(*e, tol) && factors_.equals(e->factors_, compareFunc); } /* *******************************************************************************/ @@ -171,15 +174,16 @@ void HybridGaussianFactor::print(const std::string &s, } else { factors_.print( "", [&](Key k) { return formatter(k); }, - [&](const sharedFactor &gf) -> std::string { + [&](const auto &pair) -> std::string { RedirectCout rd; std::cout << ":\n"; - if (gf) { - gf->print("", formatter); + if (pair.first) { + pair.first->print("", formatter); return rd.str(); } else { return "nullptr"; } + std::cout << "scalar: " << pair.second << "\n"; }); } std::cout << "}" << std::endl; @@ -188,7 +192,7 @@ void HybridGaussianFactor::print(const std::string &s, /* *******************************************************************************/ HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()( const DiscreteValues &assignment) const { - return factors_(assignment); + return factors_(assignment).first; } /* *******************************************************************************/ @@ -207,7 +211,7 @@ GaussianFactorGraphTree HybridGaussianFactor::add( /* *******************************************************************************/ GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() const { - auto wrap = [](const sharedFactor &gf) { return GaussianFactorGraph{gf}; }; + auto wrap = [](const auto &pair) { return GaussianFactorGraph{pair.first}; }; return {factors_, wrap}; } @@ -229,8 +233,8 @@ static double PotentiallyPrunedComponentError( AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. 
- auto errorFunc = [&continuousValues](const sharedFactor &gf) { - return PotentiallyPrunedComponentError(gf, continuousValues); + auto errorFunc = [this, &continuousValues](const auto &pair) { + return PotentiallyPrunedComponentError(pair.first, continuousValues); }; DecisionTree error_tree(factors_, errorFunc); return error_tree; @@ -239,8 +243,8 @@ AlgebraicDecisionTree HybridGaussianFactor::errorTree( /* *******************************************************************************/ double HybridGaussianFactor::error(const HybridValues &values) const { // Directly index to get the component, no need to build the whole tree. - const sharedFactor gf = factors_(values.discrete()); - return PotentiallyPrunedComponentError(gf, values.continuous()); + const auto pair = factors_(values.discrete()); + return PotentiallyPrunedComponentError(pair.first, values.continuous()); } } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index e5a5754094..15993f5823 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -66,12 +66,10 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// typedef for Decision Tree of Gaussian factors and arbitrary value. using FactorValuePairs = DecisionTree; - /// typedef for Decision Tree of Gaussian factors. - using Factors = DecisionTree; private: /// Decision tree of Gaussian factors indexed by discrete keys. - Factors factors_; + FactorValuePairs factors_; public: /// @name Constructors @@ -110,10 +108,10 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * The value ϕ(x,M) for the factor is again ϕ_m(x) + E_m. * * @param discreteKeys Discrete variables and their cardinalities. - * @param factors The decision tree of Gaussian factor/scalar pairs. + * @param factorPairs The decision tree of Gaussian factor/scalar pairs. 
*/ HybridGaussianFactor(const DiscreteKeys &discreteKeys, - const FactorValuePairs &factors); + const FactorValuePairs &factorPairs); /// @} /// @name Testable @@ -158,7 +156,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { double error(const HybridValues &values) const override; /// Getter for GaussianFactor decision tree - const Factors &factors() const { return factors_; } + const FactorValuePairs &factors() const { return factors_; } /// Add HybridNonlinearFactor to a Sum, syntactic sugar. friend GaussianFactorGraphTree &operator+=( @@ -184,10 +182,9 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * value in the `b` vector as an additional row. * * @param factors DecisionTree of GaussianFactors and arbitrary scalars. - * Gaussian factor in factors. - * @return HybridGaussianFactor::Factors + * @return FactorValuePairs */ - static Factors augment(const FactorValuePairs &factors); + static FactorValuePairs augment(const FactorValuePairs &factors); /// Helper struct to assist private constructor below. struct ConstructorHelper; diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 7dfa56e77d..957a850387 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -238,8 +238,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } else if (auto gmf = dynamic_pointer_cast(f)) { // Case where we have a HybridGaussianFactor with no continuous keys. // In this case, compute discrete probabilities. 
- auto logProbability = - [&](const GaussianFactor::shared_ptr &factor) -> double { + auto logProbability = [&](const auto &pair) -> double { + auto [factor, _] = pair; if (!factor) return 0.0; return factor->error(VectorValues()); }; diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index 9378d07fe2..56711b313e 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -196,8 +196,8 @@ std::shared_ptr HybridNonlinearFactor::linearize( } }; - DecisionTree> - linearized_factors(factors_, linearizeDT); + HybridGaussianFactor::FactorValuePairs linearized_factors(factors_, + linearizeDT); return std::make_shared(discreteKeys_, linearized_factors); diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index e09669117e..8258d8615e 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -52,11 +52,11 @@ BOOST_CLASS_EXPORT_GUID(ADT::Leaf, "gtsam_AlgebraicDecisionTree_Leaf"); BOOST_CLASS_EXPORT_GUID(ADT::Choice, "gtsam_AlgebraicDecisionTree_Choice") BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor, "gtsam_HybridGaussianFactor"); -BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors, +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::FactorValuePairs, "gtsam_HybridGaussianFactor_Factors"); -BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Leaf, +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::FactorValuePairs::Leaf, "gtsam_HybridGaussianFactor_Factors_Leaf"); -BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::Factors::Choice, +BOOST_CLASS_EXPORT_GUID(HybridGaussianFactor::FactorValuePairs::Choice, "gtsam_HybridGaussianFactor_Factors_Choice"); BOOST_CLASS_EXPORT_GUID(HybridGaussianConditional, From 14d1594bd177de4b85c9feaa717eda9c4fa3dcaa Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 1 Oct 2024 14:26:11 -0400 Subject: [PATCH 02/18] implement errorTree for 
HybridNonlinearFactorGraph --- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 31 +++++++++++++++++++++ gtsam/hybrid/HybridNonlinearFactorGraph.h | 13 +++++++++ 2 files changed, 44 insertions(+) diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 6d19ef156d..0e7e9c692d 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -179,4 +179,35 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( return linearFG; } +/* ************************************************************************* */ +AlgebraicDecisionTree HybridNonlinearFactorGraph::errorTree( + const Values& values) const { + AlgebraicDecisionTree result(0.0); + + // Iterate over each factor. + for (auto& factor : factors_) { + if (auto hnf = std::dynamic_pointer_cast(factor)) { + // Compute factor error and add it. + result = result + hnf->errorTree(values); + + } else if (auto nf = std::dynamic_pointer_cast(factor)) { + // If continuous only, get the (double) error + // and add it to every leaf of the result + result = result + nf->error(values); + + } else if (auto df = std::dynamic_pointer_cast(factor)) { + // If discrete, just add its errorTree as well + result = result + df->errorTree(); + + } else { + throw std::runtime_error( + "HybridNonlinearFactorGraph::errorTree(Values) not implemented for " + "factor type " + + demangle(typeid(factor).name()) + "."); + } + } + + return result; +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index 5a09f18d45..53920a4aad 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -90,6 +90,19 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { /// Expose error(const HybridValues&) method. 
using Base::error; + /** + * @brief Compute error of (hybrid) nonlinear factors and discrete factors + * over each discrete assignment, and return as a tree. + * + * Error \f$ e = \Vert f(x) - \mu \Vert_{\Sigma} \f$. + * + * @note: Gaussian and hybrid Gaussian factors are not considered! + * + * @param values Manifold values at which to compute the error. + * @return AlgebraicDecisionTree + */ + AlgebraicDecisionTree errorTree(const Values& values) const; + /// @} }; From 1bb5b9551b6a8ff57c4039c436b15226e34e11f0 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Tue, 1 Oct 2024 21:41:58 -0700 Subject: [PATCH 03/18] discretePosterior in HNFG --- gtsam/hybrid/HybridGaussianFactor.cpp | 2 +- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 18 +++++++++++++++--- gtsam/hybrid/HybridNonlinearFactorGraph.h | 17 +++++++++++++++-- 3 files changed, 31 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index d3b26d4ef8..f1e8a8498a 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -233,7 +233,7 @@ static double PotentiallyPrunedComponentError( AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. 
- auto errorFunc = [this, &continuousValues](const auto &pair) { + auto errorFunc = [&continuousValues](const auto &pair) { return PotentiallyPrunedComponentError(pair.first, continuousValues); }; DecisionTree error_tree(factors_, errorFunc); diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 0e7e9c692d..56b75d15ee 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -181,19 +181,19 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( /* ************************************************************************* */ AlgebraicDecisionTree HybridNonlinearFactorGraph::errorTree( - const Values& values) const { + const Values& continuousValues) const { AlgebraicDecisionTree result(0.0); // Iterate over each factor. for (auto& factor : factors_) { if (auto hnf = std::dynamic_pointer_cast(factor)) { // Compute factor error and add it. - result = result + hnf->errorTree(values); + result = result + hnf->errorTree(continuousValues); } else if (auto nf = std::dynamic_pointer_cast(factor)) { // If continuous only, get the (double) error // and add it to every leaf of the result - result = result + nf->error(values); + result = result + nf->error(continuousValues); } else if (auto df = std::dynamic_pointer_cast(factor)) { // If discrete, just add its errorTree as well @@ -210,4 +210,16 @@ AlgebraicDecisionTree HybridNonlinearFactorGraph::errorTree( return result; } +/* ************************************************************************ */ +AlgebraicDecisionTree HybridNonlinearFactorGraph::discretePosterior( + const Values& continuousValues) const { + AlgebraicDecisionTree errors = this->errorTree(continuousValues); + AlgebraicDecisionTree p = errors.apply([](double error) { + // NOTE: The 0.5 term is handled by each factor + return exp(-error); + }); + return p / p.sum(); +} + +/* 
************************************************************************ */
 } // namespace gtsam
diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h
index 53920a4aad..dd18cfa601 100644
--- a/gtsam/hybrid/HybridNonlinearFactorGraph.h
+++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h
@@ -98,10 +98,23 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph {
    *
    * @note: Gaussian and hybrid Gaussian factors are not considered!
    *
-   * @param values Manifold values at which to compute the error.
+   * @param continuousValues Manifold values at which to compute the error.
    * @return AlgebraicDecisionTree<double>
    */
-  AlgebraicDecisionTree<double> errorTree(const Values& values) const;
+  AlgebraicDecisionTree<double> errorTree(const Values& continuousValues) const;
+
+  /**
+   * @brief Compute posterior P(M|X=x) when all continuous values X are given.
+   * This is efficient as this simply takes exp(-.) of errorTree and normalizes.
+   *
+   * @note Not a DiscreteConditional as the cardinalities of the DiscreteKeys,
+   * which we would need, are hard to recover.
+   *
+   * @param continuousValues Continuous values x to condition on.
+ * @return DecisionTreeFactor + */ + AlgebraicDecisionTree discretePosterior( + const Values& continuousValues) const; /// @} }; From 8b3dfd85e70099f43d18260c72020116b7495e64 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Wed, 2 Oct 2024 08:39:18 -0700 Subject: [PATCH 04/18] New product factor class --- gtsam/hybrid/HybridGaussianFactor.cpp | 10 +- gtsam/hybrid/HybridGaussianFactor.h | 12 +- gtsam/hybrid/HybridGaussianProductFactor.cpp | 89 +++++++++ gtsam/hybrid/HybridGaussianProductFactor.h | 117 +++++++++++ .../tests/testHybridGaussianProductFactor.cpp | 185 ++++++++++++++++++ 5 files changed, 410 insertions(+), 3 deletions(-) create mode 100644 gtsam/hybrid/HybridGaussianProductFactor.cpp create mode 100644 gtsam/hybrid/HybridGaussianProductFactor.h create mode 100644 gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index f1e8a8498a..aa88ded30b 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -18,17 +18,17 @@ * @date Mar 12, 2022 */ +#include #include #include #include #include #include +#include #include #include #include -#include "gtsam/base/types.h" - namespace gtsam { /* *******************************************************************************/ @@ -215,6 +215,12 @@ GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() return {factors_, wrap}; } +/* *******************************************************************************/ +HybridGaussianProductFactor HybridGaussianFactor::asProductFactor() const { + return {{factors_, + [](const auto &pair) { return GaussianFactorGraph{pair.first}; }}}; +} + /* *******************************************************************************/ /// Helper method to compute the error of a component. 
static double PotentiallyPrunedComponentError( diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 15993f5823..d160798b6b 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -24,6 +24,7 @@ #include #include #include +#include #include #include @@ -164,6 +165,14 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { sum = factor.add(sum); return sum; } + + /** + * @brief Helper function to return factors and functional to create a + * DecisionTree of Gaussian Factor Graphs. + * + * @return HybridGaussianProductFactor + */ + virtual HybridGaussianProductFactor asProductFactor() const; /// @} protected: @@ -175,7 +184,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { */ GaussianFactorGraphTree asGaussianFactorGraphTree() const; - private: + +private: /** * @brief Helper function to augment the [A|b] matrices in the factor * components with the additional scalar values. This is done by storing the diff --git a/gtsam/hybrid/HybridGaussianProductFactor.cpp b/gtsam/hybrid/HybridGaussianProductFactor.cpp new file mode 100644 index 0000000000..c9b4c07dd6 --- /dev/null +++ b/gtsam/hybrid/HybridGaussianProductFactor.cpp @@ -0,0 +1,89 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
(see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridGaussianProductFactor.h + * @date Oct 2, 2024 + * @author Frank Dellaert + * @author Varun Agrawal + */ + +#include +#include +#include +#include +#include + +namespace gtsam { + +static GaussianFactorGraph add(const GaussianFactorGraph &graph1, + const GaussianFactorGraph &graph2) { + auto result = graph1; + result.push_back(graph2); + return result; +}; + +HybridGaussianProductFactor operator+(const HybridGaussianProductFactor &a, + const HybridGaussianProductFactor &b) { + return a.empty() ? b : HybridGaussianProductFactor(a.apply(b, add)); +} + +HybridGaussianProductFactor HybridGaussianProductFactor::operator+( + const HybridGaussianFactor &factor) const { + return *this + factor.asProductFactor(); +} + +HybridGaussianProductFactor HybridGaussianProductFactor::operator+( + const GaussianFactor::shared_ptr &factor) const { + return *this + HybridGaussianProductFactor(factor); +} + +HybridGaussianProductFactor &HybridGaussianProductFactor::operator+=( + const GaussianFactor::shared_ptr &factor) { + *this = *this + factor; + return *this; +} + +HybridGaussianProductFactor & +HybridGaussianProductFactor::operator+=(const HybridGaussianFactor &factor) { + *this = *this + factor; + return *this; +} + +void HybridGaussianProductFactor::print(const std::string &s, + const KeyFormatter &formatter) const { + KeySet keys; + auto printer = [&](const Y &graph) { + if (keys.size() == 0) + keys = graph.keys(); + return "Graph of size " + std::to_string(graph.size()); + }; + Base::print(s, formatter, printer); + if (keys.size() > 0) { + std::stringstream ss; + ss << s << " Keys:"; + for (auto &&key : keys) + ss << " " << formatter(key); + std::cout << ss.str() << "." 
<< std::endl; + } +} + +HybridGaussianProductFactor HybridGaussianProductFactor::removeEmpty() const { + auto emptyGaussian = [](const GaussianFactorGraph &graph) { + bool hasNull = + std::any_of(graph.begin(), graph.end(), + [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); + return hasNull ? GaussianFactorGraph() : graph; + }; + return {Base(*this, emptyGaussian)}; +} + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianProductFactor.h b/gtsam/hybrid/HybridGaussianProductFactor.h new file mode 100644 index 0000000000..f1bd8bc3c5 --- /dev/null +++ b/gtsam/hybrid/HybridGaussianProductFactor.h @@ -0,0 +1,117 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridGaussianProductFactor.h + * @date Oct 2, 2024 + * @author Frank Dellaert + * @author Varun Agrawal + */ + +#pragma once + +#include +#include +#include + +namespace gtsam { + +class HybridGaussianFactor; + +/// Alias for DecisionTree of GaussianFactorGraphs +class HybridGaussianProductFactor : public DecisionTree { + public: + using Y = GaussianFactorGraph; + using Base = DecisionTree; + + /// @name Constructors + /// @{ + + /// Default constructor + HybridGaussianProductFactor() = default; + + /** + * @brief Construct from a single factor + * @tparam FACTOR Factor type + * @param factor Shared pointer to the factor + */ + template + HybridGaussianProductFactor(const std::shared_ptr& factor) : Base(Y{factor}) {} + + /** + * @brief Construct from DecisionTree + * @param tree Decision tree to construct from + */ + HybridGaussianProductFactor(Base&& tree) : Base(std::move(tree)) {} + + ///@} + + /// @name Operators + 
///@{ + + /// Add GaussianFactor into HybridGaussianProductFactor + HybridGaussianProductFactor operator+(const GaussianFactor::shared_ptr& factor) const; + + /// Add HybridGaussianFactor into HybridGaussianProductFactor + HybridGaussianProductFactor operator+(const HybridGaussianFactor& factor) const; + + /// Add-assign operator for GaussianFactor + HybridGaussianProductFactor& operator+=(const GaussianFactor::shared_ptr& factor); + + /// Add-assign operator for HybridGaussianFactor + HybridGaussianProductFactor& operator+=(const HybridGaussianFactor& factor); + + ///@} + + /// @name Testable + /// @{ + + /** + * @brief Print the HybridGaussianProductFactor + * @param s Optional string to prepend + * @param formatter Optional key formatter + */ + void print(const std::string& s = "", const KeyFormatter& formatter = DefaultKeyFormatter) const; + + /** + * @brief Check if this HybridGaussianProductFactor is equal to another + * @param other The other HybridGaussianProductFactor to compare with + * @param tol Tolerance for floating point comparisons + * @return true if equal, false otherwise + */ + bool equals(const HybridGaussianProductFactor& other, double tol = 1e-9) const { + return Base::equals(other, [tol](const Y& a, const Y& b) { return a.equals(b, tol); }); + } + + /// @} + + /// @name Other methods + ///@{ + + /** + * @brief Remove empty GaussianFactorGraphs from the decision tree + * @return A new HybridGaussianProductFactor with empty GaussianFactorGraphs removed + * + * If any GaussianFactorGraph in the decision tree contains a nullptr, convert + * that leaf to an empty GaussianFactorGraph. This is needed because the DecisionTree + * will otherwise create a GaussianFactorGraph with a single (null) factor, + * which doesn't register as null. 
+ */ + HybridGaussianProductFactor removeEmpty() const; + + ///@} +}; + +// Testable traits +template <> +struct traits : public Testable {}; + +} // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp new file mode 100644 index 0000000000..bd830794ae --- /dev/null +++ b/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp @@ -0,0 +1,185 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridGaussianProductFactor.cpp + * @brief Unit tests for HybridGaussianProductFactor + * @author Frank Dellaert + * @date October 2024 + */ + +#include "gtsam/inference/Key.h" +#include +#include +#include +#include +#include +#include +#include + +// Include for test suite +#include + +#include + +using namespace std; +using namespace gtsam; +using symbol_shorthand::M; +using symbol_shorthand::X; +using symbol_shorthand::Z; + +/* ************************************************************************* */ +namespace examples { +static const DiscreteKey m1(M(1), 2), m2(M(2), 3); + +auto A1 = Matrix::Zero(2, 1); +auto A2 = Matrix::Zero(2, 2); +auto b = Matrix::Zero(2, 1); + +auto f10 = std::make_shared(X(1), A1, X(2), A2, b); +auto f11 = std::make_shared(X(1), A1, X(2), A2, b); + +auto A3 = Matrix::Zero(2, 3); +auto f20 = std::make_shared(X(1), A1, X(3), A3, b); +auto f21 = std::make_shared(X(1), A1, X(3), A3, b); +auto f22 = std::make_shared(X(1), A1, X(3), A3, b); + +HybridGaussianFactor hybridFactorA(m1, {f10, f11}); +HybridGaussianFactor hybridFactorB(m2, {f20, f21, f22}); +// Simulate a pruned hybrid factor, in this 
case m2==1 is nulled out. +HybridGaussianFactor prunedFactorB(m2, {f20, nullptr, f22}); +} // namespace examples + +/* ************************************************************************* */ +// Constructor +TEST(HybridGaussianProductFactor, Construct) { + HybridGaussianProductFactor product; +} + +/* ************************************************************************* */ +// Add two Gaussian factors and check only one leaf in tree +TEST(HybridGaussianProductFactor, AddTwoGaussianFactors) { + using namespace examples; + + HybridGaussianProductFactor product; + product += f10; + product += f11; + + // Check that the product has only one leaf and no discrete variables. + EXPECT_LONGS_EQUAL(1, product.nrLeaves()); + EXPECT(product.labels().empty()); + + // Retrieve the single leaf + auto leaf = product(Assignment()); + + // Check that the leaf contains both factors + EXPECT_LONGS_EQUAL(2, leaf.size()); + EXPECT(leaf.at(0) == f10); + EXPECT(leaf.at(1) == f11); +} + +/* ************************************************************************* */ +// Add two GaussianConditionals and check the resulting tree +TEST(HybridGaussianProductFactor, AddTwoGaussianConditionals) { + // Create two GaussianConditionals + Vector1 d(1.0); + Matrix11 R = I_1x1, S = I_1x1; + auto gc1 = std::make_shared(X(1), d, R, X(2), S); + auto gc2 = std::make_shared(X(2), d, R); + + // Create a HybridGaussianProductFactor and add the conditionals + HybridGaussianProductFactor product; + product += std::static_pointer_cast(gc1); + product += std::static_pointer_cast(gc2); + + // Check that the product has only one leaf and no discrete variables + EXPECT_LONGS_EQUAL(1, product.nrLeaves()); + EXPECT(product.labels().empty()); + + // Retrieve the single leaf + auto leaf = product(Assignment()); + + // Check that the leaf contains both conditionals + EXPECT_LONGS_EQUAL(2, leaf.size()); + EXPECT(leaf.at(0) == gc1); + EXPECT(leaf.at(1) == gc2); +} + +/* 
************************************************************************* */ +// Check AsProductFactor +TEST(HybridGaussianProductFactor, AsProductFactor) { + using namespace examples; + auto product = hybridFactorA.asProductFactor(); + + // Let's check that this worked: + Assignment mode; + mode[m1.first] = 1; + auto actual = product(mode); + EXPECT(actual.at(0) == f11); +} + +/* ************************************************************************* */ +// "Add" one hybrid factors together. +TEST(HybridGaussianProductFactor, AddOne) { + using namespace examples; + HybridGaussianProductFactor product; + product += hybridFactorA; + + // Let's check that this worked: + Assignment mode; + mode[m1.first] = 1; + auto actual = product(mode); + EXPECT(actual.at(0) == f11); +} + +/* ************************************************************************* */ +// "Add" two HFG together. +TEST(HybridGaussianProductFactor, AddTwo) { + using namespace examples; + + // Create product of two hybrid factors: it will be a decision tree now on + // both discrete variables m1 and m2: + HybridGaussianProductFactor product; + product += hybridFactorA; + product += hybridFactorB; + + // Let's check that this worked: + auto actual00 = product({{M(1), 0}, {M(2), 0}}); + EXPECT(actual00.at(0) == f10); + EXPECT(actual00.at(1) == f20); + + auto actual12 = product({{M(1), 1}, {M(2), 2}}); + EXPECT(actual12.at(0) == f11); + EXPECT(actual12.at(1) == f22); +} + +/* ************************************************************************* */ +// "Add" two HFG together. 
+TEST(HybridGaussianProductFactor, AddPruned) { + using namespace examples; + + // Create product of two hybrid factors: it will be a decision tree now on + // both discrete variables m1 and m2: + HybridGaussianProductFactor product; + product += hybridFactorA; + product += prunedFactorB; + EXPECT_LONGS_EQUAL(6, product.nrLeaves()); + + auto pruned = product.removeEmpty(); + EXPECT_LONGS_EQUAL(5, pruned.nrLeaves()); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ From ed9a216365a1087f94bd27882cf0ebdea324c563 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 5 Oct 2024 07:18:19 +0900 Subject: [PATCH 05/18] Switch to using HybridGaussianProductFactor --- gtsam/hybrid/HybridFactor.h | 3 - gtsam/hybrid/HybridGaussianConditional.cpp | 7 ++- gtsam/hybrid/HybridGaussianConditional.h | 7 ++- gtsam/hybrid/HybridGaussianFactor.cpp | 20 ------ gtsam/hybrid/HybridGaussianFactor.h | 28 --------- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 62 +++++-------------- gtsam/hybrid/HybridGaussianFactorGraph.h | 2 +- .../hybrid/tests/testHybridGaussianFactor.cpp | 35 +++-------- .../tests/testHybridGaussianFactorGraph.cpp | 11 ++-- 9 files changed, 41 insertions(+), 134 deletions(-) diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index fc91e08389..8d6fef3f43 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -32,9 +32,6 @@ namespace gtsam { class HybridValues; -/// Alias for DecisionTree of GaussianFactorGraphs -using GaussianFactorGraphTree = DecisionTree; - KeyVector CollectKeys(const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys); KeyVector CollectKeys(const KeyVector &keys1, const KeyVector &keys2); diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 
2c0fb28a40..e69b966f8d 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -23,6 +23,7 @@ #include #include #include +#include #include #include #include @@ -136,9 +137,9 @@ HybridGaussianConditional::conditionals() const { } /* *******************************************************************************/ -GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() +HybridGaussianProductFactor HybridGaussianConditional::asProductFactor() const { - auto wrap = [this](const GaussianConditional::shared_ptr &gc) { + auto wrap = [this](const std::shared_ptr &gc) { // First check if conditional has not been pruned if (gc) { const double Cgm_Kgcm = gc->negLogConstant() - this->negLogConstant_; @@ -155,7 +156,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree() } return GaussianFactorGraph{gc}; }; - return {conditionals_, wrap}; + return {{conditionals_, wrap}}; } /* *******************************************************************************/ diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 27e31d7670..3e6574abc4 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -24,6 +24,7 @@ #include #include #include +#include #include #include #include @@ -221,6 +222,9 @@ class GTSAM_EXPORT HybridGaussianConditional HybridGaussianConditional::shared_ptr prune( const DecisionTreeFactor &discreteProbs) const; + /// Convert to a DecisionTree of Gaussian factor graphs. + HybridGaussianProductFactor asProductFactor() const override; + /// @} private: @@ -231,9 +235,6 @@ class GTSAM_EXPORT HybridGaussianConditional HybridGaussianConditional(const DiscreteKeys &discreteParents, const Helper &helper); - /// Convert to a DecisionTree of Gaussian factor graphs. 
- GaussianFactorGraphTree asGaussianFactorGraphTree() const; - /// Check whether `given` has values for all frontal keys. bool allFrontalsGiven(const VectorValues &given) const; diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index aa88ded30b..cc72dc0ec0 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -195,26 +195,6 @@ HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()( return factors_(assignment).first; } -/* *******************************************************************************/ -GaussianFactorGraphTree HybridGaussianFactor::add( - const GaussianFactorGraphTree &sum) const { - using Y = GaussianFactorGraph; - auto add = [](const Y &graph1, const Y &graph2) { - auto result = graph1; - result.push_back(graph2); - return result; - }; - const auto tree = asGaussianFactorGraphTree(); - return sum.empty() ? tree : sum.apply(tree, add); -} - -/* *******************************************************************************/ -GaussianFactorGraphTree HybridGaussianFactor::asGaussianFactorGraphTree() - const { - auto wrap = [](const auto &pair) { return GaussianFactorGraph{pair.first}; }; - return {factors_, wrap}; -} - /* *******************************************************************************/ HybridGaussianProductFactor HybridGaussianFactor::asProductFactor() const { return {{factors_, diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index d160798b6b..64a9e19187 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -130,16 +130,6 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// Get factor at a given discrete assignment. sharedFactor operator()(const DiscreteValues &assignment) const; - /** - * @brief Combine the Gaussian Factor Graphs in `sum` and `this` while - * maintaining the original tree structure. 
- * - * @param sum Decision Tree of Gaussian Factor Graphs indexed by the - * variables. - * @return Sum - */ - GaussianFactorGraphTree add(const GaussianFactorGraphTree &sum) const; - /** * @brief Compute error of the HybridGaussianFactor as a tree. * @@ -158,14 +148,6 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// Getter for GaussianFactor decision tree const FactorValuePairs &factors() const { return factors_; } - - /// Add HybridNonlinearFactor to a Sum, syntactic sugar. - friend GaussianFactorGraphTree &operator+=( - GaussianFactorGraphTree &sum, const HybridGaussianFactor &factor) { - sum = factor.add(sum); - return sum; - } - /** * @brief Helper function to return factors and functional to create a * DecisionTree of Gaussian Factor Graphs. @@ -175,16 +157,6 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { virtual HybridGaussianProductFactor asProductFactor() const; /// @} - protected: - /** - * @brief Helper function to return factors and functional to create a - * DecisionTree of Gaussian Factor Graphs. - * - * @return GaussianFactorGraphTree - */ - GaussianFactorGraphTree asGaussianFactorGraphTree() const; - - private: /** * @brief Helper function to augment the [A|b] matrices in the factor diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 957a850387..b58400aa41 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -127,43 +127,28 @@ void HybridGaussianFactorGraph::printErrors( std::cout.flush(); } -/* ************************************************************************ */ -static GaussianFactorGraphTree addGaussian( - const GaussianFactorGraphTree &gfgTree, - const GaussianFactor::shared_ptr &factor) { - // If the decision tree is not initialized, then initialize it. 
- if (gfgTree.empty()) { - GaussianFactorGraph result{factor}; - return GaussianFactorGraphTree(result); - } else { - auto add = [&factor](const GaussianFactorGraph &graph) { - auto result = graph; - result.push_back(factor); - return result; - }; - return gfgTree.apply(add); - } -} - /* ************************************************************************ */ // TODO(dellaert): it's probably more efficient to first collect the discrete // keys, and then loop over all assignments to populate a vector. -GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { - GaussianFactorGraphTree result; +HybridGaussianProductFactor +HybridGaussianFactorGraph::collectProductFactor() const { + HybridGaussianProductFactor result; for (auto &f : factors_) { - // TODO(dellaert): just use a virtual method defined in HybridFactor. + // TODO(dellaert): can we make this cleaner and less error-prone? if (auto gf = dynamic_pointer_cast(f)) { - result = addGaussian(result, gf); + result += gf; + } else if (auto gc = dynamic_pointer_cast(f)) { + result += gc; } else if (auto gmf = dynamic_pointer_cast(f)) { - result = gmf->add(result); + result += *gmf; } else if (auto gm = dynamic_pointer_cast(f)) { - result = gm->add(result); + result += *gm; // handled above already? } else if (auto hc = dynamic_pointer_cast(f)) { if (auto gm = hc->asHybrid()) { - result = gm->add(result); + result += *gm; } else if (auto g = hc->asGaussian()) { - result = addGaussian(result, g); + result += g; } else { // Has to be discrete. // TODO(dellaert): in C++20, we can use std::visit. @@ -176,7 +161,7 @@ GaussianFactorGraphTree HybridGaussianFactorGraph::assembleGraphTree() const { } else { // TODO(dellaert): there was an unattributed comment here: We need to // handle the case where the object is actually an BayesTreeOrphanWrapper! 
- throwRuntimeError("gtsam::assembleGraphTree", f); + throwRuntimeError("gtsam::collectProductFactor", f); } } @@ -269,21 +254,6 @@ discreteElimination(const HybridGaussianFactorGraph &factors, return {std::make_shared(result.first), result.second}; } -/* ************************************************************************ */ -// If any GaussianFactorGraph in the decision tree contains a nullptr, convert -// that leaf to an empty GaussianFactorGraph. Needed since the DecisionTree will -// otherwise create a GFG with a single (null) factor, -// which doesn't register as null. -GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) { - auto emptyGaussian = [](const GaussianFactorGraph &graph) { - bool hasNull = - std::any_of(graph.begin(), graph.end(), - [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); - return hasNull ? GaussianFactorGraph() : graph; - }; - return GaussianFactorGraphTree(sum, emptyGaussian); -} - /* ************************************************************************ */ using Result = std::pair, HybridGaussianFactor::sharedFactor>; @@ -334,7 +304,7 @@ static std::shared_ptr createHybridGaussianFactor( // and negative since we want log(k) hf->constantTerm() += -2.0 * conditional->negLogConstant(); } - return {factor, 0.0}; + return {factor, conditional->negLogConstant()}; }; DecisionTree newFactors(eliminationResults, correct); @@ -359,12 +329,12 @@ HybridGaussianFactorGraph::eliminate(const Ordering &keys) const { // Collect all the factors to create a set of Gaussian factor graphs in a // decision tree indexed by all discrete keys involved. - GaussianFactorGraphTree factorGraphTree = assembleGraphTree(); + HybridGaussianProductFactor productFactor = collectProductFactor(); // Convert factor graphs with a nullptr to an empty factor graph. // This is done after assembly since it is non-trivial to keep track of which // FG has a nullptr as we're looping over the factors. 
- factorGraphTree = removeEmpty(factorGraphTree); + auto prunedProductFactor = productFactor.removeEmpty(); // This is the elimination method on the leaf nodes bool someContinuousLeft = false; @@ -383,7 +353,7 @@ HybridGaussianFactorGraph::eliminate(const Ordering &keys) const { }; // Perform elimination! - DecisionTree eliminationResults(factorGraphTree, eliminate); + DecisionTree eliminationResults(prunedProductFactor, eliminate); // If there are no more continuous parents we create a DiscreteFactor with the // error for each discrete choice. Otherwise, create a HybridGaussianFactor diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index a5130ca086..ad6d6337d4 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -218,7 +218,7 @@ class GTSAM_EXPORT HybridGaussianFactorGraph * one for A and one for B. The leaves of the tree will be the Gaussian * factors that have only continuous keys. */ - GaussianFactorGraphTree assembleGraphTree() const; + HybridGaussianProductFactor collectProductFactor() const; /** * @brief Eliminate the given continuous keys. diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 5ff8c14781..1c9476eebb 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -82,40 +82,25 @@ TEST(HybridGaussianFactor, ConstructorVariants) { } /* ************************************************************************* */ -// "Add" two hybrid factors together. 
-TEST(HybridGaussianFactor, Sum) { +TEST(HybridGaussianFactor, Keys) { using namespace test_constructor; - DiscreteKey m2(2, 3); + HybridGaussianFactor hybridFactorA(m1, {f10, f11}); + // Check the number of keys matches what we expect + EXPECT_LONGS_EQUAL(3, hybridFactorA.keys().size()); + EXPECT_LONGS_EQUAL(2, hybridFactorA.continuousKeys().size()); + EXPECT_LONGS_EQUAL(1, hybridFactorA.discreteKeys().size()); + DiscreteKey m2(2, 3); auto A3 = Matrix::Zero(2, 3); auto f20 = std::make_shared(X(1), A1, X(3), A3, b); auto f21 = std::make_shared(X(1), A1, X(3), A3, b); auto f22 = std::make_shared(X(1), A1, X(3), A3, b); - - // TODO(Frank): why specify keys at all? And: keys in factor should be *all* - // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey? - // Design review! - HybridGaussianFactor hybridFactorA(m1, {f10, f11}); HybridGaussianFactor hybridFactorB(m2, {f20, f21, f22}); // Check the number of keys matches what we expect - EXPECT_LONGS_EQUAL(3, hybridFactorA.keys().size()); - EXPECT_LONGS_EQUAL(2, hybridFactorA.continuousKeys().size()); - EXPECT_LONGS_EQUAL(1, hybridFactorA.discreteKeys().size()); - - // Create sum of two hybrid factors: it will be a decision tree now on both - // discrete variables m1 and m2: - GaussianFactorGraphTree sum; - sum += hybridFactorA; - sum += hybridFactorB; - - // Let's check that this worked: - Assignment mode; - mode[m1.first] = 1; - mode[m2.first] = 2; - auto actual = sum(mode); - EXPECT(actual.at(0) == f11); - EXPECT(actual.at(1) == f22); + EXPECT_LONGS_EQUAL(3, hybridFactorB.keys().size()); + EXPECT_LONGS_EQUAL(2, hybridFactorB.continuousKeys().size()); + EXPECT_LONGS_EQUAL(1, hybridFactorB.discreteKeys().size()); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index f30085f020..4c88fa8cd0 100644 --- 
a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -29,6 +29,7 @@ #include #include #include +#include #include #include #include @@ -674,14 +675,14 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) { /* ****************************************************************************/ // Check that assembleGraphTree assembles Gaussian factor graphs for each // assignment. -TEST(HybridGaussianFactorGraph, assembleGraphTree) { +TEST(HybridGaussianFactorGraph, collectProductFactor) { const int num_measurements = 1; auto fg = tiny::createHybridGaussianFactorGraph( num_measurements, VectorValues{{Z(0), Vector1(5.0)}}); EXPECT_LONGS_EQUAL(3, fg.size()); // Assemble graph tree: - auto actual = fg.assembleGraphTree(); + auto actual = fg.collectProductFactor(); // Create expected decision tree with two factor graphs: @@ -701,9 +702,9 @@ TEST(HybridGaussianFactorGraph, assembleGraphTree) { // Expected decision tree with two factor graphs: // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) - GaussianFactorGraphTree expected{ - M(0), GaussianFactorGraph(std::vector{(*hybrid)(d0), prior}), - GaussianFactorGraph(std::vector{(*hybrid)(d1), prior})}; + HybridGaussianProductFactor expected{ + {M(0), GaussianFactorGraph(std::vector{(*hybrid)(d0), prior}), + GaussianFactorGraph(std::vector{(*hybrid)(d1), prior})}}; EXPECT(assert_equal(expected(d0), actual(d0), 1e-5)); EXPECT(assert_equal(expected(d1), actual(d1), 1e-5)); From 55ca557b1ebdd74e6ec30f99b3acebca7c11c4e2 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sat, 5 Oct 2024 19:00:39 +0900 Subject: [PATCH 06/18] Fix conditional==null bug --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 39 ++++++++++++++-------- 1 file changed, 25 insertions(+), 14 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index b58400aa41..a46493a327 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ 
b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -40,7 +40,6 @@ #include #include -#include #include #include #include @@ -136,7 +135,9 @@ HybridGaussianFactorGraph::collectProductFactor() const { for (auto &f : factors_) { // TODO(dellaert): can we make this cleaner and less error-prone? - if (auto gf = dynamic_pointer_cast(f)) { + if (auto orphan = dynamic_pointer_cast(f)) { + continue; // Ignore OrphanWrapper + } else if (auto gf = dynamic_pointer_cast(f)) { result += gf; } else if (auto gc = dynamic_pointer_cast(f)) { result += gc; @@ -269,15 +270,20 @@ static std::shared_ptr createDiscreteFactor( const DiscreteKeys &discreteSeparator) { auto negLogProbability = [&](const Result &pair) -> double { const auto &[conditional, factor] = pair; - static const VectorValues kEmpty; - // If the factor is not null, it has no keys, just contains the residual. - if (!factor) return 1.0; // TODO(dellaert): not loving this. - - // Negative logspace version of: - // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); - // negLogConstant gives `-log(k)` - // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`. - return factor->error(kEmpty) - conditional->negLogConstant(); + if (conditional && factor) { + static const VectorValues kEmpty; + // If the factor is not null, it has no keys, just contains the residual. + + // Negative-log-space version of: + // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); + // negLogConstant gives `-log(k)` + // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`. + return factor->error(kEmpty) - conditional->negLogConstant(); + } else if (!conditional && !factor) { + return 1.0; // TODO(dellaert): not loving this, what should this be?? 
+ } else { + throw std::runtime_error("createDiscreteFactor has mixed NULLs"); + } }; AlgebraicDecisionTree negLogProbabilities( @@ -296,15 +302,20 @@ static std::shared_ptr createHybridGaussianFactor( // Correct for the normalization constant used up by the conditional auto correct = [&](const Result &pair) -> GaussianFactorValuePair { const auto &[conditional, factor] = pair; - if (factor) { + if (conditional && factor) { auto hf = std::dynamic_pointer_cast(factor); if (!hf) throw std::runtime_error("Expected HessianFactor!"); // Add 2.0 term since the constant term will be premultiplied by 0.5 // as per the Hessian definition, // and negative since we want log(k) - hf->constantTerm() += -2.0 * conditional->negLogConstant(); + const double negLogK = conditional->negLogConstant(); + hf->constantTerm() += -2.0 * negLogK; + return {factor, negLogK}; + } else if (!conditional && !factor){ + return {nullptr, 0.0}; // TODO(frank): or should this be infinity? + } else { + throw std::runtime_error("createHybridGaussianFactors has mixed NULLs"); } - return {factor, conditional->negLogConstant()}; }; DecisionTree newFactors(eliminationResults, correct); From 92540298e1068b7f0fa2a5694629a8da4cd27bd5 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 6 Oct 2024 15:12:03 +0900 Subject: [PATCH 07/18] Squashed commit Revised asProductFactor methods collectProductFactor() method Move test better print Formatting Efficiency Fix several bugs Fix print methods Fix print methods More tests, BT tests in different file More product tests Disable printing tests Minimize diff Fix rebase issue --- gtsam/hybrid/HybridGaussianConditional.cpp | 3 - gtsam/hybrid/HybridGaussianFactor.cpp | 46 +- gtsam/hybrid/HybridGaussianFactor.h | 20 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 59 ++- gtsam/hybrid/HybridGaussianFactorGraph.h | 7 +- gtsam/hybrid/HybridNonlinearFactor.cpp | 4 +- gtsam/hybrid/tests/testHybridBayesTree.cpp | 323 +++++++++++++- 
.../hybrid/tests/testHybridGaussianFactor.cpp | 4 +- .../tests/testHybridGaussianFactorGraph.cpp | 393 +++--------------- .../tests/testHybridGaussianProductFactor.cpp | 1 - .../tests/testHybridNonlinearFactorGraph.cpp | 70 ++-- 11 files changed, 509 insertions(+), 421 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index e69b966f8d..a42485f109 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -203,9 +203,6 @@ bool HybridGaussianConditional::equals(const HybridFactor &lf, void HybridGaussianConditional::print(const std::string &s, const KeyFormatter &formatter) const { std::cout << (s.empty() ? "" : s + "\n"); - if (isContinuous()) std::cout << "Continuous "; - if (isDiscrete()) std::cout << "Discrete "; - if (isHybrid()) std::cout << "Hybrid "; BaseConditional::print("", formatter); std::cout << " Discrete Keys = "; for (auto &dk : discreteKeys()) { diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index cc72dc0ec0..acdd9ef89c 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -32,8 +32,8 @@ namespace gtsam { /* *******************************************************************************/ -HybridGaussianFactor::FactorValuePairs HybridGaussianFactor::augment( - const FactorValuePairs &factors) { +HybridGaussianFactor::FactorValuePairs +HybridGaussianFactor::augment(const FactorValuePairs &factors) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. DecisionTree gaussianFactors; @@ -48,12 +48,14 @@ HybridGaussianFactor::FactorValuePairs HybridGaussianFactor::augment( auto [gf, value] = gfv; auto jf = std::dynamic_pointer_cast(gf); - if (!jf) return {gf, 0.0}; // should this be zero or infinite? + if (!jf) + return {gf, 0.0}; // should this be zero or infinite? 
double normalized_value = value - min_value; // If the value is 0, do nothing - if (normalized_value == 0.0) return {gf, 0.0}; + if (normalized_value == 0.0) + return {gf, 0.0}; GaussianFactorGraph gfg; gfg.push_back(jf); @@ -71,9 +73,9 @@ HybridGaussianFactor::FactorValuePairs HybridGaussianFactor::augment( /* *******************************************************************************/ struct HybridGaussianFactor::ConstructorHelper { - KeyVector continuousKeys; // Continuous keys extracted from factors - DiscreteKeys discreteKeys; // Discrete keys provided to the constructors - FactorValuePairs pairs; // The decision tree with factors and scalars + KeyVector continuousKeys; // Continuous keys extracted from factors + DiscreteKeys discreteKeys; // Discrete keys provided to the constructors + FactorValuePairs pairs; // The decision tree with factors and scalars ConstructorHelper(const DiscreteKey &discreteKey, const std::vector &factors) @@ -88,7 +90,9 @@ struct HybridGaussianFactor::ConstructorHelper { // Build the FactorValuePairs DecisionTree pairs = FactorValuePairs( DecisionTree(discreteKeys, factors), - [](const auto &f) { return std::pair{f, 0.0}; }); + [](const auto &f) { + return std::pair{f, 0.0}; + }); } ConstructorHelper(const DiscreteKey &discreteKey, @@ -147,15 +151,17 @@ HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys, /* *******************************************************************************/ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); - if (e == nullptr) return false; + if (e == nullptr) + return false; // This will return false if either factors_ is empty or e->factors_ is // empty, but not if both are empty or both are not empty: - if (factors_.empty() ^ e->factors_.empty()) return false; + if (factors_.empty() ^ e->factors_.empty()) + return false; // Check the base and the factors: auto compareFunc = [tol](const auto &pair1, const auto 
&pair2) { - auto f1 = pair1.first, f2 = pair2.first; + auto f1 = pair1.first, f2 = pair2.first; bool match = (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol)); return match && gtsam::equal(pair1.second, pair2.second, tol); }; @@ -166,7 +172,6 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { void HybridGaussianFactor::print(const std::string &s, const KeyFormatter &formatter) const { std::cout << (s.empty() ? "" : s + "\n"); - std::cout << "HybridGaussianFactor" << std::endl; HybridFactor::print("", formatter); std::cout << "{\n"; if (factors_.empty()) { @@ -179,19 +184,19 @@ void HybridGaussianFactor::print(const std::string &s, std::cout << ":\n"; if (pair.first) { pair.first->print("", formatter); + std::cout << "scalar: " << pair.second << "\n"; return rd.str(); } else { return "nullptr"; } - std::cout << "scalar: " << pair.second << "\n"; }); } std::cout << "}" << std::endl; } /* *******************************************************************************/ -HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()( - const DiscreteValues &assignment) const { +HybridGaussianFactor::sharedFactor +HybridGaussianFactor::operator()(const DiscreteValues &assignment) const { return factors_(assignment).first; } @@ -203,8 +208,9 @@ HybridGaussianProductFactor HybridGaussianFactor::asProductFactor() const { /* *******************************************************************************/ /// Helper method to compute the error of a component. 
-static double PotentiallyPrunedComponentError( - const GaussianFactor::shared_ptr &gf, const VectorValues &values) { +static double +PotentiallyPrunedComponentError(const GaussianFactor::shared_ptr &gf, + const VectorValues &values) { // Check if valid pointer if (gf) { return gf->error(values); @@ -216,8 +222,8 @@ static double PotentiallyPrunedComponentError( } /* *******************************************************************************/ -AlgebraicDecisionTree HybridGaussianFactor::errorTree( - const VectorValues &continuousValues) const { +AlgebraicDecisionTree +HybridGaussianFactor::errorTree(const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. auto errorFunc = [&continuousValues](const auto &pair) { return PotentiallyPrunedComponentError(pair.first, continuousValues); @@ -233,4 +239,4 @@ double HybridGaussianFactor::error(const HybridValues &values) const { return PotentiallyPrunedComponentError(pair.first, values.continuous()); } -} // namespace gtsam +} // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 64a9e19187..bb4d3b368f 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -58,7 +58,7 @@ using GaussianFactorValuePair = std::pair; * @ingroup hybrid */ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { - public: +public: using Base = HybridFactor; using This = HybridGaussianFactor; using shared_ptr = std::shared_ptr; @@ -68,11 +68,11 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// typedef for Decision Tree of Gaussian factors and arbitrary value. using FactorValuePairs = DecisionTree; - private: +private: /// Decision tree of Gaussian factors indexed by discrete keys. 
FactorValuePairs factors_; - public: +public: /// @name Constructors /// @{ @@ -120,8 +120,9 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { bool equals(const HybridFactor &lf, double tol = 1e-9) const override; - void print(const std::string &s = "", const KeyFormatter &formatter = - DefaultKeyFormatter) const override; + void + print(const std::string &s = "", + const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} /// @name Standard API @@ -137,8 +138,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * @return AlgebraicDecisionTree A decision tree with the same keys * as the factors involved, and leaf values as the error. */ - AlgebraicDecisionTree errorTree( - const VectorValues &continuousValues) const override; + AlgebraicDecisionTree + errorTree(const VectorValues &continuousValues) const override; /** * @brief Compute the log-likelihood, including the log-normalizing constant. @@ -148,13 +149,14 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// Getter for GaussianFactor decision tree const FactorValuePairs &factors() const { return factors_; } - /** + /** * @brief Helper function to return factors and functional to create a * DecisionTree of Gaussian Factor Graphs. 
* * @return HybridGaussianProductFactor */ virtual HybridGaussianProductFactor asProductFactor() const; + /// @} private: @@ -189,4 +191,4 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { template <> struct traits : public Testable {}; -} // namespace gtsam +} // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index a46493a327..5d2b4c2037 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -18,6 +18,7 @@ * @date Mar 11, 2022 */ +#include "gtsam/discrete/DiscreteValues.h" #include #include #include @@ -78,10 +79,14 @@ static void printFactor(const std::shared_ptr &factor, const DiscreteValues &assignment, const KeyFormatter &keyFormatter) { if (auto hgf = std::dynamic_pointer_cast(factor)) { - hgf->operator()(assignment) - ->print("HybridGaussianFactor, component:", keyFormatter); + if (assignment.empty()) + hgf->print("HybridGaussianFactor:", keyFormatter); + else + hgf->operator()(assignment) + ->print("HybridGaussianFactor, component:", keyFormatter); } else if (auto gf = std::dynamic_pointer_cast(factor)) { factor->print("GaussianFactor:\n", keyFormatter); + } else if (auto df = std::dynamic_pointer_cast(factor)) { factor->print("DiscreteFactor:\n", keyFormatter); } else if (auto hc = std::dynamic_pointer_cast(factor)) { @@ -90,15 +95,38 @@ static void printFactor(const std::shared_ptr &factor, } else if (hc->isDiscrete()) { factor->print("DiscreteConditional:\n", keyFormatter); } else { - hc->asHybrid() - ->choose(assignment) - ->print("HybridConditional, component:\n", keyFormatter); + if (assignment.empty()) + hc->print("HybridConditional:", keyFormatter); + else + hc->asHybrid() + ->choose(assignment) + ->print("HybridConditional, component:\n", keyFormatter); } } else { factor->print("Unknown factor type\n", keyFormatter); } } +/* ************************************************************************ */ +void 
HybridGaussianFactorGraph::print(const std::string &s, + const KeyFormatter &keyFormatter) const { + std::cout << (s.empty() ? "" : s + " ") << std::endl; + std::cout << "size: " << size() << std::endl; + + for (size_t i = 0; i < factors_.size(); i++) { + auto &&factor = factors_[i]; + if (factor == nullptr) { + std::cout << "Factor " << i << ": nullptr\n"; + continue; + } + // Print the factor + std::cout << "Factor " << i << "\n"; + printFactor(factor, {}, keyFormatter); + std::cout << "\n"; + } + std::cout.flush(); +} + /* ************************************************************************ */ void HybridGaussianFactorGraph::printErrors( const HybridValues &values, const std::string &str, @@ -106,7 +134,7 @@ void HybridGaussianFactorGraph::printErrors( const std::function &printCondition) const { - std::cout << str << "size: " << size() << std::endl << std::endl; + std::cout << str << " size: " << size() << std::endl << std::endl; for (size_t i = 0; i < factors_.size(); i++) { auto &&factor = factors_[i]; @@ -267,7 +295,7 @@ using Result = std::pair, */ static std::shared_ptr createDiscreteFactor( const DecisionTree &eliminationResults, - const DiscreteKeys &discreteSeparator) { + const DiscreteKeys &discreteSeparator) { auto negLogProbability = [&](const Result &pair) -> double { const auto &[conditional, factor] = pair; if (conditional && factor) { @@ -298,7 +326,7 @@ static std::shared_ptr createDiscreteFactor( // for conditional constants. 
static std::shared_ptr createHybridGaussianFactor( const DecisionTree &eliminationResults, - const DiscreteKeys &discreteSeparator) { + const DiscreteKeys &discreteSeparator) { // Correct for the normalization constant used up by the conditional auto correct = [&](const Result &pair) -> GaussianFactorValuePair { const auto &[conditional, factor] = pair; @@ -459,6 +487,7 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, } else if (hybrid_factor->isHybrid()) { only_continuous = false; only_discrete = false; + break; } } else if (auto cont_factor = std::dynamic_pointer_cast(factor)) { @@ -495,10 +524,11 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( } else if (auto df = std::dynamic_pointer_cast(factor)) { // If discrete, just add its errorTree as well result = result + df->errorTree(); + } else if (auto gf = std::dynamic_pointer_cast(factor)) { + // For a continuous only factor, just add its error + result = result + gf->error(continuousValues); } else { - // Everything else is a continuous only factor - HybridValues hv(continuousValues, DiscreteValues()); - result = result + factor->error(hv); // NOTE: yes, you can add constants + throwRuntimeError("HybridGaussianFactorGraph::errorTree", factor); } } return result; @@ -533,8 +563,13 @@ GaussianFactorGraph HybridGaussianFactorGraph::choose( gfg.push_back(gf); } else if (auto hgf = std::dynamic_pointer_cast(f)) { gfg.push_back((*hgf)(assignment)); - } else if (auto hgc = dynamic_pointer_cast(f)) { + } else if (auto hgc = std::dynamic_pointer_cast(f)) { gfg.push_back((*hgc)(assignment)); + } else if (auto hc = std::dynamic_pointer_cast(f)) { + if (auto gc = hc->asGaussian()) + gfg.push_back(gc); + else if (auto hgc = hc->asHybrid()) + gfg.push_back((*hgc)(assignment)); } else { continue; } diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index ad6d6337d4..d72e814892 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ 
b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -145,10 +145,9 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// @name Testable /// @{ - // TODO(dellaert): customize print and equals. - // void print( - // const std::string& s = "HybridGaussianFactorGraph", - // const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override; + void + print(const std::string &s = "HybridGaussianFactorGraph", + const KeyFormatter &keyFormatter = DefaultKeyFormatter) const override; /** * @brief Print the errors of each factor in the hybrid factor graph. diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp index 56711b313e..9378d07fe2 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.cpp +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -196,8 +196,8 @@ std::shared_ptr HybridNonlinearFactor::linearize( } }; - HybridGaussianFactor::FactorValuePairs linearized_factors(factors_, - linearizeDT); + DecisionTree> + linearized_factors(factors_, linearizeDT); return std::make_shared(discreteKeys_, linearized_factors); diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp index 81b257c32e..f31846cb3a 100644 --- a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -20,6 +20,7 @@ #include #include #include +#include #include "Switching.h" @@ -28,9 +29,319 @@ using namespace std; using namespace gtsam; -using noiseModel::Isotropic; +using symbol_shorthand::D; using symbol_shorthand::M; using symbol_shorthand::X; +using symbol_shorthand::Y; + +static const DiscreteKey m0(M(0), 2), m1(M(1), 2), m2(M(2), 2), m3(M(3), 2); + +/* ************************************************************************* */ +TEST(HybridGaussianFactorGraph, EliminateMultifrontal) { + // Test multifrontal elimination + HybridGaussianFactorGraph hfg; + + // Add priors on x0 and c1 + hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); + hfg.add(DecisionTreeFactor(m1, {2, 8})); + + Ordering ordering; 
+ ordering.push_back(X(0)); + auto result = hfg.eliminatePartialMultifrontal(ordering); + + EXPECT_LONGS_EQUAL(result.first->size(), 1); + EXPECT_LONGS_EQUAL(result.second->size(), 1); +} + +/* ************************************************************************* */ +namespace two { +std::vector components(Key key) { + return {std::make_shared(key, I_3x3, Z_3x1), + std::make_shared(key, I_3x3, Vector3::Ones())}; +} +} // namespace two + +/* ************************************************************************* */ +TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { + HybridGaussianFactorGraph hfg; + + hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); + hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); + + hfg.add(HybridGaussianFactor(m1, two::components(X(1)))); + + hfg.add(DecisionTreeFactor(m1, {2, 8})); + // TODO(Varun) Adding extra discrete variable not connected to continuous + // variable throws segfault + // hfg.add(DecisionTreeFactor({m1, m2, "1 2 3 4")); + + HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal(); + + // The bayes tree should have 3 cliques + EXPECT_LONGS_EQUAL(3, result->size()); +} + +/* ************************************************************************* */ +TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { + HybridGaussianFactorGraph hfg; + + // Prior on x0 + hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); + // Factor between x0-x1 + hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); + + // Hybrid factor P(x1|c1) + hfg.add(HybridGaussianFactor(m1, two::components(X(1)))); + // Prior factor on c1 + hfg.add(DecisionTreeFactor(m1, {2, 8})); + + // Get a constrained ordering keeping c1 last + auto ordering_full = HybridOrdering(hfg); + + // Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1) + HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full); + + EXPECT_LONGS_EQUAL(3, hbt->size()); +} + +/* 
************************************************************************* */ +// Check assembling the Bayes Tree roots after we do partial elimination +TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { + HybridGaussianFactorGraph hfg; + + hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); + hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); + + hfg.add(HybridGaussianFactor(m0, two::components(X(0)))); + hfg.add(HybridGaussianFactor(m1, two::components(X(2)))); + + hfg.add(DecisionTreeFactor({m1, m2}, "1 2 3 4")); + + hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1)); + hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1)); + + hfg.add(HybridGaussianFactor(m3, two::components(X(3)))); + hfg.add(HybridGaussianFactor(m2, two::components(X(5)))); + + auto ordering_full = + Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)}); + + const auto [hbt, remaining] = hfg.eliminatePartialMultifrontal(ordering_full); + + // 9 cliques in the bayes tree and 0 remaining variables to eliminate. + EXPECT_LONGS_EQUAL(9, hbt->size()); + EXPECT_LONGS_EQUAL(0, remaining->size()); + + /* + (Fan) Explanation: the Junction tree will need to re-eliminate to get to the + marginal on X(1), which is not possible because it involves eliminating + discrete before continuous. The solution to this, however, is in Murphy02. + TLDR is that this is 1. expensive and 2. inexact. nevertheless it is doable. + And I believe that we should do this. 
+ */ +} + +/* ************************************************************************* */ +void dotPrint(const HybridGaussianFactorGraph::shared_ptr &hfg, + const HybridBayesTree::shared_ptr &hbt, + const Ordering &ordering) { + DotWriter dw; + dw.positionHints['c'] = 2; + dw.positionHints['x'] = 1; + std::cout << hfg->dot(DefaultKeyFormatter, dw); + std::cout << "\n"; + hbt->dot(std::cout); + + std::cout << "\n"; + std::cout << hfg->eliminateSequential(ordering)->dot(DefaultKeyFormatter, dw); +} + +/* ************************************************************************* */ +// TODO(fan): make a graph like Varun's paper one +TEST(HybridGaussianFactorGraph, Switching) { + auto N = 12; + auto hfg = makeSwitchingChain(N); + + // X(5) will be the center, X(1-4), X(6-9) + // X(3), X(7) + // X(2), X(8) + // X(1), X(4), X(6), X(9) + // M(5) will be the center, M(1-4), M(6-8) + // M(3), M(7) + // M(1), M(4), M(2), M(6), M(8) + // auto ordering_full = + // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), + // X(5), + // M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)}); + KeyVector ordering; + + { + std::vector naturalX(N); + std::iota(naturalX.begin(), naturalX.end(), 1); + std::vector ordX; + std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX), + [](int x) { return X(x); }); + + auto [ndX, lvls] = makeBinaryOrdering(ordX); + std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering)); + // TODO(dellaert): this has no effect! 
+ for (auto &l : lvls) { + l = -l; + } + } + { + std::vector naturalC(N - 1); + std::iota(naturalC.begin(), naturalC.end(), 1); + std::vector ordC; + std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), + [](int x) { return M(x); }); + + // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); + const auto [ndC, lvls] = makeBinaryOrdering(ordC); + std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering)); + } + auto ordering_full = Ordering(ordering); + + const auto [hbt, remaining] = + hfg->eliminatePartialMultifrontal(ordering_full); + + // 12 cliques in the bayes tree and 0 remaining variables to eliminate. + EXPECT_LONGS_EQUAL(12, hbt->size()); + EXPECT_LONGS_EQUAL(0, remaining->size()); +} + +/* ************************************************************************* */ +// TODO(fan): make a graph like Varun's paper one +TEST(HybridGaussianFactorGraph, SwitchingISAM) { + auto N = 11; + auto hfg = makeSwitchingChain(N); + + // X(5) will be the center, X(1-4), X(6-9) + // X(3), X(7) + // X(2), X(8) + // X(1), X(4), X(6), X(9) + // M(5) will be the center, M(1-4), M(6-8) + // M(3), M(7) + // M(1), M(4), M(2), M(6), M(8) + // auto ordering_full = + // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), + // X(5), + // M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)}); + KeyVector ordering; + + { + std::vector naturalX(N); + std::iota(naturalX.begin(), naturalX.end(), 1); + std::vector ordX; + std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX), + [](int x) { return X(x); }); + + auto [ndX, lvls] = makeBinaryOrdering(ordX); + std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering)); + // TODO(dellaert): this has no effect! 
+ for (auto &l : lvls) { + l = -l; + } + } + { + std::vector naturalC(N - 1); + std::iota(naturalC.begin(), naturalC.end(), 1); + std::vector ordC; + std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), + [](int x) { return M(x); }); + + // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); + const auto [ndC, lvls] = makeBinaryOrdering(ordC); + std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering)); + } + auto ordering_full = Ordering(ordering); + + const auto [hbt, remaining] = + hfg->eliminatePartialMultifrontal(ordering_full); + + auto new_fg = makeSwitchingChain(12); + auto isam = HybridGaussianISAM(*hbt); + + // Run an ISAM update. + HybridGaussianFactorGraph factorGraph; + factorGraph.push_back(new_fg->at(new_fg->size() - 2)); + factorGraph.push_back(new_fg->at(new_fg->size() - 1)); + isam.update(factorGraph); + + // ISAM should have 12 factors after the last update + EXPECT_LONGS_EQUAL(12, isam.size()); +} + +/* ************************************************************************* */ +TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { + const int N = 7; + auto hfg = makeSwitchingChain(N, X); + hfg->push_back(*makeSwitchingChain(N, Y, D)); + + for (int t = 1; t <= N; t++) { + hfg->add(JacobianFactor(X(t), I_3x3, Y(t), -I_3x3, Vector3(1.0, 0.0, 0.0))); + } + + KeyVector ordering; + + KeyVector naturalX(N); + std::iota(naturalX.begin(), naturalX.end(), 1); + KeyVector ordX; + for (size_t i = 1; i <= N; i++) { + ordX.emplace_back(X(i)); + ordX.emplace_back(Y(i)); + } + + for (size_t i = 1; i <= N - 1; i++) { + ordX.emplace_back(M(i)); + } + for (size_t i = 1; i <= N - 1; i++) { + ordX.emplace_back(D(i)); + } + + { + DotWriter dw; + dw.positionHints['x'] = 1; + dw.positionHints['c'] = 0; + dw.positionHints['d'] = 3; + dw.positionHints['y'] = 2; + // std::cout << hfg->dot(DefaultKeyFormatter, dw); + // std::cout << "\n"; + } + + { + DotWriter dw; + dw.positionHints['y'] = 9; + // dw.positionHints['c'] = 0; + // 
dw.positionHints['d'] = 3; + dw.positionHints['x'] = 1; + // std::cout << "\n"; + // std::cout << hfg->eliminateSequential(Ordering(ordX)) + // ->dot(DefaultKeyFormatter, dw); + // hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout); + } + + Ordering ordering_partial; + for (size_t i = 1; i <= N; i++) { + ordering_partial.emplace_back(X(i)); + ordering_partial.emplace_back(Y(i)); + } + const auto [hbn, remaining] = + hfg->eliminatePartialSequential(ordering_partial); + + EXPECT_LONGS_EQUAL(14, hbn->size()); + EXPECT_LONGS_EQUAL(11, remaining->size()); + + { + DotWriter dw; + dw.positionHints['x'] = 1; + dw.positionHints['c'] = 0; + dw.positionHints['d'] = 3; + dw.positionHints['y'] = 2; + // std::cout << remaining->dot(DefaultKeyFormatter, dw); + // std::cout << "\n"; + } +} /* ****************************************************************************/ // Test multifrontal optimize @@ -97,7 +408,8 @@ TEST(HybridBayesTree, OptimizeAssignment) { // Create ordering. Ordering ordering; - for (size_t k = 0; k < s.K; k++) ordering.push_back(X(k)); + for (size_t k = 0; k < s.K; k++) + ordering.push_back(X(k)); const auto [hybridBayesNet, remainingFactorGraph] = s.linearizedFactorGraph.eliminatePartialSequential(ordering); @@ -139,20 +451,21 @@ TEST(HybridBayesTree, Optimize) { // Create ordering. 
Ordering ordering; - for (size_t k = 0; k < s.K; k++) ordering.push_back(X(k)); + for (size_t k = 0; k < s.K; k++) + ordering.push_back(X(k)); const auto [hybridBayesNet, remainingFactorGraph] = s.linearizedFactorGraph.eliminatePartialSequential(ordering); DiscreteFactorGraph dfg; - for (auto&& f : *remainingFactorGraph) { + for (auto &&f : *remainingFactorGraph) { auto discreteFactor = dynamic_pointer_cast(f); assert(discreteFactor); dfg.push_back(discreteFactor); } // Add the probabilities for each branch - DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}}; + DiscreteKeys discrete_keys = {m0, m1, m2}; vector probs = {0.012519475, 0.041280228, 0.075018647, 0.081663656, 0.037152205, 0.12248971, 0.07349729, 0.08}; dfg.emplace_shared(discrete_keys, probs); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp index 1c9476eebb..71aac8224b 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -104,7 +104,7 @@ TEST(HybridGaussianFactor, Keys) { } /* ************************************************************************* */ -TEST(HybridGaussianFactor, Printing) { +TEST_DISABLED(HybridGaussianFactor, Printing) { using namespace test_constructor; HybridGaussianFactor hybridFactor(m1, {f10, f11}); @@ -123,6 +123,7 @@ Hybrid [x1 x2; 1]{ ] b = [ 0 0 ] No noise model +scalar: 0 1 Leaf : A[x1] = [ @@ -135,6 +136,7 @@ Hybrid [x1 x2; 1]{ ] b = [ 0 0 ] No noise model +scalar: 0 } )"; diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 4c88fa8cd0..23ad0c1359 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -13,39 +13,35 @@ * @file testHybridGaussianFactorGraph.cpp * @date Mar 11, 2022 * @author Fan Jiang + * @author Varun Agrawal + * @author Frank Dellaert */ -#include -#include +#include #include 
#include #include #include #include #include -#include #include #include #include #include #include #include -#include #include #include -#include #include #include #include #include -#include +#include +#include + #include -#include -#include -#include #include -#include #include #include "Switching.h" @@ -54,17 +50,15 @@ using namespace std; using namespace gtsam; -using gtsam::symbol_shorthand::D; using gtsam::symbol_shorthand::M; using gtsam::symbol_shorthand::N; using gtsam::symbol_shorthand::X; -using gtsam::symbol_shorthand::Y; using gtsam::symbol_shorthand::Z; // Set up sampling std::mt19937_64 kRng(42); -static const DiscreteKey m1(M(1), 2); +static const DiscreteKey m0(M(0), 2), m1(M(1), 2), m2(M(2), 2); /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, Creation) { @@ -77,7 +71,7 @@ TEST(HybridGaussianFactorGraph, Creation) { // Define a hybrid gaussian conditional P(x0|x1, c0) // and add it to the factor graph. HybridGaussianConditional gm( - {M(0), 2}, + m0, {std::make_shared(X(0), Z_3x1, I_3x3, X(1), I_3x3), std::make_shared(X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)}); @@ -98,22 +92,6 @@ TEST(HybridGaussianFactorGraph, EliminateSequential) { EXPECT_LONGS_EQUAL(result.first->size(), 1); } -/* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, EliminateMultifrontal) { - // Test multifrontal elimination - HybridGaussianFactorGraph hfg; - - // Add priors on x0 and c1 - hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); - hfg.add(DecisionTreeFactor(m1, {2, 8})); - - Ordering ordering; - ordering.push_back(X(0)); - auto result = hfg.eliminatePartialMultifrontal(ordering); - - EXPECT_LONGS_EQUAL(result.first->size(), 1); - EXPECT_LONGS_EQUAL(result.second->size(), 1); -} /* ************************************************************************* */ namespace two { @@ -121,7 +99,7 @@ std::vector components(Key key) { return 
{std::make_shared(key, I_3x3, Z_3x1), std::make_shared(key, I_3x3, Vector3::Ones())}; } -} // namespace two +} // namespace two /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) { @@ -179,7 +157,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { // Discrete probability table for c1 hfg.add(DecisionTreeFactor(m1, {2, 8})); // Joint discrete probability table for c1, c2 - hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); + hfg.add(DecisionTreeFactor({m1, m2}, "1 2 3 4")); HybridBayesNet::shared_ptr result = hfg.eliminateSequential(); @@ -187,296 +165,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { EXPECT_LONGS_EQUAL(4, result->size()); } -/* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { - HybridGaussianFactorGraph hfg; - - hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); - hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - - hfg.add(HybridGaussianFactor({M(1), 2}, two::components(X(1)))); - - hfg.add(DecisionTreeFactor(m1, {2, 8})); - // TODO(Varun) Adding extra discrete variable not connected to continuous - // variable throws segfault - // hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); - - HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal(); - - // The bayes tree should have 3 cliques - EXPECT_LONGS_EQUAL(3, result->size()); - // GTSAM_PRINT(*result); - // GTSAM_PRINT(*result->marginalFactor(M(2))); -} - -/* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) { - HybridGaussianFactorGraph hfg; - - // Prior on x0 - hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); - // Factor between x0-x1 - hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - - // Hybrid factor P(x1|c1) - 
hfg.add(HybridGaussianFactor(m1, two::components(X(1)))); - // Prior factor on c1 - hfg.add(DecisionTreeFactor(m1, {2, 8})); - - // Get a constrained ordering keeping c1 last - auto ordering_full = HybridOrdering(hfg); - - // Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|c1)P(c1) - HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full); - - EXPECT_LONGS_EQUAL(3, hbt->size()); -} - -/* ************************************************************************* */ /* - * This test is about how to assemble the Bayes Tree roots after we do partial - * elimination - */ -TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { - HybridGaussianFactorGraph hfg; - - hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); - hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); - - hfg.add(HybridGaussianFactor({M(0), 2}, two::components(X(0)))); - hfg.add(HybridGaussianFactor({M(1), 2}, two::components(X(2)))); - - hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); - - hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1)); - hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1)); - - hfg.add(HybridGaussianFactor({M(3), 2}, two::components(X(3)))); - hfg.add(HybridGaussianFactor({M(2), 2}, two::components(X(5)))); - - auto ordering_full = - Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)}); - - const auto [hbt, remaining] = hfg.eliminatePartialMultifrontal(ordering_full); - - // 9 cliques in the bayes tree and 0 remaining variables to eliminate. - EXPECT_LONGS_EQUAL(9, hbt->size()); - EXPECT_LONGS_EQUAL(0, remaining->size()); - - /* - (Fan) Explanation: the Junction tree will need to re-eliminate to get to the - marginal on X(1), which is not possible because it involves eliminating - discrete before continuous. The solution to this, however, is in Murphy02. - TLDR is that this is 1. expensive and 2. inexact. nevertheless it is doable. - And I believe that we should do this. 
- */ -} - -void dotPrint(const HybridGaussianFactorGraph::shared_ptr &hfg, - const HybridBayesTree::shared_ptr &hbt, - const Ordering &ordering) { - DotWriter dw; - dw.positionHints['c'] = 2; - dw.positionHints['x'] = 1; - std::cout << hfg->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; - hbt->dot(std::cout); - - std::cout << "\n"; - std::cout << hfg->eliminateSequential(ordering)->dot(DefaultKeyFormatter, dw); -} - -/* ************************************************************************* */ -// TODO(fan): make a graph like Varun's paper one -TEST(HybridGaussianFactorGraph, Switching) { - auto N = 12; - auto hfg = makeSwitchingChain(N); - - // X(5) will be the center, X(1-4), X(6-9) - // X(3), X(7) - // X(2), X(8) - // X(1), X(4), X(6), X(9) - // M(5) will be the center, M(1-4), M(6-8) - // M(3), M(7) - // M(1), M(4), M(2), M(6), M(8) - // auto ordering_full = - // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), - // X(5), - // M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)}); - KeyVector ordering; - - { - std::vector naturalX(N); - std::iota(naturalX.begin(), naturalX.end(), 1); - std::vector ordX; - std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX), - [](int x) { return X(x); }); - - auto [ndX, lvls] = makeBinaryOrdering(ordX); - std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering)); - // TODO(dellaert): this has no effect! 
- for (auto &l : lvls) { - l = -l; - } - } - { - std::vector naturalC(N - 1); - std::iota(naturalC.begin(), naturalC.end(), 1); - std::vector ordC; - std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return M(x); }); - - // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); - const auto [ndC, lvls] = makeBinaryOrdering(ordC); - std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering)); - } - auto ordering_full = Ordering(ordering); - - // GTSAM_PRINT(*hfg); - // GTSAM_PRINT(ordering_full); - - const auto [hbt, remaining] = - hfg->eliminatePartialMultifrontal(ordering_full); - - // 12 cliques in the bayes tree and 0 remaining variables to eliminate. - EXPECT_LONGS_EQUAL(12, hbt->size()); - EXPECT_LONGS_EQUAL(0, remaining->size()); -} - -/* ************************************************************************* */ -// TODO(fan): make a graph like Varun's paper one -TEST(HybridGaussianFactorGraph, SwitchingISAM) { - auto N = 11; - auto hfg = makeSwitchingChain(N); - - // X(5) will be the center, X(1-4), X(6-9) - // X(3), X(7) - // X(2), X(8) - // X(1), X(4), X(6), X(9) - // M(5) will be the center, M(1-4), M(6-8) - // M(3), M(7) - // M(1), M(4), M(2), M(6), M(8) - // auto ordering_full = - // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), - // X(5), - // M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)}); - KeyVector ordering; - - { - std::vector naturalX(N); - std::iota(naturalX.begin(), naturalX.end(), 1); - std::vector ordX; - std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX), - [](int x) { return X(x); }); - - auto [ndX, lvls] = makeBinaryOrdering(ordX); - std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering)); - // TODO(dellaert): this has no effect! 
- for (auto &l : lvls) { - l = -l; - } - } - { - std::vector naturalC(N - 1); - std::iota(naturalC.begin(), naturalC.end(), 1); - std::vector ordC; - std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return M(x); }); - - // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); - const auto [ndC, lvls] = makeBinaryOrdering(ordC); - std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering)); - } - auto ordering_full = Ordering(ordering); - - const auto [hbt, remaining] = - hfg->eliminatePartialMultifrontal(ordering_full); - - auto new_fg = makeSwitchingChain(12); - auto isam = HybridGaussianISAM(*hbt); - - // Run an ISAM update. - HybridGaussianFactorGraph factorGraph; - factorGraph.push_back(new_fg->at(new_fg->size() - 2)); - factorGraph.push_back(new_fg->at(new_fg->size() - 1)); - isam.update(factorGraph); - - // ISAM should have 12 factors after the last update - EXPECT_LONGS_EQUAL(12, isam.size()); -} - -/* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { - const int N = 7; - auto hfg = makeSwitchingChain(N, X); - hfg->push_back(*makeSwitchingChain(N, Y, D)); - - for (int t = 1; t <= N; t++) { - hfg->add(JacobianFactor(X(t), I_3x3, Y(t), -I_3x3, Vector3(1.0, 0.0, 0.0))); - } - - KeyVector ordering; - - KeyVector naturalX(N); - std::iota(naturalX.begin(), naturalX.end(), 1); - KeyVector ordX; - for (size_t i = 1; i <= N; i++) { - ordX.emplace_back(X(i)); - ordX.emplace_back(Y(i)); - } - - for (size_t i = 1; i <= N - 1; i++) { - ordX.emplace_back(M(i)); - } - for (size_t i = 1; i <= N - 1; i++) { - ordX.emplace_back(D(i)); - } - - { - DotWriter dw; - dw.positionHints['x'] = 1; - dw.positionHints['c'] = 0; - dw.positionHints['d'] = 3; - dw.positionHints['y'] = 2; - // std::cout << hfg->dot(DefaultKeyFormatter, dw); - // std::cout << "\n"; - } - - { - DotWriter dw; - dw.positionHints['y'] = 9; - // dw.positionHints['c'] = 0; - // 
dw.positionHints['d'] = 3; - dw.positionHints['x'] = 1; - // std::cout << "\n"; - // std::cout << hfg->eliminateSequential(Ordering(ordX)) - // ->dot(DefaultKeyFormatter, dw); - // hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout); - } - - Ordering ordering_partial; - for (size_t i = 1; i <= N; i++) { - ordering_partial.emplace_back(X(i)); - ordering_partial.emplace_back(Y(i)); - } - const auto [hbn, remaining] = - hfg->eliminatePartialSequential(ordering_partial); - - EXPECT_LONGS_EQUAL(14, hbn->size()); - EXPECT_LONGS_EQUAL(11, remaining->size()); - - { - DotWriter dw; - dw.positionHints['x'] = 1; - dw.positionHints['c'] = 0; - dw.positionHints['d'] = 3; - dw.positionHints['y'] = 2; - // std::cout << remaining->dot(DefaultKeyFormatter, dw); - // std::cout << "\n"; - } -} - -/* ****************************************************************************/ +****************************************************************************/ // Select a particular continuous factor graph given a discrete assignment TEST(HybridGaussianFactorGraph, DiscreteSelection) { Switching s(3); @@ -547,23 +237,43 @@ TEST(HybridGaussianFactorGraph, optimize) { // Test adding of gaussian conditional and re-elimination. 
TEST(HybridGaussianFactorGraph, Conditionals) { Switching switching(4); - HybridGaussianFactorGraph hfg; - hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X1) + HybridGaussianFactorGraph hfg; + hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X0) Ordering ordering; ordering.push_back(X(0)); HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering); - hfg.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1) - hfg.push_back(*bayes_net); - hfg.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2) - hfg.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) - ordering.push_back(X(1)); - ordering.push_back(X(2)); - ordering.push_back(M(0)); - ordering.push_back(M(1)); + HybridGaussianFactorGraph hfg2; + hfg2.push_back(*bayes_net); // P(X0) + hfg2.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0) + hfg2.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1) + hfg2.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) + ordering += X(1), X(2), M(0), M(1); + + // Created product of first two factors and check eliminate: + HybridGaussianFactorGraph fragment; + fragment.push_back(hfg2[0]); + fragment.push_back(hfg2[1]); - bayes_net = hfg.eliminateSequential(ordering); + // Check that product + HybridGaussianProductFactor product = fragment.collectProductFactor(); + auto leaf = fragment(DiscreteValues{{M(0), 0}}); + EXPECT_LONGS_EQUAL(2, leaf.size()); + + // Check product and that pruneEmpty does not touch it + auto pruned = product.removeEmpty(); + LONGS_EQUAL(2, pruned.nrLeaves()); + + // Test eliminate + auto [hybridConditional, factor] = fragment.eliminate({X(0)}); + EXPECT(hybridConditional->isHybrid()); + EXPECT(hybridConditional->keys() == KeyVector({X(0), X(1), M(0)})); + + EXPECT(dynamic_pointer_cast(factor)); + EXPECT(factor->keys() == KeyVector({X(1), M(0)})); + + bayes_net = hfg2.eliminateSequential(ordering); HybridValues result = bayes_net->optimize(); @@ 
-647,7 +357,7 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) { HybridValues delta = hybridBayesNet->optimize(); auto error_tree = graph.errorTree(delta.continuous()); - std::vector discrete_keys = {{M(0), 2}, {M(1), 2}}; + std::vector discrete_keys = {m0, m1}; std::vector leaves = {2.7916153, 1.5888555, 1.7233422, 1.6191947}; AlgebraicDecisionTree expected_error(discrete_keys, leaves); @@ -712,11 +422,12 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) { /* ****************************************************************************/ // Check that the factor graph unnormalized probability is proportional to the -// Bayes net probability for the given measurements. -bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, - const HybridGaussianFactorGraph &fg, size_t num_samples = 100) { + // Bayes net probability for the given measurements. + bool + ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, + const HybridGaussianFactorGraph &fg, size_t num_samples = 100) { auto compute_ratio = [&](HybridValues *sample) -> double { - sample->update(measurements); // update sample with given measurements: + sample->update(measurements); // update sample with given measurements: return bn.evaluate(*sample) / fg.probPrime(*sample); }; @@ -726,7 +437,8 @@ bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, // Test ratios for a number of independent samples: for (size_t i = 0; i < num_samples; i++) { HybridValues sample = bn.sample(&kRng); - if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false; + if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) + return false; } return true; } @@ -737,7 +449,7 @@ bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, const HybridBayesNet &posterior, size_t num_samples = 100) { auto compute_ratio = [&](HybridValues *sample) -> double { - 
sample->update(measurements); // update sample with given measurements: + sample->update(measurements); // update sample with given measurements: return bn.evaluate(*sample) / posterior.evaluate(*sample); }; @@ -749,7 +461,8 @@ bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, HybridValues sample = bn.sample(&kRng); // GTSAM_PRINT(sample); // std::cout << "ratio: " << compute_ratio(&sample) << std::endl; - if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false; + if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) + return false; } return true; } diff --git a/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp index bd830794ae..6d650246e3 100644 --- a/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp @@ -34,7 +34,6 @@ using namespace std; using namespace gtsam; using symbol_shorthand::M; using symbol_shorthand::X; -using symbol_shorthand::Z; /* ************************************************************************* */ namespace examples { diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 3a26f44869..ad3b2afff8 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -34,6 +34,7 @@ #include #include "Switching.h" +#include "Test.h" // Include for test suite #include @@ -498,7 +499,7 @@ TEST(HybridNonlinearFactorGraph, Full_Elimination) { /**************************************************************************** * Test printing */ -TEST(HybridNonlinearFactorGraph, Printing) { +TEST_DISABLED(HybridNonlinearFactorGraph, Printing) { Switching self(3); auto linearizedFactorGraph = self.linearizedFactorGraph; @@ -514,78 +515,98 @@ TEST(HybridNonlinearFactorGraph, Printing) { #ifdef GTSAM_DT_MERGING string expected_hybridFactorGraph = R"( 
size: 7 -factor 0: +Factor 0 +GaussianFactor: + A[x0] = [ - 10 + 10 ] b = [ -10 ] No noise model -factor 1: -HybridGaussianFactor + +Factor 1 +HybridGaussianFactor: Hybrid [x0 x1; m0]{ Choice(m0) 0 Leaf : A[x0] = [ - -1 + -1 ] A[x1] = [ - 1 + 1 ] b = [ -1 ] No noise model +scalar: 0 1 Leaf : A[x0] = [ - -1 + -1 ] A[x1] = [ - 1 + 1 ] b = [ -0 ] No noise model +scalar: 0 } -factor 2: -HybridGaussianFactor + +Factor 2 +HybridGaussianFactor: Hybrid [x1 x2; m1]{ Choice(m1) 0 Leaf : A[x1] = [ - -1 + -1 ] A[x2] = [ - 1 + 1 ] b = [ -1 ] No noise model +scalar: 0 1 Leaf : A[x1] = [ - -1 + -1 ] A[x2] = [ - 1 + 1 ] b = [ -0 ] No noise model +scalar: 0 } -factor 3: + +Factor 3 +GaussianFactor: + A[x1] = [ - 10 + 10 ] b = [ -10 ] No noise model -factor 4: + +Factor 4 +GaussianFactor: + A[x2] = [ - 10 + 10 ] b = [ -10 ] No noise model -factor 5: P( m0 ): + +Factor 5 +DiscreteFactor: + P( m0 ): Leaf 0.5 -factor 6: P( m1 | m0 ): + +Factor 6 +DiscreteFactor: + P( m1 | m0 ): Choice(m1) 0 Choice(m0) 0 0 Leaf 0.33333333 @@ -594,6 +615,7 @@ factor 6: P( m1 | m0 ): 1 0 Leaf 0.66666667 1 1 Leaf 0.4 + )"; #else string expected_hybridFactorGraph = R"( @@ -686,7 +708,7 @@ factor 6: P( m1 | m0 ): // Expected output for hybridBayesNet. 
string expected_hybridBayesNet = R"( size: 3 -conditional 0: Hybrid P( x0 | x1 m0) +conditional 0: P( x0 | x1 m0) Discrete Keys = (m0, 2), logNormalizationConstant: 1.38862 @@ -705,7 +727,7 @@ conditional 0: Hybrid P( x0 | x1 m0) logNormalizationConstant: 1.38862 No noise model -conditional 1: Hybrid P( x1 | x2 m0 m1) +conditional 1: P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), logNormalizationConstant: 1.3935 @@ -740,7 +762,7 @@ conditional 1: Hybrid P( x1 | x2 m0 m1) logNormalizationConstant: 1.3935 No noise model -conditional 2: Hybrid P( x2 | m0 m1) +conditional 2: P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), logNormalizationConstant: 1.38857 From 584a71fb944b0536b6497caecfa5d2236b5d3e2c Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 6 Oct 2024 17:50:22 +0900 Subject: [PATCH 08/18] Product now has scalars --- gtsam/hybrid/HybridGaussianConditional.cpp | 159 +++++++++--------- gtsam/hybrid/HybridGaussianFactor.cpp | 117 ++++++------- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 156 ++++++++--------- gtsam/hybrid/HybridGaussianProductFactor.cpp | 54 +++--- gtsam/hybrid/HybridGaussianProductFactor.h | 20 ++- .../tests/testHybridGaussianFactorGraph.cpp | 121 +++++++------ .../tests/testHybridGaussianProductFactor.cpp | 71 ++++---- 7 files changed, 344 insertions(+), 354 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index a42485f109..a6dcc6624d 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -22,8 +22,8 @@ #include #include #include -#include #include +#include #include #include #include @@ -44,7 +44,7 @@ struct HybridGaussianConditional::Helper { /// Construct from a vector of mean and sigma pairs, plus extra args. template - explicit Helper(const DiscreteKey &mode, const P &p, Args &&...args) { + explicit Helper(const DiscreteKey& mode, const P& p, Args&&... 
args) { nrFrontals = 1; minNegLogConstant = std::numeric_limits::infinity(); @@ -52,9 +52,8 @@ struct HybridGaussianConditional::Helper { std::vector gcs; fvs.reserve(p.size()); gcs.reserve(p.size()); - for (auto &&[mean, sigma] : p) { - auto gaussianConditional = - GC::sharedMeanAndStddev(std::forward(args)..., mean, sigma); + for (auto&& [mean, sigma] : p) { + auto gaussianConditional = GC::sharedMeanAndStddev(std::forward(args)..., mean, sigma); double value = gaussianConditional->negLogConstant(); minNegLogConstant = std::min(minNegLogConstant, value); fvs.emplace_back(gaussianConditional, value); @@ -66,10 +65,9 @@ struct HybridGaussianConditional::Helper { } /// Construct from tree of GaussianConditionals. - explicit Helper(const Conditionals &conditionals) - : conditionals(conditionals), - minNegLogConstant(std::numeric_limits::infinity()) { - auto func = [this](const GC::shared_ptr &c) -> GaussianFactorValuePair { + explicit Helper(const Conditionals& conditionals) + : conditionals(conditionals), minNegLogConstant(std::numeric_limits::infinity()) { + auto func = [this](const GC::shared_ptr& c) -> GaussianFactorValuePair { double value = 0.0; if (c) { if (!nrFrontals.has_value()) { @@ -90,56 +88,61 @@ struct HybridGaussianConditional::Helper { }; /* *******************************************************************************/ -HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKeys &discreteParents, const Helper &helper) +HybridGaussianConditional::HybridGaussianConditional(const DiscreteKeys& discreteParents, + const Helper& helper) : BaseFactor(discreteParents, helper.pairs), BaseConditional(*helper.nrFrontals), conditionals_(helper.conditionals), negLogConstant_(helper.minNegLogConstant) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey &discreteParent, - const std::vector &conditionals) + const DiscreteKey& discreteParent, + const std::vector& conditionals) : 
HybridGaussianConditional(DiscreteKeys{discreteParent}, Conditionals({discreteParent}, conditionals)) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey &discreteParent, Key key, // - const std::vector> ¶meters) + const DiscreteKey& discreteParent, + Key key, // + const std::vector>& parameters) : HybridGaussianConditional(DiscreteKeys{discreteParent}, Helper(discreteParent, parameters, key)) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey &discreteParent, Key key, // - const Matrix &A, Key parent, - const std::vector> ¶meters) - : HybridGaussianConditional( - DiscreteKeys{discreteParent}, - Helper(discreteParent, parameters, key, A, parent)) {} + const DiscreteKey& discreteParent, + Key key, // + const Matrix& A, + Key parent, + const std::vector>& parameters) + : HybridGaussianConditional(DiscreteKeys{discreteParent}, + Helper(discreteParent, parameters, key, A, parent)) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey &discreteParent, Key key, // - const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, - const std::vector> ¶meters) - : HybridGaussianConditional( - DiscreteKeys{discreteParent}, - Helper(discreteParent, parameters, key, A1, parent1, A2, parent2)) {} + const DiscreteKey& discreteParent, + Key key, // + const Matrix& A1, + Key parent1, + const Matrix& A2, + Key parent2, + const std::vector>& parameters) + : HybridGaussianConditional(DiscreteKeys{discreteParent}, + Helper(discreteParent, parameters, key, A1, parent1, A2, parent2)) { +} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKeys &discreteParents, - const HybridGaussianConditional::Conditionals &conditionals) + const DiscreteKeys& discreteParents, + const HybridGaussianConditional::Conditionals& conditionals) : HybridGaussianConditional(discreteParents, Helper(conditionals)) {} /* *******************************************************************************/ -const 
HybridGaussianConditional::Conditionals & -HybridGaussianConditional::conditionals() const { +const HybridGaussianConditional::Conditionals& HybridGaussianConditional::conditionals() const { return conditionals_; } /* *******************************************************************************/ -HybridGaussianProductFactor HybridGaussianConditional::asProductFactor() - const { - auto wrap = [this](const std::shared_ptr &gc) { +HybridGaussianProductFactor HybridGaussianConditional::asProductFactor() const { + auto wrap = [this](const std::shared_ptr& gc) + -> std::pair { // First check if conditional has not been pruned if (gc) { const double Cgm_Kgcm = gc->negLogConstant() - this->negLogConstant_; @@ -151,10 +154,17 @@ HybridGaussianProductFactor HybridGaussianConditional::asProductFactor() Vector c(1); c << std::sqrt(2.0 * Cgm_Kgcm); auto constantFactor = std::make_shared(c); - return GaussianFactorGraph{gc, constantFactor}; + return {GaussianFactorGraph{gc, constantFactor}, Cgm_Kgcm}; + } else { + // The scalar can be zero. + // TODO(Frank): after hiding is gone, this should be only case here. + return {GaussianFactorGraph{gc}, Cgm_Kgcm}; } + } else { + // If the conditional is pruned, return an empty GaussianFactorGraph with zero scalar sum + // TODO(Frank): Could we just return an *empty* GaussianFactorGraph? 
+ return {GaussianFactorGraph{nullptr}, 0.0}; } - return GaussianFactorGraph{gc}; }; return {{conditionals_, wrap}}; } @@ -162,7 +172,7 @@ HybridGaussianProductFactor HybridGaussianConditional::asProductFactor() /* *******************************************************************************/ size_t HybridGaussianConditional::nrComponents() const { size_t total = 0; - conditionals_.visit([&total](const GaussianFactor::shared_ptr &node) { + conditionals_.visit([&total](const GaussianFactor::shared_ptr& node) { if (node) total += 1; }); return total; @@ -170,21 +180,19 @@ size_t HybridGaussianConditional::nrComponents() const { /* *******************************************************************************/ GaussianConditional::shared_ptr HybridGaussianConditional::choose( - const DiscreteValues &discreteValues) const { - auto &ptr = conditionals_(discreteValues); + const DiscreteValues& discreteValues) const { + auto& ptr = conditionals_(discreteValues); if (!ptr) return nullptr; auto conditional = std::dynamic_pointer_cast(ptr); if (conditional) return conditional; else - throw std::logic_error( - "A HybridGaussianConditional unexpectedly contained a non-conditional"); + throw std::logic_error("A HybridGaussianConditional unexpectedly contained a non-conditional"); } /* *******************************************************************************/ -bool HybridGaussianConditional::equals(const HybridFactor &lf, - double tol) const { - const This *e = dynamic_cast(&lf); +bool HybridGaussianConditional::equals(const HybridFactor& lf, double tol) const { + const This* e = dynamic_cast(&lf); if (e == nullptr) return false; // This will return false if either conditionals_ is empty or e->conditionals_ @@ -193,27 +201,26 @@ bool HybridGaussianConditional::equals(const HybridFactor &lf, // Check the base and the factors: return BaseFactor::equals(*e, tol) && - conditionals_.equals( - e->conditionals_, [tol](const auto &f1, const auto &f2) { - return (!f1 && !f2) || 
(f1 && f2 && f1->equals(*f2, tol)); - }); + conditionals_.equals(e->conditionals_, [tol](const auto& f1, const auto& f2) { + return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol)); + }); } /* *******************************************************************************/ -void HybridGaussianConditional::print(const std::string &s, - const KeyFormatter &formatter) const { +void HybridGaussianConditional::print(const std::string& s, const KeyFormatter& formatter) const { std::cout << (s.empty() ? "" : s + "\n"); BaseConditional::print("", formatter); std::cout << " Discrete Keys = "; - for (auto &dk : discreteKeys()) { + for (auto& dk : discreteKeys()) { std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << std::endl << " logNormalizationConstant: " << -negLogConstant() << std::endl << std::endl; conditionals_.print( - "", [&](Key k) { return formatter(k); }, - [&](const GaussianConditional::shared_ptr &gf) -> std::string { + "", + [&](Key k) { return formatter(k); }, + [&](const GaussianConditional::shared_ptr& gf) -> std::string { RedirectCout rd; if (gf && !gf->empty()) { gf->print("", formatter); @@ -230,20 +237,19 @@ KeyVector HybridGaussianConditional::continuousParents() const { const auto range = parents(); KeyVector continuousParentKeys(range.begin(), range.end()); // Loop over all discrete keys: - for (const auto &discreteKey : discreteKeys()) { + for (const auto& discreteKey : discreteKeys()) { const Key key = discreteKey.first; // remove that key from continuousParentKeys: - continuousParentKeys.erase(std::remove(continuousParentKeys.begin(), - continuousParentKeys.end(), key), - continuousParentKeys.end()); + continuousParentKeys.erase( + std::remove(continuousParentKeys.begin(), continuousParentKeys.end(), key), + continuousParentKeys.end()); } return continuousParentKeys; } /* ************************************************************************* */ -bool HybridGaussianConditional::allFrontalsGiven( - const 
VectorValues &given) const { - for (auto &&kv : given) { +bool HybridGaussianConditional::allFrontalsGiven(const VectorValues& given) const { + for (auto&& kv : given) { if (given.find(kv.first) == given.end()) { return false; } @@ -253,7 +259,7 @@ bool HybridGaussianConditional::allFrontalsGiven( /* ************************************************************************* */ std::shared_ptr HybridGaussianConditional::likelihood( - const VectorValues &given) const { + const VectorValues& given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( "HybridGaussianConditional::likelihood: given values are missing some " @@ -264,8 +270,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( const KeyVector continuousParentKeys = continuousParents(); const HybridGaussianFactor::FactorValuePairs likelihoods( conditionals_, - [&](const GaussianConditional::shared_ptr &conditional) - -> GaussianFactorValuePair { + [&](const GaussianConditional::shared_ptr& conditional) -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_; if (Cgm_Kgcm == 0.0) { @@ -276,26 +281,24 @@ std::shared_ptr HybridGaussianConditional::likelihood( return {likelihood_m, Cgm_Kgcm}; } }); - return std::make_shared(discreteParentKeys, - likelihoods); + return std::make_shared(discreteParentKeys, likelihoods); } /* ************************************************************************* */ -std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) { +std::set DiscreteKeysAsSet(const DiscreteKeys& discreteKeys) { std::set s(discreteKeys.begin(), discreteKeys.end()); return s; } /* *******************************************************************************/ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune( - const DecisionTreeFactor &discreteProbs) const { + const DecisionTreeFactor& discreteProbs) const { // Find keys in discreteProbs.keys() but not in 
this->keys(): std::set mine(this->keys().begin(), this->keys().end()); - std::set theirs(discreteProbs.keys().begin(), - discreteProbs.keys().end()); + std::set theirs(discreteProbs.keys().begin(), discreteProbs.keys().end()); std::vector diff; - std::set_difference(theirs.begin(), theirs.end(), mine.begin(), mine.end(), - std::back_inserter(diff)); + std::set_difference( + theirs.begin(), theirs.end(), mine.begin(), mine.end(), std::back_inserter(diff)); // Find maximum probability value for every combination of our keys. Ordering keys(diff); @@ -303,26 +306,24 @@ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune( // Check the max value for every combination of our keys. // If the max value is 0.0, we can prune the corresponding conditional. - auto pruner = [&](const Assignment &choices, - const GaussianConditional::shared_ptr &conditional) - -> GaussianConditional::shared_ptr { + auto pruner = + [&](const Assignment& choices, + const GaussianConditional::shared_ptr& conditional) -> GaussianConditional::shared_ptr { return (max->evaluate(choices) == 0.0) ? 
nullptr : conditional; }; auto pruned_conditionals = conditionals_.apply(pruner); - return std::make_shared(discreteKeys(), - pruned_conditionals); + return std::make_shared(discreteKeys(), pruned_conditionals); } /* *******************************************************************************/ -double HybridGaussianConditional::logProbability( - const HybridValues &values) const { +double HybridGaussianConditional::logProbability(const HybridValues& values) const { auto conditional = conditionals_(values.discrete()); return conditional->logProbability(values.continuous()); } /* *******************************************************************************/ -double HybridGaussianConditional::evaluate(const HybridValues &values) const { +double HybridGaussianConditional::evaluate(const HybridValues& values) const { auto conditional = conditionals_(values.discrete()); return conditional->evaluate(values.continuous()); } diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index acdd9ef89c..d49630b642 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -32,8 +32,8 @@ namespace gtsam { /* *******************************************************************************/ -HybridGaussianFactor::FactorValuePairs -HybridGaussianFactor::augment(const FactorValuePairs &factors) { +HybridGaussianFactor::FactorValuePairs HybridGaussianFactor::augment( + const FactorValuePairs& factors) { // Find the minimum value so we can "proselytize" to positive values. // Done because we can't have sqrt of negative numbers. DecisionTree gaussianFactors; @@ -44,18 +44,16 @@ HybridGaussianFactor::augment(const FactorValuePairs &factors) { double min_value = valueTree.min(); // Finally, update the [A|b] matrices. 
- auto update = [&min_value](const auto &gfv) -> GaussianFactorValuePair { + auto update = [&min_value](const auto& gfv) -> GaussianFactorValuePair { auto [gf, value] = gfv; auto jf = std::dynamic_pointer_cast(gf); - if (!jf) - return {gf, 0.0}; // should this be zero or infinite? + if (!jf) return {gf, 0.0}; // should this be zero or infinite? double normalized_value = value - min_value; // If the value is 0, do nothing - if (normalized_value == 0.0) - return {gf, 0.0}; + if (normalized_value == 0.0) return {gf, value}; GaussianFactorGraph gfg; gfg.push_back(jf); @@ -66,40 +64,42 @@ HybridGaussianFactor::augment(const FactorValuePairs &factors) { auto constantFactor = std::make_shared(c); gfg.push_back(constantFactor); - return {std::make_shared(gfg), normalized_value}; + // NOTE(Frank): we store the actual value, not the normalized value: + return {std::make_shared(gfg), value}; }; return FactorValuePairs(factors, update); } /* *******************************************************************************/ struct HybridGaussianFactor::ConstructorHelper { - KeyVector continuousKeys; // Continuous keys extracted from factors - DiscreteKeys discreteKeys; // Discrete keys provided to the constructors - FactorValuePairs pairs; // The decision tree with factors and scalars + KeyVector continuousKeys; // Continuous keys extracted from factors + DiscreteKeys discreteKeys; // Discrete keys provided to the constructors + FactorValuePairs pairs; // The decision tree with factors and scalars - ConstructorHelper(const DiscreteKey &discreteKey, - const std::vector &factors) + /// Constructor for a single discrete key and a vector of Gaussian factors + ConstructorHelper(const DiscreteKey& discreteKey, + const std::vector& factors) : discreteKeys({discreteKey}) { // Extract continuous keys from the first non-null factor - for (const auto &factor : factors) { + for (const auto& factor : factors) { if (factor && continuousKeys.empty()) { continuousKeys = factor->keys(); break; } 
} // Build the FactorValuePairs DecisionTree - pairs = FactorValuePairs( - DecisionTree(discreteKeys, factors), - [](const auto &f) { - return std::pair{f, 0.0}; - }); + pairs = FactorValuePairs(DecisionTree(discreteKeys, factors), + [](const auto& f) { + return std::pair{f, 0.0}; + }); } - ConstructorHelper(const DiscreteKey &discreteKey, - const std::vector &factorPairs) + /// Constructor for a single discrete key and a vector of GaussianFactorValuePairs + ConstructorHelper(const DiscreteKey& discreteKey, + const std::vector& factorPairs) : discreteKeys({discreteKey}) { // Extract continuous keys from the first non-null factor - for (const auto &pair : factorPairs) { + for (const auto& pair : factorPairs) { if (pair.first && continuousKeys.empty()) { continuousKeys = pair.first->keys(); break; @@ -110,12 +110,12 @@ struct HybridGaussianFactor::ConstructorHelper { pairs = FactorValuePairs(discreteKeys, factorPairs); } - ConstructorHelper(const DiscreteKeys &discreteKeys, - const FactorValuePairs &factorPairs) + /// Constructor for a vector of discrete keys and a vector of GaussianFactorValuePairs + ConstructorHelper(const DiscreteKeys& discreteKeys, const FactorValuePairs& factorPairs) : discreteKeys(discreteKeys) { // Extract continuous keys from the first non-null factor // TODO: just stop after first non-null factor - factorPairs.visit([&](const GaussianFactorValuePair &pair) { + factorPairs.visit([&](const GaussianFactorValuePair& pair) { if (pair.first && continuousKeys.empty()) { continuousKeys = pair.first->keys(); } @@ -127,40 +127,32 @@ struct HybridGaussianFactor::ConstructorHelper { }; /* *******************************************************************************/ -HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper &helper) - : Base(helper.continuousKeys, helper.discreteKeys), - factors_(augment(helper.pairs)) {} +HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper& helper) + : Base(helper.continuousKeys, 
helper.discreteKeys), factors_(augment(helper.pairs)) {} -/* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor( - const DiscreteKey &discreteKey, - const std::vector &factorPairs) + const DiscreteKey& discreteKey, const std::vector& factorPairs) : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {} -/* *******************************************************************************/ -HybridGaussianFactor::HybridGaussianFactor( - const DiscreteKey &discreteKey, - const std::vector &factorPairs) +HybridGaussianFactor::HybridGaussianFactor(const DiscreteKey& discreteKey, + const std::vector& factorPairs) : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {} -/* *******************************************************************************/ -HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys, - const FactorValuePairs &factorPairs) +HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys& discreteKeys, + const FactorValuePairs& factorPairs) : HybridGaussianFactor(ConstructorHelper(discreteKeys, factorPairs)) {} /* *******************************************************************************/ -bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { - const This *e = dynamic_cast(&lf); - if (e == nullptr) - return false; +bool HybridGaussianFactor::equals(const HybridFactor& lf, double tol) const { + const This* e = dynamic_cast(&lf); + if (e == nullptr) return false; // This will return false if either factors_ is empty or e->factors_ is // empty, but not if both are empty or both are not empty: - if (factors_.empty() ^ e->factors_.empty()) - return false; + if (factors_.empty() ^ e->factors_.empty()) return false; // Check the base and the factors: - auto compareFunc = [tol](const auto &pair1, const auto &pair2) { + auto compareFunc = [tol](const auto& pair1, const auto& pair2) { auto f1 = pair1.first, f2 = 
pair2.first; bool match = (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol)); return match && gtsam::equal(pair1.second, pair2.second, tol); @@ -169,8 +161,7 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { } /* *******************************************************************************/ -void HybridGaussianFactor::print(const std::string &s, - const KeyFormatter &formatter) const { +void HybridGaussianFactor::print(const std::string& s, const KeyFormatter& formatter) const { std::cout << (s.empty() ? "" : s + "\n"); HybridFactor::print("", formatter); std::cout << "{\n"; @@ -178,8 +169,9 @@ void HybridGaussianFactor::print(const std::string &s, std::cout << " empty" << std::endl; } else { factors_.print( - "", [&](Key k) { return formatter(k); }, - [&](const auto &pair) -> std::string { + "", + [&](Key k) { return formatter(k); }, + [&](const auto& pair) -> std::string { RedirectCout rd; std::cout << ":\n"; if (pair.first) { @@ -195,22 +187,25 @@ void HybridGaussianFactor::print(const std::string &s, } /* *******************************************************************************/ -HybridGaussianFactor::sharedFactor -HybridGaussianFactor::operator()(const DiscreteValues &assignment) const { +HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()( + const DiscreteValues& assignment) const { return factors_(assignment).first; } /* *******************************************************************************/ HybridGaussianProductFactor HybridGaussianFactor::asProductFactor() const { - return {{factors_, - [](const auto &pair) { return GaussianFactorGraph{pair.first}; }}}; + // Implemented by creating a new DecisionTree where: + // - The structure (keys and assignments) is preserved from factors_ + // - Each leaf converted to a GaussianFactorGraph with just the factor and its scalar. 
+ return {{factors_, [](const auto& pair) -> std::pair { + return {GaussianFactorGraph{pair.first}, pair.second}; + }}}; } /* *******************************************************************************/ /// Helper method to compute the error of a component. -static double -PotentiallyPrunedComponentError(const GaussianFactor::shared_ptr &gf, - const VectorValues &values) { +static double PotentiallyPrunedComponentError(const GaussianFactor::shared_ptr& gf, + const VectorValues& values) { // Check if valid pointer if (gf) { return gf->error(values); @@ -222,10 +217,10 @@ PotentiallyPrunedComponentError(const GaussianFactor::shared_ptr &gf, } /* *******************************************************************************/ -AlgebraicDecisionTree -HybridGaussianFactor::errorTree(const VectorValues &continuousValues) const { +AlgebraicDecisionTree HybridGaussianFactor::errorTree( + const VectorValues& continuousValues) const { // functor to convert from sharedFactor to double error value. - auto errorFunc = [&continuousValues](const auto &pair) { + auto errorFunc = [&continuousValues](const auto& pair) { return PotentiallyPrunedComponentError(pair.first, continuousValues); }; DecisionTree error_tree(factors_, errorFunc); @@ -233,10 +228,10 @@ HybridGaussianFactor::errorTree(const VectorValues &continuousValues) const { } /* *******************************************************************************/ -double HybridGaussianFactor::error(const HybridValues &values) const { +double HybridGaussianFactor::error(const HybridValues& values) const { // Directly index to get the component, no need to build the whole tree. 
const auto pair = factors_(values.discrete()); return PotentiallyPrunedComponentError(pair.first, values.continuous()); } -} // namespace gtsam +} // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 5d2b4c2037..b94905652b 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -18,7 +18,6 @@ * @date Mar 11, 2022 */ -#include "gtsam/discrete/DiscreteValues.h" #include #include #include @@ -40,6 +39,7 @@ #include #include #include +#include "gtsam/discrete/DiscreteValues.h" #include #include @@ -59,15 +59,14 @@ using std::dynamic_pointer_cast; /* ************************************************************************ */ // Throw a runtime exception for method specified in string s, and factor f: -static void throwRuntimeError(const std::string &s, - const std::shared_ptr &f) { - auto &fr = *f; - throw std::runtime_error(s + " not implemented for factor type " + - demangle(typeid(fr).name()) + "."); +static void throwRuntimeError(const std::string& s, const std::shared_ptr& f) { + auto& fr = *f; + throw std::runtime_error(s + " not implemented for factor type " + demangle(typeid(fr).name()) + + "."); } /* ************************************************************************ */ -const Ordering HybridOrdering(const HybridGaussianFactorGraph &graph) { +const Ordering HybridOrdering(const HybridGaussianFactorGraph& graph) { KeySet discrete_keys = graph.discreteKeySet(); const VariableIndex index(graph); return Ordering::ColamdConstrainedLast( @@ -75,15 +74,14 @@ const Ordering HybridOrdering(const HybridGaussianFactorGraph &graph) { } /* ************************************************************************ */ -static void printFactor(const std::shared_ptr &factor, - const DiscreteValues &assignment, - const KeyFormatter &keyFormatter) { +static void printFactor(const std::shared_ptr& factor, + const DiscreteValues& assignment, + const 
KeyFormatter& keyFormatter) { if (auto hgf = std::dynamic_pointer_cast(factor)) { if (assignment.empty()) hgf->print("HybridGaussianFactor:", keyFormatter); else - hgf->operator()(assignment) - ->print("HybridGaussianFactor, component:", keyFormatter); + hgf->operator()(assignment)->print("HybridGaussianFactor, component:", keyFormatter); } else if (auto gf = std::dynamic_pointer_cast(factor)) { factor->print("GaussianFactor:\n", keyFormatter); @@ -98,9 +96,7 @@ static void printFactor(const std::shared_ptr &factor, if (assignment.empty()) hc->print("HybridConditional:", keyFormatter); else - hc->asHybrid() - ->choose(assignment) - ->print("HybridConditional, component:\n", keyFormatter); + hc->asHybrid()->choose(assignment)->print("HybridConditional, component:\n", keyFormatter); } } else { factor->print("Unknown factor type\n", keyFormatter); @@ -108,13 +104,13 @@ static void printFactor(const std::shared_ptr &factor, } /* ************************************************************************ */ -void HybridGaussianFactorGraph::print(const std::string &s, - const KeyFormatter &keyFormatter) const { +void HybridGaussianFactorGraph::print(const std::string& s, + const KeyFormatter& keyFormatter) const { std::cout << (s.empty() ? 
"" : s + " ") << std::endl; std::cout << "size: " << size() << std::endl; for (size_t i = 0; i < factors_.size(); i++) { - auto &&factor = factors_[i]; + auto&& factor = factors_[i]; if (factor == nullptr) { std::cout << "Factor " << i << ": nullptr\n"; continue; @@ -129,15 +125,15 @@ void HybridGaussianFactorGraph::print(const std::string &s, /* ************************************************************************ */ void HybridGaussianFactorGraph::printErrors( - const HybridValues &values, const std::string &str, - const KeyFormatter &keyFormatter, - const std::function - &printCondition) const { + const HybridValues& values, + const std::string& str, + const KeyFormatter& keyFormatter, + const std::function& + printCondition) const { std::cout << str << " size: " << size() << std::endl << std::endl; for (size_t i = 0; i < factors_.size(); i++) { - auto &&factor = factors_[i]; + auto&& factor = factors_[i]; if (factor == nullptr) { std::cout << "Factor " << i << ": nullptr\n"; continue; @@ -157,14 +153,13 @@ void HybridGaussianFactorGraph::printErrors( /* ************************************************************************ */ // TODO(dellaert): it's probably more efficient to first collect the discrete // keys, and then loop over all assignments to populate a vector. -HybridGaussianProductFactor -HybridGaussianFactorGraph::collectProductFactor() const { +HybridGaussianProductFactor HybridGaussianFactorGraph::collectProductFactor() const { HybridGaussianProductFactor result; - for (auto &f : factors_) { + for (auto& f : factors_) { // TODO(dellaert): can we make this cleaner and less error-prone? 
if (auto orphan = dynamic_pointer_cast(f)) { - continue; // Ignore OrphanWrapper + continue; // Ignore OrphanWrapper } else if (auto gf = dynamic_pointer_cast(f)) { result += gf; } else if (auto gc = dynamic_pointer_cast(f)) { @@ -172,7 +167,7 @@ HybridGaussianFactorGraph::collectProductFactor() const { } else if (auto gmf = dynamic_pointer_cast(f)) { result += *gmf; } else if (auto gm = dynamic_pointer_cast(f)) { - result += *gm; // handled above already? + result += *gm; // handled above already? } else if (auto hc = dynamic_pointer_cast(f)) { if (auto gm = hc->asHybrid()) { result += *gm; @@ -198,11 +193,10 @@ HybridGaussianFactorGraph::collectProductFactor() const { } /* ************************************************************************ */ -static std::pair> -continuousElimination(const HybridGaussianFactorGraph &factors, - const Ordering &frontalKeys) { +static std::pair> continuousElimination( + const HybridGaussianFactorGraph& factors, const Ordering& frontalKeys) { GaussianFactorGraph gfg; - for (auto &f : factors) { + for (auto& f : factors) { if (auto gf = dynamic_pointer_cast(f)) { gfg.push_back(gf); } else if (auto orphan = dynamic_pointer_cast(f)) { @@ -230,7 +224,7 @@ continuousElimination(const HybridGaussianFactorGraph &factors, * @return AlgebraicDecisionTree */ static AlgebraicDecisionTree probabilitiesFromNegativeLogValues( - const AlgebraicDecisionTree &logValues) { + const AlgebraicDecisionTree& logValues) { // Perform normalization double min_log = logValues.min(); AlgebraicDecisionTree probabilities = DecisionTree( @@ -241,18 +235,17 @@ static AlgebraicDecisionTree probabilitiesFromNegativeLogValues( } /* ************************************************************************ */ -static std::pair> -discreteElimination(const HybridGaussianFactorGraph &factors, - const Ordering &frontalKeys) { +static std::pair> discreteElimination( + const HybridGaussianFactorGraph& factors, const Ordering& frontalKeys) { DiscreteFactorGraph dfg; - for 
(auto &f : factors) { + for (auto& f : factors) { if (auto df = dynamic_pointer_cast(f)) { dfg.push_back(df); } else if (auto gmf = dynamic_pointer_cast(f)) { // Case where we have a HybridGaussianFactor with no continuous keys. // In this case, compute discrete probabilities. - auto logProbability = [&](const auto &pair) -> double { + auto logProbability = [&](const auto& pair) -> double { auto [factor, _] = pair; if (!factor) return 0.0; return factor->error(VectorValues()); @@ -262,8 +255,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, AlgebraicDecisionTree probabilities = probabilitiesFromNegativeLogValues(logProbabilities); - dfg.emplace_shared(gmf->discreteKeys(), - probabilities); + dfg.emplace_shared(gmf->discreteKeys(), probabilities); } else if (auto orphan = dynamic_pointer_cast(f)) { // Ignore orphaned clique. @@ -284,8 +276,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } /* ************************************************************************ */ -using Result = std::pair, - HybridGaussianFactor::sharedFactor>; +using Result = std::pair, GaussianFactor::shared_ptr>; +using ResultTree = DecisionTree>; /** * Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m) @@ -293,11 +285,10 @@ using Result = std::pair, * The residual error contains no keys, and only * depends on the discrete separator if present. 
*/ -static std::shared_ptr createDiscreteFactor( - const DecisionTree &eliminationResults, - const DiscreteKeys &discreteSeparator) { - auto negLogProbability = [&](const Result &pair) -> double { - const auto &[conditional, factor] = pair; +static std::shared_ptr createDiscreteFactor(const ResultTree& eliminationResults, + const DiscreteKeys& discreteSeparator) { + auto negLogProbability = [&](const auto& pair) -> double { + const auto& [conditional, factor] = pair.first; if (conditional && factor) { static const VectorValues kEmpty; // If the factor is not null, it has no keys, just contains the residual. @@ -324,12 +315,11 @@ static std::shared_ptr createDiscreteFactor( // Create HybridGaussianFactor on the separator, taking care to correct // for conditional constants. -static std::shared_ptr createHybridGaussianFactor( - const DecisionTree &eliminationResults, - const DiscreteKeys &discreteSeparator) { +static std::shared_ptr createHybridGaussianFactor(const ResultTree& eliminationResults, + const DiscreteKeys& discreteSeparator) { // Correct for the normalization constant used up by the conditional - auto correct = [&](const Result &pair) -> GaussianFactorValuePair { - const auto &[conditional, factor] = pair; + auto correct = [&](const auto& pair) -> GaussianFactorValuePair { + const auto& [conditional, factor] = pair.first; if (conditional && factor) { auto hf = std::dynamic_pointer_cast(factor); if (!hf) throw std::runtime_error("Expected HessianFactor!"); @@ -339,29 +329,27 @@ static std::shared_ptr createHybridGaussianFactor( const double negLogK = conditional->negLogConstant(); hf->constantTerm() += -2.0 * negLogK; return {factor, negLogK}; - } else if (!conditional && !factor){ + } else if (!conditional && !factor) { return {nullptr, 0.0}; // TODO(frank): or should this be infinity? 
} else { - throw std::runtime_error("createHybridGaussianFactors has mixed NULLs"); + throw std::runtime_error("createHybridGaussianFactors has mixed NULLs"); } }; - DecisionTree newFactors(eliminationResults, - correct); + DecisionTree newFactors(eliminationResults, correct); return std::make_shared(discreteSeparator, newFactors); } /* *******************************************************************************/ /// Get the discrete keys from the HybridGaussianFactorGraph as DiscreteKeys. -static auto GetDiscreteKeys = - [](const HybridGaussianFactorGraph &hfg) -> DiscreteKeys { +static auto GetDiscreteKeys = [](const HybridGaussianFactorGraph& hfg) -> DiscreteKeys { const std::set discreteKeySet = hfg.discreteKeys(); return {discreteKeySet.begin(), discreteKeySet.end()}; }; /* *******************************************************************************/ std::pair> -HybridGaussianFactorGraph::eliminate(const Ordering &keys) const { +HybridGaussianFactorGraph::eliminate(const Ordering& keys) const { // Since we eliminate all continuous variables first, // the discrete separator will be *all* the discrete keys. DiscreteKeys discreteSeparator = GetDiscreteKeys(*this); @@ -377,9 +365,12 @@ HybridGaussianFactorGraph::eliminate(const Ordering &keys) const { // This is the elimination method on the leaf nodes bool someContinuousLeft = false; - auto eliminate = [&](const GaussianFactorGraph &graph) -> Result { + auto eliminate = + [&](const std::pair& pair) -> std::pair { + const auto& [graph, scalar] = pair; + if (graph.empty()) { - return {nullptr, nullptr}; + return {{nullptr, nullptr}, 0.0}; } // Expensive elimination of product factor. @@ -388,25 +379,24 @@ HybridGaussianFactorGraph::eliminate(const Ordering &keys) const { // Record whether there any continuous variables left someContinuousLeft |= !result.second->empty(); - return result; + return {result, scalar}; }; // Perform elimination! 
- DecisionTree eliminationResults(prunedProductFactor, eliminate); + ResultTree eliminationResults(prunedProductFactor, eliminate); // If there are no more continuous parents we create a DiscreteFactor with the // error for each discrete choice. Otherwise, create a HybridGaussianFactor // on the separator, taking care to correct for conditional constants. - auto newFactor = - someContinuousLeft - ? createHybridGaussianFactor(eliminationResults, discreteSeparator) - : createDiscreteFactor(eliminationResults, discreteSeparator); + auto newFactor = someContinuousLeft + ? createHybridGaussianFactor(eliminationResults, discreteSeparator) + : createDiscreteFactor(eliminationResults, discreteSeparator); // Create the HybridGaussianConditional from the conditionals HybridGaussianConditional::Conditionals conditionals( - eliminationResults, [](const Result &pair) { return pair.first; }); - auto hybridGaussian = std::make_shared( - discreteSeparator, conditionals); + eliminationResults, [](const auto& pair) { return pair.first.first; }); + auto hybridGaussian = + std::make_shared(discreteSeparator, conditionals); return {std::make_shared(hybridGaussian), newFactor}; } @@ -426,8 +416,7 @@ HybridGaussianFactorGraph::eliminate(const Ordering &keys) const { * be INCORRECT and there will be NO error raised. */ std::pair> // -EliminateHybrid(const HybridGaussianFactorGraph &factors, - const Ordering &keys) { +EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys) { // NOTE: Because we are in the Conditional Gaussian regime there are only // a few cases: // 1. continuous variable, make a hybrid Gaussian conditional if there are @@ -478,7 +467,7 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, // 3. 
if not, we do hybrid elimination: bool only_discrete = true, only_continuous = true; - for (auto &&factor : factors) { + for (auto&& factor : factors) { if (auto hybrid_factor = std::dynamic_pointer_cast(factor)) { if (hybrid_factor->isDiscrete()) { only_continuous = false; @@ -489,11 +478,9 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, only_discrete = false; break; } - } else if (auto cont_factor = - std::dynamic_pointer_cast(factor)) { + } else if (auto cont_factor = std::dynamic_pointer_cast(factor)) { only_discrete = false; - } else if (auto discrete_factor = - std::dynamic_pointer_cast(factor)) { + } else if (auto discrete_factor = std::dynamic_pointer_cast(factor)) { only_continuous = false; } } @@ -514,10 +501,10 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, /* ************************************************************************ */ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( - const VectorValues &continuousValues) const { + const VectorValues& continuousValues) const { AlgebraicDecisionTree result(0.0); // Iterate over each factor. - for (auto &factor : factors_) { + for (auto& factor : factors_) { if (auto hf = std::dynamic_pointer_cast(factor)) { // Add errorTree for hybrid factors, includes HybridGaussianConditionals! 
result = result + hf->errorTree(continuousValues); @@ -535,7 +522,7 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( } /* ************************************************************************ */ -double HybridGaussianFactorGraph::probPrime(const HybridValues &values) const { +double HybridGaussianFactorGraph::probPrime(const HybridValues& values) const { double error = this->error(values); // NOTE: The 0.5 term is handled by each factor return std::exp(-error); @@ -543,7 +530,7 @@ double HybridGaussianFactorGraph::probPrime(const HybridValues &values) const { /* ************************************************************************ */ AlgebraicDecisionTree HybridGaussianFactorGraph::discretePosterior( - const VectorValues &continuousValues) const { + const VectorValues& continuousValues) const { AlgebraicDecisionTree errors = this->errorTree(continuousValues); AlgebraicDecisionTree p = errors.apply([](double error) { // NOTE: The 0.5 term is handled by each factor @@ -553,10 +540,9 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::discretePosterior( } /* ************************************************************************ */ -GaussianFactorGraph HybridGaussianFactorGraph::choose( - const DiscreteValues &assignment) const { +GaussianFactorGraph HybridGaussianFactorGraph::choose(const DiscreteValues& assignment) const { GaussianFactorGraph gfg; - for (auto &&f : *this) { + for (auto&& f : *this) { if (auto gf = std::dynamic_pointer_cast(f)) { gfg.push_back(gf); } else if (auto gc = std::dynamic_pointer_cast(f)) { diff --git a/gtsam/hybrid/HybridGaussianProductFactor.cpp b/gtsam/hybrid/HybridGaussianProductFactor.cpp index c9b4c07dd6..2430750d1d 100644 --- a/gtsam/hybrid/HybridGaussianProductFactor.cpp +++ b/gtsam/hybrid/HybridGaussianProductFactor.cpp @@ -24,66 +24,64 @@ namespace gtsam { -static GaussianFactorGraph add(const GaussianFactorGraph &graph1, - const GaussianFactorGraph &graph2) { - auto result = graph1; - 
result.push_back(graph2); - return result; +using Y = HybridGaussianProductFactor::Y; + +static Y add(const Y& y1, const Y& y2) { + GaussianFactorGraph result = y1.first; + result.push_back(y2.first); + return {result, y1.second + y2.second}; }; -HybridGaussianProductFactor operator+(const HybridGaussianProductFactor &a, - const HybridGaussianProductFactor &b) { +HybridGaussianProductFactor operator+(const HybridGaussianProductFactor& a, + const HybridGaussianProductFactor& b) { return a.empty() ? b : HybridGaussianProductFactor(a.apply(b, add)); } HybridGaussianProductFactor HybridGaussianProductFactor::operator+( - const HybridGaussianFactor &factor) const { + const HybridGaussianFactor& factor) const { return *this + factor.asProductFactor(); } HybridGaussianProductFactor HybridGaussianProductFactor::operator+( - const GaussianFactor::shared_ptr &factor) const { + const GaussianFactor::shared_ptr& factor) const { return *this + HybridGaussianProductFactor(factor); } -HybridGaussianProductFactor &HybridGaussianProductFactor::operator+=( - const GaussianFactor::shared_ptr &factor) { +HybridGaussianProductFactor& HybridGaussianProductFactor::operator+=( + const GaussianFactor::shared_ptr& factor) { *this = *this + factor; return *this; } -HybridGaussianProductFactor & -HybridGaussianProductFactor::operator+=(const HybridGaussianFactor &factor) { +HybridGaussianProductFactor& HybridGaussianProductFactor::operator+=( + const HybridGaussianFactor& factor) { *this = *this + factor; return *this; } -void HybridGaussianProductFactor::print(const std::string &s, - const KeyFormatter &formatter) const { +void HybridGaussianProductFactor::print(const std::string& s, const KeyFormatter& formatter) const { KeySet keys; - auto printer = [&](const Y &graph) { - if (keys.size() == 0) - keys = graph.keys(); - return "Graph of size " + std::to_string(graph.size()); + auto printer = [&](const Y& y) { + if (keys.empty()) keys = y.first.keys(); + return "Graph of size " + 
std::to_string(y.first.size()) + + ", scalar sum: " + std::to_string(y.second); }; Base::print(s, formatter, printer); - if (keys.size() > 0) { + if (!keys.empty()) { std::stringstream ss; ss << s << " Keys:"; - for (auto &&key : keys) - ss << " " << formatter(key); + for (auto&& key : keys) ss << " " << formatter(key); std::cout << ss.str() << "." << std::endl; } } HybridGaussianProductFactor HybridGaussianProductFactor::removeEmpty() const { - auto emptyGaussian = [](const GaussianFactorGraph &graph) { - bool hasNull = - std::any_of(graph.begin(), graph.end(), - [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); - return hasNull ? GaussianFactorGraph() : graph; + auto emptyGaussian = [](const Y& y) { + bool hasNull = std::any_of( + y.first.begin(), y.first.end(), [](const GaussianFactor::shared_ptr& ptr) { return !ptr; }); + return hasNull ? Y{GaussianFactorGraph(), 0.0} : y; }; return {Base(*this, emptyGaussian)}; } -} // namespace gtsam +} // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianProductFactor.h b/gtsam/hybrid/HybridGaussianProductFactor.h index f1bd8bc3c5..6d33daa1ba 100644 --- a/gtsam/hybrid/HybridGaussianProductFactor.h +++ b/gtsam/hybrid/HybridGaussianProductFactor.h @@ -26,10 +26,11 @@ namespace gtsam { class HybridGaussianFactor; -/// Alias for DecisionTree of GaussianFactorGraphs -class HybridGaussianProductFactor : public DecisionTree { +/// Alias for DecisionTree of GaussianFactorGraphs and their scalar sums +class HybridGaussianProductFactor + : public DecisionTree> { public: - using Y = GaussianFactorGraph; + using Y = std::pair; using Base = DecisionTree; /// @name Constructors @@ -44,7 +45,8 @@ class HybridGaussianProductFactor : public DecisionTree - HybridGaussianProductFactor(const std::shared_ptr& factor) : Base(Y{factor}) {} + HybridGaussianProductFactor(const std::shared_ptr& factor) + : Base(Y{GaussianFactorGraph{factor}, 0.0}) {} /** * @brief Construct from DecisionTree @@ -88,7 +90,9 @@ class 
HybridGaussianProductFactor : public DecisionTree(X(0), Z_3x1, I_3x3, X(1), I_3x3), - std::make_shared(X(0), Vector3::Ones(), I_3x3, X(1), - I_3x3)}); + std::make_shared(X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)}); hfg.add(gm); EXPECT_LONGS_EQUAL(2, hfg.size()); @@ -99,7 +99,7 @@ std::vector components(Key key) { return {std::make_shared(key, I_3x3, Z_3x1), std::make_shared(key, I_3x3, Vector3::Ones())}; } -} // namespace two +} // namespace two /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) { @@ -239,16 +239,16 @@ TEST(HybridGaussianFactorGraph, Conditionals) { Switching switching(4); HybridGaussianFactorGraph hfg; - hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X0) + hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X0) Ordering ordering; ordering.push_back(X(0)); HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering); HybridGaussianFactorGraph hfg2; - hfg2.push_back(*bayes_net); // P(X0) - hfg2.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0) - hfg2.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1) - hfg2.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) + hfg2.push_back(*bayes_net); // P(X0) + hfg2.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0) + hfg2.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1) + hfg2.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) ordering += X(1), X(2), M(0), M(1); // Created product of first two factors and check eliminate: @@ -282,8 +282,7 @@ TEST(HybridGaussianFactorGraph, Conditionals) { expected_continuous.insert(X(1), 1); expected_continuous.insert(X(2), 2); expected_continuous.insert(X(3), 4); - Values result_continuous = - switching.linearizationPoint.retract(result.continuous()); + Values result_continuous = switching.linearizationPoint.retract(result.continuous()); 
EXPECT(assert_equal(expected_continuous, result_continuous)); DiscreteValues expected_discrete; @@ -318,7 +317,7 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1) Switching s(3); - const HybridGaussianFactorGraph &graph = s.linearizedFactorGraph; + const HybridGaussianFactorGraph& graph = s.linearizedFactorGraph; const HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential(); @@ -376,19 +375,18 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) { auto error_tree2 = graph.errorTree(delta.continuous()); // regression - leaves = {0.50985198, 0.0097577296, 0.50009425, 0, - 0.52922138, 0.029127133, 0.50985105, 0.0097567964}; + leaves = { + 0.50985198, 0.0097577296, 0.50009425, 0, 0.52922138, 0.029127133, 0.50985105, 0.0097567964}; AlgebraicDecisionTree expected_error2(s.modes, leaves); EXPECT(assert_equal(expected_error, error_tree, 1e-7)); } /* ****************************************************************************/ -// Check that assembleGraphTree assembles Gaussian factor graphs for each -// assignment. +// Check that collectProductFactor works correctly. 
TEST(HybridGaussianFactorGraph, collectProductFactor) { const int num_measurements = 1; - auto fg = tiny::createHybridGaussianFactorGraph( - num_measurements, VectorValues{{Z(0), Vector1(5.0)}}); + VectorValues vv{{Z(0), Vector1(5.0)}}; + auto fg = tiny::createHybridGaussianFactorGraph(num_measurements, vv); EXPECT_LONGS_EQUAL(3, fg.size()); // Assemble graph tree: @@ -411,23 +409,26 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) { DiscreteValues d0{{M(0), 0}}, d1{{M(0), 1}}; // Expected decision tree with two factor graphs: - // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) - HybridGaussianProductFactor expected{ - {M(0), GaussianFactorGraph(std::vector{(*hybrid)(d0), prior}), - GaussianFactorGraph(std::vector{(*hybrid)(d1), prior})}}; - - EXPECT(assert_equal(expected(d0), actual(d0), 1e-5)); - EXPECT(assert_equal(expected(d1), actual(d1), 1e-5)); + // f(x0;mode=0)P(x0) + GaussianFactorGraph expectedFG0{(*hybrid)(d0), prior}; + EXPECT(assert_equal(expectedFG0, actual(d0).first, 1e-5)); + EXPECT(assert_equal(0.0, actual(d0).second, 1e-5)); + + // f(x0;mode=1)P(x0) + GaussianFactorGraph expectedFG1{(*hybrid)(d1), prior}; + EXPECT(assert_equal(expectedFG1, actual(d1).first, 1e-5)); + EXPECT(assert_equal(1.79176, actual(d1).second, 1e-5)); } /* ****************************************************************************/ // Check that the factor graph unnormalized probability is proportional to the - // Bayes net probability for the given measurements. - bool - ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, - const HybridGaussianFactorGraph &fg, size_t num_samples = 100) { - auto compute_ratio = [&](HybridValues *sample) -> double { - sample->update(measurements); // update sample with given measurements: +// Bayes net probability for the given measurements. 
+bool ratioTest(const HybridBayesNet& bn, + const VectorValues& measurements, + const HybridGaussianFactorGraph& fg, + size_t num_samples = 100) { + auto compute_ratio = [&](HybridValues* sample) -> double { + sample->update(measurements); // update sample with given measurements: return bn.evaluate(*sample) / fg.probPrime(*sample); }; @@ -437,8 +438,7 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) { // Test ratios for a number of independent samples: for (size_t i = 0; i < num_samples; i++) { HybridValues sample = bn.sample(&kRng); - if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) - return false; + if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false; } return true; } @@ -446,10 +446,12 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) { /* ****************************************************************************/ // Check that the bayes net unnormalized probability is proportional to the // Bayes net probability for the given measurements. 
-bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, - const HybridBayesNet &posterior, size_t num_samples = 100) { - auto compute_ratio = [&](HybridValues *sample) -> double { - sample->update(measurements); // update sample with given measurements: +bool ratioTest(const HybridBayesNet& bn, + const VectorValues& measurements, + const HybridBayesNet& posterior, + size_t num_samples = 100) { + auto compute_ratio = [&](HybridValues* sample) -> double { + sample->update(measurements); // update sample with given measurements: return bn.evaluate(*sample) / posterior.evaluate(*sample); }; @@ -461,8 +463,7 @@ bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, HybridValues sample = bn.sample(&kRng); // GTSAM_PRINT(sample); // std::cout << "ratio: " << compute_ratio(&sample) << std::endl; - if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) - return false; + if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false; } return true; } @@ -484,10 +485,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { // Create hybrid Gaussian factor on X(0). using tiny::mode; // regression, but mean checked to be 5.0 in both cases: - const auto conditional0 = std::make_shared( - X(0), Vector1(14.1421), I_1x1 * 2.82843), - conditional1 = std::make_shared( - X(0), Vector1(10.1379), I_1x1 * 2.02759); + const auto conditional0 = + std::make_shared(X(0), Vector1(14.1421), I_1x1 * 2.82843), + conditional1 = + std::make_shared(X(0), Vector1(10.1379), I_1x1 * 2.02759); expectedBayesNet.emplace_shared( mode, std::vector{conditional0, conditional1}); @@ -515,8 +516,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { bn.emplace_shared(m1, Z(0), I_1x1, X(0), parms); // Create prior on X(0). - bn.push_back( - GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5)); + bn.push_back(GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5)); // Add prior on m1. 
bn.emplace_shared(m1, "1/1"); @@ -534,10 +534,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { // Create hybrid Gaussian factor on X(0). // regression, but mean checked to be 5.0 in both cases: - const auto conditional0 = std::make_shared( - X(0), Vector1(10.1379), I_1x1 * 2.02759), - conditional1 = std::make_shared( - X(0), Vector1(14.1421), I_1x1 * 2.82843); + const auto conditional0 = + std::make_shared(X(0), Vector1(10.1379), I_1x1 * 2.02759), + conditional1 = + std::make_shared(X(0), Vector1(14.1421), I_1x1 * 2.82843); expectedBayesNet.emplace_shared( m1, std::vector{conditional0, conditional1}); @@ -570,10 +570,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { // Create hybrid Gaussian factor on X(0). using tiny::mode; // regression, but mean checked to be 5.0 in both cases: - const auto conditional0 = std::make_shared( - X(0), Vector1(17.3205), I_1x1 * 3.4641), - conditional1 = std::make_shared( - X(0), Vector1(10.274), I_1x1 * 2.0548); + const auto conditional0 = + std::make_shared(X(0), Vector1(17.3205), I_1x1 * 3.4641), + conditional1 = + std::make_shared(X(0), Vector1(10.274), I_1x1 * 2.0548); expectedBayesNet.emplace_shared( mode, std::vector{conditional0, conditional1}); @@ -617,27 +617,25 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { // NOTE: we add reverse topological so we can sample from the Bayes net.: // Add measurements: - std::vector> measurementModels{{Z_1x1, 3}, - {Z_1x1, 0.5}}; + std::vector> measurementModels{{Z_1x1, 3}, {Z_1x1, 0.5}}; for (size_t t : {0, 1, 2}) { // Create hybrid Gaussian factor on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; - bn.emplace_shared(noise_mode_t, Z(t), I_1x1, - X(t), measurementModels); + bn.emplace_shared( + noise_mode_t, Z(t), I_1x1, X(t), measurementModels); // Create prior on discrete mode N(t): bn.emplace_shared(noise_mode_t, "20/80"); } // Add motion models. TODO(frank): why are they exactly the same? 
- std::vector> motionModels{{Z_1x1, 0.2}, - {Z_1x1, 0.2}}; + std::vector> motionModels{{Z_1x1, 0.2}, {Z_1x1, 0.2}}; for (size_t t : {2, 1}) { // Create hybrid Gaussian factor on X(t) conditioned on X(t-1) // and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; - bn.emplace_shared(motion_model_t, X(t), I_1x1, - X(t - 1), motionModels); + bn.emplace_shared( + motion_model_t, X(t), I_1x1, X(t - 1), motionModels); // Create prior on motion model M(t): bn.emplace_shared(motion_model_t, "40/60"); @@ -650,8 +648,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { EXPECT_LONGS_EQUAL(6, bn.sample().continuous().size()); // Create measurements consistent with moving right every time: - const VectorValues measurements{ - {Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}}; + const VectorValues measurements{{Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}}; const HybridGaussianFactorGraph fg = bn.toFactorGraph(measurements); // Factor graph is: diff --git a/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp index 6d650246e3..017df14a55 100644 --- a/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp @@ -16,11 +16,11 @@ * @date October 2024 */ -#include "gtsam/inference/Key.h" #include #include #include #include +#include #include #include #include @@ -39,29 +39,27 @@ using symbol_shorthand::X; namespace examples { static const DiscreteKey m1(M(1), 2), m2(M(2), 3); -auto A1 = Matrix::Zero(2, 1); -auto A2 = Matrix::Zero(2, 2); -auto b = Matrix::Zero(2, 1); +const auto A1 = Matrix::Zero(2, 1); +const auto A2 = Matrix::Zero(2, 2); +const auto b = Matrix::Zero(2, 1); -auto f10 = std::make_shared(X(1), A1, X(2), A2, b); -auto f11 = std::make_shared(X(1), A1, X(2), A2, b); +const auto f10 = std::make_shared(X(1), A1, X(2), A2, b); +const auto f11 = std::make_shared(X(1), A1, X(2), A2, b); +const 
HybridGaussianFactor hybridFactorA(m1, {{f10, 10}, {f11, 11}}); -auto A3 = Matrix::Zero(2, 3); -auto f20 = std::make_shared(X(1), A1, X(3), A3, b); -auto f21 = std::make_shared(X(1), A1, X(3), A3, b); -auto f22 = std::make_shared(X(1), A1, X(3), A3, b); +const auto A3 = Matrix::Zero(2, 3); +const auto f20 = std::make_shared(X(1), A1, X(3), A3, b); +const auto f21 = std::make_shared(X(1), A1, X(3), A3, b); +const auto f22 = std::make_shared(X(1), A1, X(3), A3, b); -HybridGaussianFactor hybridFactorA(m1, {f10, f11}); -HybridGaussianFactor hybridFactorB(m2, {f20, f21, f22}); +const HybridGaussianFactor hybridFactorB(m2, {{f20, 20}, {f21, 21}, {f22, 22}}); // Simulate a pruned hybrid factor, in this case m2==1 is nulled out. -HybridGaussianFactor prunedFactorB(m2, {f20, nullptr, f22}); -} // namespace examples +const HybridGaussianFactor prunedFactorB(m2, {{f20, 20}, {nullptr, 1000}, {f22, 22}}); +} // namespace examples /* ************************************************************************* */ // Constructor -TEST(HybridGaussianProductFactor, Construct) { - HybridGaussianProductFactor product; -} +TEST(HybridGaussianProductFactor, Construct) { HybridGaussianProductFactor product; } /* ************************************************************************* */ // Add two Gaussian factors and check only one leaf in tree @@ -80,9 +78,10 @@ TEST(HybridGaussianProductFactor, AddTwoGaussianFactors) { auto leaf = product(Assignment()); // Check that the leaf contains both factors - EXPECT_LONGS_EQUAL(2, leaf.size()); - EXPECT(leaf.at(0) == f10); - EXPECT(leaf.at(1) == f11); + EXPECT_LONGS_EQUAL(2, leaf.first.size()); + EXPECT(leaf.first.at(0) == f10); + EXPECT(leaf.first.at(1) == f11); + EXPECT_DOUBLES_EQUAL(0, leaf.second, 1e-9); } /* ************************************************************************* */ @@ -107,9 +106,10 @@ TEST(HybridGaussianProductFactor, AddTwoGaussianConditionals) { auto leaf = product(Assignment()); // Check that the leaf contains both 
conditionals - EXPECT_LONGS_EQUAL(2, leaf.size()); - EXPECT(leaf.at(0) == gc1); - EXPECT(leaf.at(1) == gc2); + EXPECT_LONGS_EQUAL(2, leaf.first.size()); + EXPECT(leaf.first.at(0) == gc1); + EXPECT(leaf.first.at(1) == gc2); + EXPECT_DOUBLES_EQUAL(0, leaf.second, 1e-9); } /* ************************************************************************* */ @@ -120,9 +120,12 @@ TEST(HybridGaussianProductFactor, AsProductFactor) { // Let's check that this worked: Assignment mode; - mode[m1.first] = 1; + mode[m1.first] = 0; auto actual = product(mode); - EXPECT(actual.at(0) == f11); + EXPECT(actual.first.at(0) == f10); + EXPECT_DOUBLES_EQUAL(10, actual.second, 1e-9); + + // TODO(Frank): when killed hiding, f11 should also be there } /* ************************************************************************* */ @@ -134,9 +137,12 @@ TEST(HybridGaussianProductFactor, AddOne) { // Let's check that this worked: Assignment mode; - mode[m1.first] = 1; + mode[m1.first] = 0; auto actual = product(mode); - EXPECT(actual.at(0) == f11); + EXPECT(actual.first.at(0) == f10); + EXPECT_DOUBLES_EQUAL(10, actual.second, 1e-9); + + // TODO(Frank): when killed hiding, f11 should also be there } /* ************************************************************************* */ @@ -152,12 +158,15 @@ TEST(HybridGaussianProductFactor, AddTwo) { // Let's check that this worked: auto actual00 = product({{M(1), 0}, {M(2), 0}}); - EXPECT(actual00.at(0) == f10); - EXPECT(actual00.at(1) == f20); + EXPECT(actual00.first.at(0) == f10); + EXPECT(actual00.first.at(1) == f20); + EXPECT_DOUBLES_EQUAL(10 + 20, actual00.second, 1e-9); auto actual12 = product({{M(1), 1}, {M(2), 2}}); - EXPECT(actual12.at(0) == f11); - EXPECT(actual12.at(1) == f22); + // TODO(Frank): when killed hiding, these should also equal: + // EXPECT(actual12.first.at(0) == f11); + // EXPECT(actual12.first.at(1) == f22); + EXPECT_DOUBLES_EQUAL(11 + 22, actual12.second, 1e-9); } /* 
************************************************************************* */ From e1c0d0e2275d95f65ccf5b6c7dadf6ba2379f17b Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 7 Oct 2024 09:12:56 +0900 Subject: [PATCH 09/18] operator returns pairs, extensive switching test --- gtsam/hybrid/HybridGaussianConditional.cpp | 8 +- gtsam/hybrid/HybridGaussianFactor.cpp | 4 +- gtsam/hybrid/HybridGaussianFactor.h | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 12 +- gtsam/hybrid/tests/testHybridBayesNet.cpp | 108 ++++++++-------- .../tests/testHybridGaussianConditional.cpp | 2 +- .../tests/testHybridGaussianFactorGraph.cpp | 119 +++++++++++++++++- 7 files changed, 180 insertions(+), 75 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index a6dcc6624d..ba2c34414b 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -273,13 +273,7 @@ std::shared_ptr HybridGaussianConditional::likelihood( [&](const GaussianConditional::shared_ptr& conditional) -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_; - if (Cgm_Kgcm == 0.0) { - return {likelihood_m, 0.0}; - } else { - // Add a constant to the likelihood in case the noise models - // are not all equal. 
- return {likelihood_m, Cgm_Kgcm}; - } + return {likelihood_m, Cgm_Kgcm}; }); return std::make_shared(discreteParentKeys, likelihoods); } diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index d49630b642..e01df539e8 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -187,9 +187,9 @@ void HybridGaussianFactor::print(const std::string& s, const KeyFormatter& forma } /* *******************************************************************************/ -HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()( +GaussianFactorValuePair HybridGaussianFactor::operator()( const DiscreteValues& assignment) const { - return factors_(assignment).first; + return factors_(assignment); } /* *******************************************************************************/ diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index bb4d3b368f..08debcf48f 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -129,7 +129,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// @{ /// Get factor at a given discrete assignment. - sharedFactor operator()(const DiscreteValues &assignment) const; + GaussianFactorValuePair operator()(const DiscreteValues &assignment) const; /** * @brief Compute error of the HybridGaussianFactor as a tree. 
diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index b94905652b..2d4ff38bd8 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -81,7 +81,7 @@ static void printFactor(const std::shared_ptr& factor, if (assignment.empty()) hgf->print("HybridGaussianFactor:", keyFormatter); else - hgf->operator()(assignment)->print("HybridGaussianFactor, component:", keyFormatter); + hgf->operator()(assignment).first->print("HybridGaussianFactor, component:", keyFormatter); } else if (auto gf = std::dynamic_pointer_cast(factor)) { factor->print("GaussianFactor:\n", keyFormatter); @@ -329,6 +329,7 @@ static std::shared_ptr createHybridGaussianFactor(const ResultTree& elim const double negLogK = conditional->negLogConstant(); hf->constantTerm() += -2.0 * negLogK; return {factor, negLogK}; + return {factor, scalar + negLogK}; } else if (!conditional && !factor) { return {nullptr, 0.0}; // TODO(frank): or should this be infinity? } else { @@ -355,7 +356,9 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const { DiscreteKeys discreteSeparator = GetDiscreteKeys(*this); // Collect all the factors to create a set of Gaussian factor graphs in a - // decision tree indexed by all discrete keys involved. + // decision tree indexed by all discrete keys involved. Just like any hybrid factor, every + // assignment also has a scalar error, in this case the sum of all errors in the graph. This error + // is assignment-specific and accounts for any difference in noise models used. HybridGaussianProductFactor productFactor = collectProductFactor(); // Convert factor graphs with a nullptr to an empty factor graph. @@ -374,11 +377,12 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const { } // Expensive elimination of product factor. 
- auto result = EliminatePreferCholesky(graph, keys); + auto result = EliminatePreferCholesky(graph, keys); /// <<<<<< MOST COMPUTE IS HERE // Record whether there any continuous variables left someContinuousLeft |= !result.second->empty(); + // We pass on the scalar unmodified. return {result, scalar}; }; @@ -548,7 +552,7 @@ GaussianFactorGraph HybridGaussianFactorGraph::choose(const DiscreteValues& assi } else if (auto gc = std::dynamic_pointer_cast(f)) { gfg.push_back(gf); } else if (auto hgf = std::dynamic_pointer_cast(f)) { - gfg.push_back((*hgf)(assignment)); + gfg.push_back((*hgf)(assignment).first); } else if (auto hgc = std::dynamic_pointer_cast(f)) { gfg.push_back((*hgc)(assignment)); } else if (auto hc = std::dynamic_pointer_cast(f)) { diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 1d22b3d73e..438bfd2670 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -18,9 +18,12 @@ * @date December 2021 */ +#include +#include #include #include #include +#include #include #include "Switching.h" @@ -28,6 +31,7 @@ // Include for test suite #include +#include using namespace std; using namespace gtsam; @@ -113,7 +117,7 @@ TEST(HybridBayesNet, EvaluatePureDiscrete) { } /* ****************************************************************************/ -// Test creation of a tiny hybrid Bayes net. +// Test API for a tiny hybrid Bayes net. 
TEST(HybridBayesNet, Tiny) { auto bayesNet = tiny::createHybridBayesNet(); // P(z|x,mode)P(x)P(mode) EXPECT_LONGS_EQUAL(3, bayesNet.size()); @@ -164,10 +168,8 @@ TEST(HybridBayesNet, Tiny) { EXPECT(!pruned.equals(bayesNet)); // error - const double error0 = chosen0.error(vv) + gc0->negLogConstant() - - px->negLogConstant() - log(0.4); - const double error1 = chosen1.error(vv) + gc1->negLogConstant() - - px->negLogConstant() - log(0.6); + const double error0 = chosen0.error(vv) + gc0->negLogConstant() - px->negLogConstant() - log(0.4); + const double error1 = chosen1.error(vv) + gc1->negLogConstant() - px->negLogConstant() - log(0.6); // print errors: EXPECT_DOUBLES_EQUAL(error0, bayesNet.error(zero), 1e-9); EXPECT_DOUBLES_EQUAL(error1, bayesNet.error(one), 1e-9); @@ -188,6 +190,23 @@ TEST(HybridBayesNet, Tiny) { // toFactorGraph auto fg = bayesNet.toFactorGraph({{Z(0), Vector1(5.0)}}); EXPECT_LONGS_EQUAL(3, fg.size()); + GTSAM_PRINT(fg); + + // Create the product factor for eliminating x0: + HybridGaussianFactorGraph factors_x0; + factors_x0.push_back(fg.at(0)); + factors_x0.push_back(fg.at(1)); + auto productFactor = factors_x0.collectProductFactor(); + + // Check that scalars are 0 and 1.79 + EXPECT_DOUBLES_EQUAL(0.0, productFactor({{M(0), 0}}).second, 1e-9); + EXPECT_DOUBLES_EQUAL(1.791759, productFactor({{M(0), 1}}).second, 1e-5); + + // Call eliminate and check scalar: + auto result = factors_x0.eliminate({X(0)}); + GTSAM_PRINT(*result.first); + auto df = std::dynamic_pointer_cast(result.second); + GTSAM_PRINT(df->errorTree()); // Check that the ratio of probPrime to evaluate is the same for all modes. std::vector ratio(2); @@ -209,17 +228,13 @@ TEST(HybridBayesNet, Tiny) { /* ****************************************************************************/ // Hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia). 
namespace different_sigmas { -const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1), - Vector1(-4.0), 5.0); +const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0); -const std::vector> parms{{Vector1(5), 2.0}, - {Vector1(2), 3.0}}; +const std::vector> parms{{Vector1(5), 2.0}, {Vector1(2), 3.0}}; const auto hgc = std::make_shared(Asia, X(1), parms); const auto prior = std::make_shared(Asia, "99/1"); -auto wrap = [](const auto& c) { - return std::make_shared(c); -}; +auto wrap = [](const auto& c) { return std::make_shared(c); }; const HybridBayesNet bayesNet{wrap(gc), wrap(hgc), wrap(prior)}; // Create values at which to evaluate. @@ -233,8 +248,8 @@ TEST(HybridBayesNet, evaluateHybrid) { const double conditionalProbability = gc->evaluate(values.continuous()); const double mixtureProbability = hgc->evaluate(values); - EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99, - bayesNet.evaluate(values), 1e-9); + EXPECT_DOUBLES_EQUAL( + conditionalProbability * mixtureProbability * 0.99, bayesNet.evaluate(values), 1e-9); } /* ****************************************************************************/ @@ -256,14 +271,10 @@ TEST(HybridBayesNet, Choose) { EXPECT_LONGS_EQUAL(4, gbn.size()); - EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment), - *gbn.at(0))); - EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment), - *gbn.at(1))); - EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment), - *gbn.at(2))); - EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment), - *gbn.at(3))); + EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment), *gbn.at(0))); + EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment), *gbn.at(1))); + EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment), *gbn.at(2))); + EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment), *gbn.at(3))); } /* 
****************************************************************************/ @@ -300,8 +311,7 @@ TEST(HybridBayesNet, OptimizeAssignment) { TEST(HybridBayesNet, Optimize) { Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1"); - HybridBayesNet::shared_ptr hybridBayesNet = - s.linearizedFactorGraph.eliminateSequential(); + HybridBayesNet::shared_ptr hybridBayesNet = s.linearizedFactorGraph.eliminateSequential(); HybridValues delta = hybridBayesNet->optimize(); @@ -328,8 +338,7 @@ TEST(HybridBayesNet, Pruning) { // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1) Switching s(3); - HybridBayesNet::shared_ptr posterior = - s.linearizedFactorGraph.eliminateSequential(); + HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential(); EXPECT_LONGS_EQUAL(5, posterior->size()); // Optimize @@ -355,12 +364,9 @@ TEST(HybridBayesNet, Pruning) { logProbability += posterior->at(0)->asHybrid()->logProbability(hybridValues); logProbability += posterior->at(1)->asHybrid()->logProbability(hybridValues); logProbability += posterior->at(2)->asHybrid()->logProbability(hybridValues); - logProbability += - posterior->at(3)->asDiscrete()->logProbability(hybridValues); - logProbability += - posterior->at(4)->asDiscrete()->logProbability(hybridValues); - EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues), - 1e-9); + logProbability += posterior->at(3)->asDiscrete()->logProbability(hybridValues); + logProbability += posterior->at(4)->asDiscrete()->logProbability(hybridValues); + EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues), 1e-9); // Check agreement with discrete posterior // double density = exp(logProbability); @@ -381,8 +387,7 @@ TEST(HybridBayesNet, Pruning) { TEST(HybridBayesNet, Prune) { Switching s(4); - HybridBayesNet::shared_ptr posterior = - s.linearizedFactorGraph.eliminateSequential(); + HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential(); 
EXPECT_LONGS_EQUAL(7, posterior->size()); HybridValues delta = posterior->optimize(); @@ -399,8 +404,7 @@ TEST(HybridBayesNet, Prune) { TEST(HybridBayesNet, UpdateDiscreteConditionals) { Switching s(4); - HybridBayesNet::shared_ptr posterior = - s.linearizedFactorGraph.eliminateSequential(); + HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential(); EXPECT_LONGS_EQUAL(7, posterior->size()); DiscreteConditional joint; @@ -412,8 +416,7 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { auto prunedDecisionTree = joint.prune(maxNrLeaves); #ifdef GTSAM_DT_MERGING - EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/, - prunedDecisionTree.nrLeaves()); + EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/, prunedDecisionTree.nrLeaves()); #else EXPECT_LONGS_EQUAL(8 /*full tree*/, prunedDecisionTree.nrLeaves()); #endif @@ -421,16 +424,14 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { // regression // NOTE(Frank): I had to include *three* non-zeroes here now. DecisionTreeFactor::ADT potentials( - s.modes, - std::vector{0, 0, 0, 0.28739288, 0, 0.43106901, 0, 0.2815381}); + s.modes, std::vector{0, 0, 0, 0.28739288, 0, 0.43106901, 0, 0.2815381}); DiscreteConditional expectedConditional(3, s.modes, potentials); // Prune! 
auto pruned = posterior->prune(maxNrLeaves); // Functor to verify values against the expectedConditional - auto checker = [&](const Assignment& assignment, - double probability) -> double { + auto checker = [&](const Assignment& assignment, double probability) -> double { // typecast so we can use this to get probability value DiscreteValues choices(assignment); if (prunedDecisionTree(choices) == 0) { @@ -445,8 +446,7 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { CHECK(pruned.at(0)->asDiscrete()); auto pruned_discrete_conditionals = pruned.at(0)->asDiscrete(); auto discrete_conditional_tree = - std::dynamic_pointer_cast( - pruned_discrete_conditionals); + std::dynamic_pointer_cast(pruned_discrete_conditionals); // The checker functor verifies the values for us. discrete_conditional_tree->apply(checker); @@ -460,13 +460,10 @@ TEST(HybridBayesNet, Sampling) { auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0)); nfg.emplace_shared>(X(0), 0.0, noise_model); - auto zero_motion = - std::make_shared>(X(0), X(1), 0, noise_model); - auto one_motion = - std::make_shared>(X(0), X(1), 1, noise_model); + auto zero_motion = std::make_shared>(X(0), X(1), 0, noise_model); + auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); nfg.emplace_shared( - DiscreteKey(M(0), 2), - std::vector{zero_motion, one_motion}); + DiscreteKey(M(0), 2), std::vector{zero_motion, one_motion}); DiscreteKey mode(M(0), 2); nfg.emplace_shared(mode, "1/1"); @@ -538,18 +535,17 @@ TEST(HybridBayesNet, ErrorTreeWithConditional) { hbn.emplace_shared(x0, Vector1(0.0), I_1x1, prior_model); // Add measurement P(z0 | x0) - hbn.emplace_shared(z0, Vector1(0.0), -I_1x1, x0, I_1x1, - measurement_model); + hbn.emplace_shared(z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model); // Add hybrid motion model double mu = 0.0; double sigma0 = 1e2, sigma1 = 1e-2; auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - auto c0 = 
make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, - x0, -I_1x1, model0), - c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, - x0, -I_1x1, model1); + auto c0 = + make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model0), + c1 = + make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1); DiscreteKey m1(M(2), 2); hbn.emplace_shared(m1, std::vector{c0, c1}); diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index cd9c182cd5..05f7c6c612 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -217,7 +217,7 @@ TEST(HybridGaussianConditional, Likelihood2) { // Check the detailed JacobianFactor calculation for mode==1. { // We have a JacobianFactor - const auto gf1 = (*likelihood)(assignment1); + const auto [gf1, _] = (*likelihood)(assignment1); const auto jf1 = std::dynamic_pointer_cast(gf1); CHECK(jf1); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index ed444c13c8..caf62616fa 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -165,8 +165,119 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { EXPECT_LONGS_EQUAL(4, result->size()); } -/* -****************************************************************************/ +/* ************************************************************************* */ +// Test API for the smallest switching network. +// None of these are regression tests. 
+TEST(HybridBayesNet, Switching) { + const double betweenSigma = 0.3, priorSigma = 0.1; + Switching s(2, betweenSigma, priorSigma); + const HybridGaussianFactorGraph& graph = s.linearizedFactorGraph; + EXPECT_LONGS_EQUAL(4, graph.size()); + + // Create some continuous and discrete values + VectorValues continuousValues{{X(0), Vector1(0.1)}, {X(1), Vector1(1.2)}}; + DiscreteValues modeZero{{M(0), 0}}, modeOne{{M(0), 1}}; + + // Get the hybrid gaussian factor and check it is as expected + auto hgf = std::dynamic_pointer_cast(graph.at(1)); + CHECK(hgf); + + // Get factors and scalars for both modes + auto [factor0, scalar0] = (*hgf)(modeZero); + auto [factor1, scalar1] = (*hgf)(modeOne); + CHECK(factor0); + CHECK(factor1); + + // Check scalars against negLogConstant of noise model + auto betweenModel = noiseModel::Isotropic::Sigma(1, betweenSigma); + EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar0, 1e-9); + EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar1, 1e-9); + + // Check error for M(0) = 0 + HybridValues values0{continuousValues, modeZero}; + double expectedError0 = 0; + for (const auto& factor : graph) expectedError0 += factor->error(values0); + EXPECT_DOUBLES_EQUAL(expectedError0, graph.error(values0), 1e-5); + + // Check error for M(0) = 1 + HybridValues values1{continuousValues, modeOne}; + double expectedError1 = 0; + for (const auto& factor : graph) expectedError1 += factor->error(values1); + EXPECT_DOUBLES_EQUAL(expectedError1, graph.error(values1), 1e-5); + + // Check errorTree + AlgebraicDecisionTree actualErrors = graph.errorTree(continuousValues); + // Create expected error tree + AlgebraicDecisionTree expectedErrors(M(0), expectedError0, expectedError1); + + // Check that the actual error tree matches the expected one + EXPECT(assert_equal(expectedErrors, actualErrors, 1e-5)); + + // Check probPrime + double probPrime0 = graph.probPrime(values0); + EXPECT_DOUBLES_EQUAL(std::exp(-expectedError0), probPrime0, 1e-5); + + double 
probPrime1 = graph.probPrime(values1); + EXPECT_DOUBLES_EQUAL(std::exp(-expectedError1), probPrime1, 1e-5); + + // Check discretePosterior + AlgebraicDecisionTree posterior = graph.discretePosterior(continuousValues); + double sum = probPrime0 + probPrime1; + AlgebraicDecisionTree expectedPosterior(M(0), probPrime0 / sum, probPrime1 / sum); + EXPECT(assert_equal(expectedPosterior, posterior, 1e-5)); + + // Make the clique of factors connected to x0: + HybridGaussianFactorGraph factors_x0; + factors_x0.push_back(graph.at(0)); + factors_x0.push_back(hgf); + + // Test collectProductFactor + auto productFactor = factors_x0.collectProductFactor(); + + // For M(0) = 0 + auto [gaussianFactor0, actualScalar0] = productFactor(modeZero); + EXPECT(gaussianFactor0.size() == 2); + EXPECT_DOUBLES_EQUAL((*hgf)(modeZero).second, actualScalar0, 1e-5); + + // For M(0) = 1 + auto [gaussianFactor1, actualScalar1] = productFactor(modeOne); + EXPECT(gaussianFactor1.size() == 2); + EXPECT_DOUBLES_EQUAL((*hgf)(modeOne).second, actualScalar1, 1e-5); + + // Test eliminate + Ordering ordering{X(0)}; + auto [conditional, factor] = factors_x0.eliminate(ordering); + + // Check the conditional + CHECK(conditional); + EXPECT(conditional->isHybrid()); + auto hybridConditional = conditional->asHybrid(); + CHECK(hybridConditional); + EXPECT_LONGS_EQUAL(1, hybridConditional->nrFrontals()); // x0 + EXPECT_LONGS_EQUAL(2, hybridConditional->nrParents()); // x1, m0 + + // Check the remaining factor + EXPECT(factor); + EXPECT(std::dynamic_pointer_cast(factor)); + auto hybridFactor = std::dynamic_pointer_cast(factor); + EXPECT_LONGS_EQUAL(2, hybridFactor->keys().size()); // x1, m0 + + // Check that the conditional and remaining factor are consistent for both modes + for (auto&& mode : {modeZero, modeOne}) { + auto gc = (*hybridConditional)(mode); + auto gf = (*hybridFactor)(mode); + + // The error of the original factors should equal the sum of errors of the conditional and + // remaining factor, modulo 
the normalization constant of the conditional. + double originalError = factors_x0.error({continuousValues, mode}); + EXPECT_DOUBLES_EQUAL( + originalError, + gc->negLogConstant() + gc->error(continuousValues) + gf.first->error(continuousValues), + 1e-9); + } +} + +/* ************************************************************************* */ // Select a particular continuous factor graph given a discrete assignment TEST(HybridGaussianFactorGraph, DiscreteSelection) { Switching s(3); @@ -410,12 +521,12 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) { // Expected decision tree with two factor graphs: // f(x0;mode=0)P(x0) - GaussianFactorGraph expectedFG0{(*hybrid)(d0), prior}; + GaussianFactorGraph expectedFG0{(*hybrid)(d0).first, prior}; EXPECT(assert_equal(expectedFG0, actual(d0).first, 1e-5)); EXPECT(assert_equal(0.0, actual(d0).second, 1e-5)); // f(x0;mode=1)P(x0) - GaussianFactorGraph expectedFG1{(*hybrid)(d1), prior}; + GaussianFactorGraph expectedFG1{(*hybrid)(d1).first, prior}; EXPECT(assert_equal(expectedFG1, actual(d1).first, 1e-5)); EXPECT(assert_equal(1.79176, actual(d1).second, 1e-5)); } From 04cfb063aec39c026a38f1cb254627fdeb5e8b7f Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 7 Oct 2024 09:19:35 +0900 Subject: [PATCH 10/18] Cherry-pick Varun's bugfix --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 2d4ff38bd8..38995b60a8 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -247,7 +247,8 @@ static std::pair> discret // In this case, compute discrete probabilities. auto logProbability = [&](const auto& pair) -> double { auto [factor, _] = pair; - if (!factor) return 0.0; + // If the factor is null, it is has been pruned hence return ∞ + // so that the exp(-∞)=0. 
return factor->error(VectorValues()); }; AlgebraicDecisionTree logProbabilities = @@ -299,7 +300,9 @@ static std::shared_ptr createDiscreteFactor(const ResultTree& eliminatio // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`. return factor->error(kEmpty) - conditional->negLogConstant(); } else if (!conditional && !factor) { - return 1.0; // TODO(dellaert): not loving this, what should this be?? + // If the factor is null, it has been pruned, hence return ∞ + // so that the exp(-∞)=0. + return std::numeric_limits::infinity(); } else { throw std::runtime_error("createDiscreteFactor has mixed NULLs"); } From b3c698047d93c6ba9f11eafd05d70ff6992a63dd Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 7 Oct 2024 17:08:38 +0900 Subject: [PATCH 11/18] Don't normalize probabilities for a mere DiscreteFactor --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 68 ++++++++-------------- 1 file changed, 23 insertions(+), 45 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 38995b60a8..95acb6ad6a 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -53,9 +53,12 @@ namespace gtsam { /// Specialize EliminateableFactorGraph for HybridGaussianFactorGraph: template class EliminateableFactorGraph; +using std::dynamic_pointer_cast; using OrphanWrapper = BayesTreeOrphanWrapper; +using Result = std::pair, GaussianFactor::shared_ptr>; +using ResultTree = DecisionTree>; -using std::dynamic_pointer_cast; +static const VectorValues kEmpty; /* ************************************************************************ */ // Throw a runtime exception for method specified in string s, and factor f: @@ -215,25 +218,6 @@ static std::pair> continu return {std::make_shared(result.first), result.second}; } -/* ************************************************************************ */ -/** - * @brief Exponentiate (not necessarily normalized) negative log-values, - * 
normalize, and then return as AlgebraicDecisionTree. - * - * @param logValues DecisionTree of (unnormalized) log values. - * @return AlgebraicDecisionTree - */ -static AlgebraicDecisionTree probabilitiesFromNegativeLogValues( - const AlgebraicDecisionTree& logValues) { - // Perform normalization - double min_log = logValues.min(); - AlgebraicDecisionTree probabilities = DecisionTree( - logValues, [&min_log](const double x) { return exp(-(x - min_log)); }); - probabilities = probabilities.normalize(probabilities.sum()); - - return probabilities; -} - /* ************************************************************************ */ static std::pair> discreteElimination( const HybridGaussianFactorGraph& factors, const Ordering& frontalKeys) { @@ -245,18 +229,17 @@ static std::pair> discret } else if (auto gmf = dynamic_pointer_cast(f)) { // Case where we have a HybridGaussianFactor with no continuous keys. // In this case, compute discrete probabilities. - auto logProbability = [&](const auto& pair) -> double { + // TODO(frank): What about the scalar!? + auto potential = [&](const auto& pair) -> double { auto [factor, _] = pair; - // If the factor is null, it is has been pruned hence return ∞ - // so that the exp(-∞)=0. - return factor->error(VectorValues()); + // If the factor is null, it has been pruned, hence return potential of zero + if (!factor) + return 0; + else + return exp(-factor->error(kEmpty)); }; - AlgebraicDecisionTree logProbabilities = - DecisionTree(gmf->factors(), logProbability); - - AlgebraicDecisionTree probabilities = - probabilitiesFromNegativeLogValues(logProbabilities); - dfg.emplace_shared(gmf->discreteKeys(), probabilities); + DecisionTree potentials(gmf->factors(), potential); + dfg.emplace_shared(gmf->discreteKeys(), potentials); } else if (auto orphan = dynamic_pointer_cast(f)) { // Ignore orphaned clique. 
@@ -277,9 +260,6 @@ static std::pair> discret } /* ************************************************************************ */ -using Result = std::pair, GaussianFactor::shared_ptr>; -using ResultTree = DecisionTree>; - /** * Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m) * from the residual error ||b||^2 at the mean μ. @@ -288,34 +268,31 @@ using ResultTree = DecisionTree>; */ static std::shared_ptr createDiscreteFactor(const ResultTree& eliminationResults, const DiscreteKeys& discreteSeparator) { - auto negLogProbability = [&](const auto& pair) -> double { + auto potential = [&](const auto& pair) -> double { const auto& [conditional, factor] = pair.first; if (conditional && factor) { - static const VectorValues kEmpty; // If the factor is not null, it has no keys, just contains the residual. // Negative-log-space version of: // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); // negLogConstant gives `-log(k)` // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`. - return factor->error(kEmpty) - conditional->negLogConstant(); + const double negLogK = conditional->negLogConstant(); + const double old = factor->error(kEmpty) - negLogK; + return exp(-old); } else if (!conditional && !factor) { - // If the factor is null, it has been pruned, hence return ∞ - // so that the exp(-∞)=0. 
- return std::numeric_limits::infinity(); + // If the factor is null, it has been pruned, hence return potential of zero + return 0; } else { throw std::runtime_error("createDiscreteFactor has mixed NULLs"); } }; - AlgebraicDecisionTree negLogProbabilities( - DecisionTree(eliminationResults, negLogProbability)); - AlgebraicDecisionTree probabilities = - probabilitiesFromNegativeLogValues(negLogProbabilities); - - return std::make_shared(discreteSeparator, probabilities); + DecisionTree potentials(eliminationResults, potential); + return std::make_shared(discreteSeparator, potentials); } +/* *******************************************************************************/ // Create HybridGaussianFactor on the separator, taking care to correct // for conditional constants. static std::shared_ptr createHybridGaussianFactor(const ResultTree& eliminationResults, @@ -323,6 +300,7 @@ static std::shared_ptr createHybridGaussianFactor(const ResultTree& elim // Correct for the normalization constant used up by the conditional auto correct = [&](const auto& pair) -> GaussianFactorValuePair { const auto& [conditional, factor] = pair.first; + const double scalar = pair.second; if (conditional && factor) { auto hf = std::dynamic_pointer_cast(factor); if (!hf) throw std::runtime_error("Expected HessianFactor!"); From 586b177b7873acff7c2449adbe21fcae0f556a2a Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 7 Oct 2024 22:28:38 +0900 Subject: [PATCH 12/18] Extensive new API test --- .../tests/testHybridGaussianFactorGraph.cpp | 97 +++++++++++++++---- 1 file changed, 80 insertions(+), 17 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index caf62616fa..1bed2533a0 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -117,7 +117,8 @@ TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) { // Check that factor is 
discrete and correct auto factor = std::dynamic_pointer_cast(result.second); CHECK(factor); - EXPECT(assert_equal(DecisionTreeFactor{m1, "1 1"}, *factor)); + // regression test + EXPECT(assert_equal(DecisionTreeFactor{m1, "15.74961 15.74961"}, *factor, 1e-5)); } /* ************************************************************************* */ @@ -221,10 +222,10 @@ TEST(HybridBayesNet, Switching) { EXPECT_DOUBLES_EQUAL(std::exp(-expectedError1), probPrime1, 1e-5); // Check discretePosterior - AlgebraicDecisionTree posterior = graph.discretePosterior(continuousValues); + AlgebraicDecisionTree graphPosterior = graph.discretePosterior(continuousValues); double sum = probPrime0 + probPrime1; AlgebraicDecisionTree expectedPosterior(M(0), probPrime0 / sum, probPrime1 / sum); - EXPECT(assert_equal(expectedPosterior, posterior, 1e-5)); + EXPECT(assert_equal(expectedPosterior, graphPosterior, 1e-5)); // Make the clique of factors connected to x0: HybridGaussianFactorGraph factors_x0; @@ -244,37 +245,100 @@ TEST(HybridBayesNet, Switching) { EXPECT(gaussianFactor1.size() == 2); EXPECT_DOUBLES_EQUAL((*hgf)(modeOne).second, actualScalar1, 1e-5); - // Test eliminate + // Test eliminate x0 Ordering ordering{X(0)}; auto [conditional, factor] = factors_x0.eliminate(ordering); // Check the conditional CHECK(conditional); EXPECT(conditional->isHybrid()); - auto hybridConditional = conditional->asHybrid(); - CHECK(hybridConditional); - EXPECT_LONGS_EQUAL(1, hybridConditional->nrFrontals()); // x0 - EXPECT_LONGS_EQUAL(2, hybridConditional->nrParents()); // x1, m0 + auto p_x0_given_x1_m = conditional->asHybrid(); + CHECK(p_x0_given_x1_m); + EXPECT_LONGS_EQUAL(1, p_x0_given_x1_m->nrFrontals()); // x0 + EXPECT_LONGS_EQUAL(2, p_x0_given_x1_m->nrParents()); // x1, m0 // Check the remaining factor EXPECT(factor); EXPECT(std::dynamic_pointer_cast(factor)); - auto hybridFactor = std::dynamic_pointer_cast(factor); - EXPECT_LONGS_EQUAL(2, hybridFactor->keys().size()); // x1, m0 + auto phi_x1_m = 
std::dynamic_pointer_cast(factor); + EXPECT_LONGS_EQUAL(2, phi_x1_m->keys().size()); // x1, m0 + // Check that the scalars incorporate the negative log constant of the conditional + EXPECT_DOUBLES_EQUAL( + scalar0 - (*p_x0_given_x1_m)(modeZero)->negLogConstant(), (*phi_x1_m)(modeZero).second, 1e-9); + EXPECT_DOUBLES_EQUAL( + scalar1 - (*p_x0_given_x1_m)(modeOne)->negLogConstant(), (*phi_x1_m)(modeOne).second, 1e-9); // Check that the conditional and remaining factor are consistent for both modes for (auto&& mode : {modeZero, modeOne}) { - auto gc = (*hybridConditional)(mode); - auto gf = (*hybridFactor)(mode); + auto gc = (*p_x0_given_x1_m)(mode); + auto [gf, scalar] = (*phi_x1_m)(mode); // The error of the original factors should equal the sum of errors of the conditional and // remaining factor, modulo the normalization constant of the conditional. double originalError = factors_x0.error({continuousValues, mode}); - EXPECT_DOUBLES_EQUAL( - originalError, - gc->negLogConstant() + gc->error(continuousValues) + gf.first->error(continuousValues), - 1e-9); + const double actualError = + gc->negLogConstant() + gc->error(continuousValues) + gf->error(continuousValues) + scalar; + EXPECT_DOUBLES_EQUAL(originalError, actualError, 1e-9); } + + // Create a clique for x1 + HybridGaussianFactorGraph factors_x1; + factors_x1.push_back(factor); // Use the remaining factor from previous elimination + factors_x1.push_back(graph.at(2)); // Add the factor for x1 from the original graph + + // Test collectProductFactor for x1 clique + auto productFactor_x1 = factors_x1.collectProductFactor(); + + // For M(0) = 0 + auto [gaussianFactor_x1_0, actualScalar_x1_0] = productFactor_x1(modeZero); + EXPECT_LONGS_EQUAL(2, gaussianFactor_x1_0.size()); + // NOTE(Frank): prior on x1 does not contribute to the scalar + EXPECT_DOUBLES_EQUAL((*phi_x1_m)(modeZero).second, actualScalar_x1_0, 1e-5); + + // For M(0) = 1 + auto [gaussianFactor_x1_1, actualScalar_x1_1] = productFactor_x1(modeOne); + 
EXPECT_LONGS_EQUAL(2, gaussianFactor_x1_1.size());
+ // NOTE(Frank): prior on x1 does not contribute to the scalar
+ EXPECT_DOUBLES_EQUAL((*phi_x1_m)(modeOne).second, actualScalar_x1_1, 1e-5);
+
+ // Test eliminate for x1 clique
+ Ordering ordering_x1{X(1)};
+ auto [conditional_x1, factor_x1] = factors_x1.eliminate(ordering_x1);
+
+ // Check the conditional for x1
+ CHECK(conditional_x1);
+ EXPECT(conditional_x1->isHybrid());
+ auto p_x1_given_m = conditional_x1->asHybrid();
+ CHECK(p_x1_given_m);
+ EXPECT_LONGS_EQUAL(1, p_x1_given_m->nrFrontals()); // x1
+ EXPECT_LONGS_EQUAL(1, p_x1_given_m->nrParents()); // m0
+
+ // Check the remaining factor for x1
+ CHECK(factor_x1);
+ auto phi_x1 = std::dynamic_pointer_cast(factor_x1);
+ CHECK(phi_x1);
+ EXPECT_LONGS_EQUAL(1, phi_x1->keys().size()); // m0
+ // We can't really check the error of the decision tree factor phi_x1, because the continuous
+ // factor whose error(kEmpty) we need is not available.
+
+ // However, we can still check the total error for the clique factors_x1 and the elimination
+ // results are equal, modulo -again- the negative log constant of the conditional.
+ for (auto&& mode : {modeZero, modeOne}) {
+ auto gc_x1 = (*p_x1_given_m)(mode);
+ double originalError_x1 = factors_x1.error({continuousValues, mode});
+ const double actualError =
+ gc_x1->negLogConstant() + gc_x1->error(continuousValues) + phi_x1->error(mode);
+ EXPECT_DOUBLES_EQUAL(originalError_x1, actualError, 1e-9);
+ }
+
+ // Now test full elimination of the graph:
+ auto posterior = graph.eliminateSequential();
+ CHECK(posterior);
+
+ // Check that the posterior P(M|X=continuousValues) from the Bayes net is the same as the
+ // same posterior from the graph. This is a sanity check that the elimination is done correctly.
+ AlgebraicDecisionTree bnPosterior = graph.discretePosterior(continuousValues); + EXPECT(assert_equal(graphPosterior, bnPosterior)); } /* ************************************************************************* */ @@ -572,7 +636,6 @@ bool ratioTest(const HybridBayesNet& bn, // Test ratios for a number of independent samples: for (size_t i = 0; i < num_samples; i++) { HybridValues sample = bn.sample(&kRng); - // GTSAM_PRINT(sample); // std::cout << "ratio: " << compute_ratio(&sample) << std::endl; if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false; } From 518ea81d1ef7c0071ac8380723783a55753cedd4 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Mon, 7 Oct 2024 22:30:30 +0900 Subject: [PATCH 13/18] No more hiding! --- gtsam/hybrid/HybridGaussianFactor.cpp | 72 +++------------------- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 18 ++---- 2 files changed, 15 insertions(+), 75 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index e01df539e8..44a56910ed 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -31,45 +31,6 @@ namespace gtsam { -/* *******************************************************************************/ -HybridGaussianFactor::FactorValuePairs HybridGaussianFactor::augment( - const FactorValuePairs& factors) { - // Find the minimum value so we can "proselytize" to positive values. - // Done because we can't have sqrt of negative numbers. - DecisionTree gaussianFactors; - AlgebraicDecisionTree valueTree; - std::tie(gaussianFactors, valueTree) = unzip(factors); - - // Compute minimum value for normalization. - double min_value = valueTree.min(); - - // Finally, update the [A|b] matrices. - auto update = [&min_value](const auto& gfv) -> GaussianFactorValuePair { - auto [gf, value] = gfv; - - auto jf = std::dynamic_pointer_cast(gf); - if (!jf) return {gf, 0.0}; // should this be zero or infinite? 
- - double normalized_value = value - min_value; - - // If the value is 0, do nothing - if (normalized_value == 0.0) return {gf, value}; - - GaussianFactorGraph gfg; - gfg.push_back(jf); - - Vector c(1); - // When hiding c inside the `b` vector, value == 0.5*c^2 - c << std::sqrt(2.0 * normalized_value); - auto constantFactor = std::make_shared(c); - - gfg.push_back(constantFactor); - // NOTE(Frank): we store the actual value, not the normalized value: - return {std::make_shared(gfg), value}; - }; - return FactorValuePairs(factors, update); -} - /* *******************************************************************************/ struct HybridGaussianFactor::ConstructorHelper { KeyVector continuousKeys; // Continuous keys extracted from factors @@ -88,10 +49,10 @@ struct HybridGaussianFactor::ConstructorHelper { } } // Build the FactorValuePairs DecisionTree - pairs = FactorValuePairs(DecisionTree(discreteKeys, factors), - [](const auto& f) { - return std::pair{f, 0.0}; - }); + pairs = FactorValuePairs( + DecisionTree(discreteKeys, factors), [](const auto& f) { + return std::pair{f, f ? 
0.0 : std::numeric_limits::infinity()}; + }); } /// Constructor for a single discrete key and a vector of GaussianFactorValuePairs @@ -128,7 +89,7 @@ struct HybridGaussianFactor::ConstructorHelper { /* *******************************************************************************/ HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper& helper) - : Base(helper.continuousKeys, helper.discreteKeys), factors_(augment(helper.pairs)) {} + : Base(helper.continuousKeys, helper.discreteKeys), factors_(helper.pairs) {} HybridGaussianFactor::HybridGaussianFactor( const DiscreteKey& discreteKey, const std::vector& factorPairs) @@ -187,8 +148,7 @@ void HybridGaussianFactor::print(const std::string& s, const KeyFormatter& forma } /* *******************************************************************************/ -GaussianFactorValuePair HybridGaussianFactor::operator()( - const DiscreteValues& assignment) const { +GaussianFactorValuePair HybridGaussianFactor::operator()(const DiscreteValues& assignment) const { return factors_(assignment); } @@ -202,26 +162,13 @@ HybridGaussianProductFactor HybridGaussianFactor::asProductFactor() const { }}}; } -/* *******************************************************************************/ -/// Helper method to compute the error of a component. -static double PotentiallyPrunedComponentError(const GaussianFactor::shared_ptr& gf, - const VectorValues& values) { - // Check if valid pointer - if (gf) { - return gf->error(values); - } else { - // If nullptr this component was pruned, so we return maximum error. This - // way the negative exponential will give a probability value close to 0.0. - return std::numeric_limits::max(); - } -} - /* *******************************************************************************/ AlgebraicDecisionTree HybridGaussianFactor::errorTree( const VectorValues& continuousValues) const { // functor to convert from sharedFactor to double error value. 
auto errorFunc = [&continuousValues](const auto& pair) { - return PotentiallyPrunedComponentError(pair.first, continuousValues); + return pair.first ? pair.first->error(continuousValues) + pair.second + : std::numeric_limits::infinity(); }; DecisionTree error_tree(factors_, errorFunc); return error_tree; @@ -231,7 +178,8 @@ AlgebraicDecisionTree HybridGaussianFactor::errorTree( double HybridGaussianFactor::error(const HybridValues& values) const { // Directly index to get the component, no need to build the whole tree. const auto pair = factors_(values.discrete()); - return PotentiallyPrunedComponentError(pair.first, values.continuous()); + return pair.first ? pair.first->error(values.continuous()) + pair.second + : std::numeric_limits::infinity(); } } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 95acb6ad6a..c2d84fdb83 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -154,8 +154,6 @@ void HybridGaussianFactorGraph::printErrors( } /* ************************************************************************ */ -// TODO(dellaert): it's probably more efficient to first collect the discrete -// keys, and then loop over all assignments to populate a vector. HybridGaussianProductFactor HybridGaussianFactorGraph::collectProductFactor() const { HybridGaussianProductFactor result; @@ -270,6 +268,7 @@ static std::shared_ptr createDiscreteFactor(const ResultTree& eliminatio const DiscreteKeys& discreteSeparator) { auto potential = [&](const auto& pair) -> double { const auto& [conditional, factor] = pair.first; + const double scalar = pair.second; if (conditional && factor) { // If the factor is not null, it has no keys, just contains the residual. @@ -278,8 +277,8 @@ static std::shared_ptr createDiscreteFactor(const ResultTree& eliminatio // negLogConstant gives `-log(k)` // which is `-log(k) = log(1/k) = log(\sqrt{|2πΣ|})`. 
const double negLogK = conditional->negLogConstant(); - const double old = factor->error(kEmpty) - negLogK; - return exp(-old); + const double error = scalar + factor->error(kEmpty) - negLogK; + return exp(-error); } else if (!conditional && !factor) { // If the factor is null, it has been pruned, hence return potential of zero return 0; @@ -302,17 +301,10 @@ static std::shared_ptr createHybridGaussianFactor(const ResultTree& elim const auto& [conditional, factor] = pair.first; const double scalar = pair.second; if (conditional && factor) { - auto hf = std::dynamic_pointer_cast(factor); - if (!hf) throw std::runtime_error("Expected HessianFactor!"); - // Add 2.0 term since the constant term will be premultiplied by 0.5 - // as per the Hessian definition, - // and negative since we want log(k) const double negLogK = conditional->negLogConstant(); - hf->constantTerm() += -2.0 * negLogK; - return {factor, negLogK}; - return {factor, scalar + negLogK}; + return {factor, scalar - negLogK}; } else if (!conditional && !factor) { - return {nullptr, 0.0}; // TODO(frank): or should this be infinity? + return {nullptr, std::numeric_limits::infinity()}; } else { throw std::runtime_error("createHybridGaussianFactors has mixed NULLs"); } From f0770c26cf345dab8d8e6c439ec85d3ada26d916 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Tue, 8 Oct 2024 16:14:01 +0900 Subject: [PATCH 14/18] Less regression, more API tests for s(3). --- .../tests/testHybridGaussianFactorGraph.cpp | 133 ++++++++---------- 1 file changed, 57 insertions(+), 76 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 1bed2533a0..3b5a6a80c5 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -170,14 +170,18 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) { // Test API for the smallest switching network. 
// None of these are regression tests. TEST(HybridBayesNet, Switching) { + // Create switching network with two continuous variables and one discrete: + // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1;z1) ϕ(m0) const double betweenSigma = 0.3, priorSigma = 0.1; Switching s(2, betweenSigma, priorSigma); + + // Check size of linearized factor graph const HybridGaussianFactorGraph& graph = s.linearizedFactorGraph; EXPECT_LONGS_EQUAL(4, graph.size()); // Create some continuous and discrete values - VectorValues continuousValues{{X(0), Vector1(0.1)}, {X(1), Vector1(1.2)}}; - DiscreteValues modeZero{{M(0), 0}}, modeOne{{M(0), 1}}; + const VectorValues continuousValues{{X(0), Vector1(0.1)}, {X(1), Vector1(1.2)}}; + const DiscreteValues modeZero{{M(0), 0}}, modeOne{{M(0), 1}}; // Get the hybrid gaussian factor and check it is as expected auto hgf = std::dynamic_pointer_cast(graph.at(1)); @@ -195,13 +199,13 @@ TEST(HybridBayesNet, Switching) { EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar1, 1e-9); // Check error for M(0) = 0 - HybridValues values0{continuousValues, modeZero}; + const HybridValues values0{continuousValues, modeZero}; double expectedError0 = 0; for (const auto& factor : graph) expectedError0 += factor->error(values0); EXPECT_DOUBLES_EQUAL(expectedError0, graph.error(values0), 1e-5); // Check error for M(0) = 1 - HybridValues values1{continuousValues, modeOne}; + const HybridValues values1{continuousValues, modeOne}; double expectedError1 = 0; for (const auto& factor : graph) expectedError1 += factor->error(values1); EXPECT_DOUBLES_EQUAL(expectedError1, graph.error(values1), 1e-5); @@ -209,22 +213,22 @@ TEST(HybridBayesNet, Switching) { // Check errorTree AlgebraicDecisionTree actualErrors = graph.errorTree(continuousValues); // Create expected error tree - AlgebraicDecisionTree expectedErrors(M(0), expectedError0, expectedError1); + const AlgebraicDecisionTree expectedErrors(M(0), expectedError0, expectedError1); // Check that the actual error tree matches the 
expected one EXPECT(assert_equal(expectedErrors, actualErrors, 1e-5)); // Check probPrime - double probPrime0 = graph.probPrime(values0); + const double probPrime0 = graph.probPrime(values0); EXPECT_DOUBLES_EQUAL(std::exp(-expectedError0), probPrime0, 1e-5); - double probPrime1 = graph.probPrime(values1); + const double probPrime1 = graph.probPrime(values1); EXPECT_DOUBLES_EQUAL(std::exp(-expectedError1), probPrime1, 1e-5); // Check discretePosterior - AlgebraicDecisionTree graphPosterior = graph.discretePosterior(continuousValues); - double sum = probPrime0 + probPrime1; - AlgebraicDecisionTree expectedPosterior(M(0), probPrime0 / sum, probPrime1 / sum); + const AlgebraicDecisionTree graphPosterior = graph.discretePosterior(continuousValues); + const double sum = probPrime0 + probPrime1; + const AlgebraicDecisionTree expectedPosterior(M(0), probPrime0 / sum, probPrime1 / sum); EXPECT(assert_equal(expectedPosterior, graphPosterior, 1e-5)); // Make the clique of factors connected to x0: @@ -246,7 +250,7 @@ TEST(HybridBayesNet, Switching) { EXPECT_DOUBLES_EQUAL((*hgf)(modeOne).second, actualScalar1, 1e-5); // Test eliminate x0 - Ordering ordering{X(0)}; + const Ordering ordering{X(0)}; auto [conditional, factor] = factors_x0.eliminate(ordering); // Check the conditional @@ -254,6 +258,7 @@ TEST(HybridBayesNet, Switching) { EXPECT(conditional->isHybrid()); auto p_x0_given_x1_m = conditional->asHybrid(); CHECK(p_x0_given_x1_m); + EXPECT(HybridGaussianConditional::CheckInvariants(*p_x0_given_x1_m, values1)); EXPECT_LONGS_EQUAL(1, p_x0_given_x1_m->nrFrontals()); // x0 EXPECT_LONGS_EQUAL(2, p_x0_given_x1_m->nrParents()); // x1, m0 @@ -270,8 +275,8 @@ TEST(HybridBayesNet, Switching) { // Check that the conditional and remaining factor are consistent for both modes for (auto&& mode : {modeZero, modeOne}) { - auto gc = (*p_x0_given_x1_m)(mode); - auto [gf, scalar] = (*phi_x1_m)(mode); + const auto gc = (*p_x0_given_x1_m)(mode); + const auto [gf, scalar] = (*phi_x1_m)(mode); 
// The error of the original factors should equal the sum of errors of the conditional and // remaining factor, modulo the normalization constant of the conditional. @@ -332,12 +337,41 @@ TEST(HybridBayesNet, Switching) { } // Now test full elimination of the graph: - auto posterior = graph.eliminateSequential(); - CHECK(posterior); + auto hybridBayesNet = graph.eliminateSequential(); + CHECK(hybridBayesNet); // Check that the posterior P(M|X=continuousValues) from the Bayes net is the same as the // same posterior from the graph. This is a sanity check that the elimination is done correctly. - AlgebraicDecisionTree bnPosterior = graph.discretePosterior(continuousValues); + AlgebraicDecisionTree bnPosterior = hybridBayesNet->discretePosterior(continuousValues); + EXPECT(assert_equal(graphPosterior, bnPosterior)); +} + +/* ****************************************************************************/ +// Test subset of API for switching network with 3 states. +// None of these are regression tests. +TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) { + // Create switching network with three continuous variables and two discrete: + // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1) + Switching s(3); + + // Check size of linearized factor graph + const HybridGaussianFactorGraph& graph = s.linearizedFactorGraph; + EXPECT_LONGS_EQUAL(7, graph.size()); + + // Eliminate the graph + const HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential(); + + const HybridValues delta = hybridBayesNet->optimize(); + const double error = graph.error(delta); + + // Check that the probability prime is the exponential of the error + EXPECT(assert_equal(graph.probPrime(delta), exp(-error), 1e-7)); + + // Check that the posterior P(M|X=continuousValues) from the Bayes net is the same as the + // same posterior from the graph. This is a sanity check that the elimination is done correctly. 
+ const AlgebraicDecisionTree graphPosterior = graph.discretePosterior(delta.continuous()); + const AlgebraicDecisionTree bnPosterior = + hybridBayesNet->discretePosterior(delta.continuous()); EXPECT(assert_equal(graphPosterior, bnPosterior)); } @@ -466,51 +500,6 @@ TEST(HybridGaussianFactorGraph, Conditionals) { EXPECT(assert_equal(expected_discrete, result.discrete())); } -/* ****************************************************************************/ -// Test hybrid gaussian factor graph error and unnormalized probabilities -TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) { - Switching s(3); - - HybridGaussianFactorGraph graph = s.linearizedFactorGraph; - - HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential(); - - const HybridValues delta = hybridBayesNet->optimize(); - const double error = graph.error(delta); - - // regression - EXPECT(assert_equal(1.58886, error, 1e-5)); - - // Real test: - EXPECT(assert_equal(graph.probPrime(delta), exp(-error), 1e-7)); -} - -/* ****************************************************************************/ -// Test hybrid gaussian factor graph error and unnormalized probabilities -TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { - // Create switching network with three continuous variables and two discrete: - // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1) - Switching s(3); - - const HybridGaussianFactorGraph& graph = s.linearizedFactorGraph; - - const HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential(); - - const HybridValues delta = hybridBayesNet->optimize(); - - // regression test for errorTree - std::vector leaves = {2.7916153, 1.5888555, 1.7233422, 1.6191947}; - AlgebraicDecisionTree expectedErrors(s.modes, leaves); - const auto error_tree = graph.errorTree(delta.continuous()); - EXPECT(assert_equal(expectedErrors, error_tree, 1e-7)); - - // regression test for discretePosterior - const AlgebraicDecisionTree expectedPosterior( - s.modes, 
std::vector{0.095516068, 0.31800092, 0.27798511, 0.3084979}); - auto posterior = graph.discretePosterior(delta.continuous()); - EXPECT(assert_equal(expectedPosterior, posterior, 1e-7)); -} - /* ****************************************************************************/ // Test hybrid gaussian factor graph errorTree during incremental operation TEST(HybridGaussianFactorGraph, IncrementalErrorTree) { @@ -528,15 +517,11 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) { HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential(); EXPECT_LONGS_EQUAL(5, hybridBayesNet->size()); + // Check discrete posterior at optimum HybridValues delta = hybridBayesNet->optimize(); - auto error_tree = graph.errorTree(delta.continuous()); - - std::vector discrete_keys = {m0, m1}; - std::vector leaves = {2.7916153, 1.5888555, 1.7233422, 1.6191947}; - AlgebraicDecisionTree expected_error(discrete_keys, leaves); - - // regression - EXPECT(assert_equal(expected_error, error_tree, 1e-7)); + AlgebraicDecisionTree graphPosterior = graph.discretePosterior(delta.continuous()); + AlgebraicDecisionTree bnPosterior = hybridBayesNet->discretePosterior(delta.continuous()); + EXPECT(assert_equal(graphPosterior, bnPosterior)); graph = HybridGaussianFactorGraph(); graph.push_back(*hybridBayesNet); @@ -547,13 +532,9 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) { EXPECT_LONGS_EQUAL(7, hybridBayesNet->size()); delta = hybridBayesNet->optimize(); - auto error_tree2 = graph.errorTree(delta.continuous()); - - // regression - leaves = { - 0.50985198, 0.0097577296, 0.50009425, 0, 0.52922138, 0.029127133, 0.50985105, 0.0097567964}; - AlgebraicDecisionTree expected_error2(s.modes, leaves); - EXPECT(assert_equal(expected_error, error_tree, 1e-7)); + graphPosterior = graph.discretePosterior(delta.continuous()); + bnPosterior = hybridBayesNet->discretePosterior(delta.continuous()); + EXPECT(assert_equal(graphPosterior, bnPosterior)); } /* 
****************************************************************************/ From bcd94e32a300efbcbeed1360f37a38482f653ecc Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Tue, 8 Oct 2024 16:14:56 +0900 Subject: [PATCH 15/18] Store negLogConstant[i] - negLogConstant_ --- gtsam/hybrid/HybridGaussianConditional.cpp | 33 ++++-- .../tests/testHybridGaussianConditional.cpp | 106 ++++++++---------- 2 files changed, 66 insertions(+), 73 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index ba2c34414b..4147af8cb4 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -33,6 +33,15 @@ namespace gtsam { /* *******************************************************************************/ +/** + * @brief Helper struct for constructing HybridGaussianConditional objects + * + * This struct contains the following fields: + * - nrFrontals: Optional size_t for number of frontal variables + * - pairs: FactorValuePairs for storing conditionals with their negLogConstant + * - conditionals: Conditionals for storing conditionals. TODO(frank): kill! + * - minNegLogConstant: minimum negLogConstant, computed here, subtracted in constructor + */ struct HybridGaussianConditional::Helper { std::optional nrFrontals; FactorValuePairs pairs; @@ -67,16 +76,12 @@ struct HybridGaussianConditional::Helper { /// Construct from tree of GaussianConditionals. 
explicit Helper(const Conditionals& conditionals) : conditionals(conditionals), minNegLogConstant(std::numeric_limits::infinity()) { - auto func = [this](const GC::shared_ptr& c) -> GaussianFactorValuePair { - double value = 0.0; - if (c) { - if (!nrFrontals.has_value()) { - nrFrontals = c->nrFrontals(); - } - value = c->negLogConstant(); - minNegLogConstant = std::min(minNegLogConstant, value); - } - return {std::dynamic_pointer_cast(c), value}; + auto func = [this](const GC::shared_ptr& gc) -> GaussianFactorValuePair { + if (!gc) return {nullptr, std::numeric_limits::infinity()}; + if (!nrFrontals) nrFrontals = gc->nrFrontals(); + double value = gc->negLogConstant(); + minNegLogConstant = std::min(minNegLogConstant, value); + return {gc, value}; }; pairs = FactorValuePairs(conditionals, func); if (!nrFrontals.has_value()) { @@ -90,7 +95,13 @@ struct HybridGaussianConditional::Helper { /* *******************************************************************************/ HybridGaussianConditional::HybridGaussianConditional(const DiscreteKeys& discreteParents, const Helper& helper) - : BaseFactor(discreteParents, helper.pairs), + : BaseFactor( + discreteParents, + FactorValuePairs( + helper.pairs, + [&](const GaussianFactorValuePair& pair) { // subtract minNegLogConstant + return GaussianFactorValuePair{pair.first, pair.second - helper.minNegLogConstant}; + })), BaseConditional(*helper.nrFrontals), conditionals_(helper.conditionals), negLogConstant_(helper.minNegLogConstant) {} diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 05f7c6c612..5631ac3219 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -18,6 +18,8 @@ * @date December 2021 */ +#include +#include #include #include #include @@ -28,9 +30,6 @@ #include #include -#include "gtsam/discrete/DecisionTree.h" -#include "gtsam/discrete/DiscreteKey.h" - // Include 
for test suite #include @@ -52,10 +51,8 @@ namespace equal_constants { // Create a simple HybridGaussianConditional const double commonSigma = 2.0; const std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), - commonSigma), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), - commonSigma)}; + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma)}; const HybridGaussianConditional hybrid_conditional(mode, conditionals); } // namespace equal_constants @@ -80,8 +77,8 @@ TEST(HybridGaussianConditional, LogProbability) { using namespace equal_constants; for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - EXPECT_DOUBLES_EQUAL(conditionals[mode]->logProbability(vv), - hybrid_conditional.logProbability(hv), 1e-8); + EXPECT_DOUBLES_EQUAL( + conditionals[mode]->logProbability(vv), hybrid_conditional.logProbability(hv), 1e-8); } } @@ -93,8 +90,7 @@ TEST(HybridGaussianConditional, Error) { // Check result. DiscreteKeys discrete_keys{mode}; - std::vector leaves = {conditionals[0]->error(vv), - conditionals[1]->error(vv)}; + std::vector leaves = {conditionals[0]->error(vv), conditionals[1]->error(vv)}; AlgebraicDecisionTree expected(discrete_keys, leaves); EXPECT(assert_equal(expected, actual, 1e-6)); @@ -102,8 +98,7 @@ TEST(HybridGaussianConditional, Error) { // Check for non-tree version. for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv), - hybrid_conditional.error(hv), 1e-8); + EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv), hybrid_conditional.error(hv), 1e-8); } } @@ -118,10 +113,8 @@ TEST(HybridGaussianConditional, Likelihood) { // Check that the hybrid conditional error and the likelihood error are the // same. 
- EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), - 1e-8); - EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), - 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), 1e-8); // Check that likelihood error is as expected, i.e., just the errors of the // individual likelihoods, in the `equal_constants` case. @@ -135,8 +128,7 @@ TEST(HybridGaussianConditional, Likelihood) { std::vector ratio(2); for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = - std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); + ratio[mode] = std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); } EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } @@ -146,10 +138,8 @@ namespace mode_dependent_constants { // Create a HybridGaussianConditional with mode-dependent noise models. // 0 is low-noise, 1 is high-noise. const std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), - 0.5), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), - 3.0)}; + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 0.5), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0)}; const HybridGaussianConditional hybrid_conditional(mode, conditionals); } // namespace mode_dependent_constants @@ -181,9 +171,8 @@ TEST(HybridGaussianConditional, Error2) { // Expected error is e(X) + log(sqrt(|2πΣ|)). // We normalize log(sqrt(|2πΣ|)) with min(negLogConstant) // so it is non-negative. 
- std::vector leaves = { - conditionals[0]->error(vv) + negLogConstant0 - minErrorConstant, - conditionals[1]->error(vv) + negLogConstant1 - minErrorConstant}; + std::vector leaves = {conditionals[0]->error(vv) + negLogConstant0 - minErrorConstant, + conditionals[1]->error(vv) + negLogConstant1 - minErrorConstant}; AlgebraicDecisionTree expected(discrete_keys, leaves); EXPECT(assert_equal(expected, actual, 1e-6)); @@ -191,10 +180,10 @@ TEST(HybridGaussianConditional, Error2) { // Check for non-tree version. for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) + - conditionals[mode]->negLogConstant() - - minErrorConstant, - hybrid_conditional.error(hv), 1e-8); + EXPECT_DOUBLES_EQUAL( + conditionals[mode]->error(vv) + conditionals[mode]->negLogConstant() - minErrorConstant, + hybrid_conditional.error(hv), + 1e-8); } } @@ -209,10 +198,8 @@ TEST(HybridGaussianConditional, Likelihood2) { // Check that the hybrid conditional error and the likelihood error are as // expected, this invariant is the same as the equal noise case: - EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), - 1e-8); - EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), - 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), 1e-8); // Check the detailed JacobianFactor calculation for mode==1. { @@ -221,34 +208,18 @@ TEST(HybridGaussianConditional, Likelihood2) { const auto jf1 = std::dynamic_pointer_cast(gf1); CHECK(jf1); - // It has 2 rows, not 1! - CHECK(jf1->rows() == 2); - - // Check that the constant C1 is properly encoded in the JacobianFactor. 
- const double C1 = - conditionals[1]->negLogConstant() - hybrid_conditional.negLogConstant(); - const double c1 = std::sqrt(2.0 * C1); - Vector expected_unwhitened(2); - expected_unwhitened << 4.9 - 5.0, -c1; - Vector actual_unwhitened = jf1->unweighted_error(vv); - EXPECT(assert_equal(expected_unwhitened, actual_unwhitened)); - - // Make sure the noise model does not touch it. - Vector expected_whitened(2); - expected_whitened << (4.9 - 5.0) / 3.0, -c1; - Vector actual_whitened = jf1->error_vector(vv); - EXPECT(assert_equal(expected_whitened, actual_whitened)); - - // Check that the error is equal to the conditional error: - EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), jf1->error(hv1), 1e-8); + // Check that the JacobianFactor error with constants is equal to the conditional error: + EXPECT_DOUBLES_EQUAL( + hybrid_conditional.error(hv1), + jf1->error(hv1) + conditionals[1]->negLogConstant() - hybrid_conditional.negLogConstant(), + 1e-8); } // Check that the ratio of probPrime to evaluate is the same for all modes. 
std::vector ratio(2); for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = - std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); + ratio[mode] = std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); } EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } @@ -261,8 +232,7 @@ TEST(HybridGaussianConditional, Prune) { DiscreteKeys modes{{M(1), 2}, {M(2), 2}}; std::vector gcs; for (size_t i = 0; i < 4; i++) { - gcs.push_back( - GaussianConditional::sharedMeanAndStddev(Z(0), Vector1(i + 1), i + 1)); + gcs.push_back(GaussianConditional::sharedMeanAndStddev(Z(0), Vector1(i + 1), i + 1)); } auto empty = std::make_shared(); HybridGaussianConditional::Conditionals conditionals(modes, gcs); @@ -282,8 +252,14 @@ TEST(HybridGaussianConditional, Prune) { } } { - const std::vector potentials{0, 0, 0.5, 0, // - 0, 0, 0.5, 0}; + const std::vector potentials{0, + 0, + 0.5, + 0, // + 0, + 0, + 0.5, + 0}; const DecisionTreeFactor decisionTreeFactor(keys, potentials); const auto pruned = hgc.prune(decisionTreeFactor); @@ -292,8 +268,14 @@ TEST(HybridGaussianConditional, Prune) { EXPECT_LONGS_EQUAL(2, pruned->nrComponents()); } { - const std::vector potentials{0.2, 0, 0.3, 0, // - 0, 0, 0.5, 0}; + const std::vector potentials{0.2, + 0, + 0.3, + 0, // + 0, + 0, + 0.5, + 0}; const DecisionTreeFactor decisionTreeFactor(keys, potentials); const auto pruned = hgc.prune(decisionTreeFactor); From 0f48efb0a9d8949af1ac1c0c5058f4becba111ee Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Tue, 8 Oct 2024 16:34:50 +0900 Subject: [PATCH 16/18] asProductFactor from base class! 
--- gtsam/hybrid/HybridGaussianConditional.cpp | 30 ------- gtsam/hybrid/HybridGaussianConditional.h | 3 - gtsam/hybrid/tests/testHybridMotionModel.cpp | 88 ++++++++++---------- 3 files changed, 44 insertions(+), 77 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 4147af8cb4..c3c1b893ec 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -150,36 +150,6 @@ const HybridGaussianConditional::Conditionals& HybridGaussianConditional::condit return conditionals_; } -/* *******************************************************************************/ -HybridGaussianProductFactor HybridGaussianConditional::asProductFactor() const { - auto wrap = [this](const std::shared_ptr& gc) - -> std::pair { - // First check if conditional has not been pruned - if (gc) { - const double Cgm_Kgcm = gc->negLogConstant() - this->negLogConstant_; - // If there is a difference in the covariances, we need to account for - // that since the error is dependent on the mode. - if (Cgm_Kgcm > 0.0) { - // We add a constant factor which will be used when computing - // the probability of the discrete variables. - Vector c(1); - c << std::sqrt(2.0 * Cgm_Kgcm); - auto constantFactor = std::make_shared(c); - return {GaussianFactorGraph{gc, constantFactor}, Cgm_Kgcm}; - } else { - // The scalar can be zero. - // TODO(Frank): after hiding is gone, this should be only case here. - return {GaussianFactorGraph{gc}, Cgm_Kgcm}; - } - } else { - // If the conditional is pruned, return an empty GaussianFactorGraph with zero scalar sum - // TODO(Frank): Could we just return an *empty* GaussianFactorGraph? 
- return {GaussianFactorGraph{nullptr}, 0.0}; - } - }; - return {{conditionals_, wrap}}; -} - /* *******************************************************************************/ size_t HybridGaussianConditional::nrComponents() const { size_t total = 0; diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index 3e6574abc4..25391cb837 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -222,9 +222,6 @@ class GTSAM_EXPORT HybridGaussianConditional HybridGaussianConditional::shared_ptr prune( const DecisionTreeFactor &discreteProbs) const; - /// Convert to a DecisionTree of Gaussian factor graphs. - HybridGaussianProductFactor asProductFactor() const override; - /// @} private: diff --git a/gtsam/hybrid/tests/testHybridMotionModel.cpp b/gtsam/hybrid/tests/testHybridMotionModel.cpp index cbfdc7fe4e..bcffbe6282 100644 --- a/gtsam/hybrid/tests/testHybridMotionModel.cpp +++ b/gtsam/hybrid/tests/testHybridMotionModel.cpp @@ -46,25 +46,25 @@ using symbol_shorthand::Z; DiscreteKey m1(M(1), 2); -void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) { +void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) { auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma); - hbn.emplace_shared(z_key, Vector1(0.0), I_1x1, x_key, - -I_1x1, measurement_model); + hbn.emplace_shared( + z_key, Vector1(0.0), I_1x1, x_key, -I_1x1, measurement_model); } /// Create hybrid motion model p(x1 | x0, m1) -static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( - double mu0, double mu1, double sigma0, double sigma1) { +static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(double mu0, + double mu1, + double sigma0, + double sigma1) { std::vector> motionModels{{Vector1(mu0), sigma0}, {Vector1(mu1), sigma1}}; - return std::make_shared(m1, X(1), I_1x1, X(0), - motionModels); + return std::make_shared(m1, X(1), I_1x1, X(0), 
motionModels); } /// Create two state Bayes network with 1 or two measurement models -HybridBayesNet CreateBayesNet( - const HybridGaussianConditional::shared_ptr &hybridMotionModel, - bool add_second_measurement = false) { +HybridBayesNet CreateBayesNet(const HybridGaussianConditional::shared_ptr& hybridMotionModel, + bool add_second_measurement = false) { HybridBayesNet hbn; // Add measurement model p(z0 | x0) @@ -86,15 +86,16 @@ HybridBayesNet CreateBayesNet( /// Approximate the discrete marginal P(m1) using importance sampling std::pair approximateDiscreteMarginal( - const HybridBayesNet &hbn, - const HybridGaussianConditional::shared_ptr &hybridMotionModel, - const VectorValues &given, size_t N = 100000) { + const HybridBayesNet& hbn, + const HybridGaussianConditional::shared_ptr& hybridMotionModel, + const VectorValues& given, + size_t N = 100000) { /// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), /// using q(x0) = N(z0, sigmaQ) to sample x0. HybridBayesNet q; q.push_back(hybridMotionModel); // Add hybrid motion model q.emplace_shared(GaussianConditional::FromMeanAndStddev( - X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0 + X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0 q.emplace_shared(m1, "50/50"); // Discrete prior. // Do importance sampling @@ -192,24 +193,25 @@ TEST(HybridGaussianFactor, TwoStateModel2) { HybridBayesNet hbn = CreateBayesNet(hybridMotionModel); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); - // Check that ratio of Bayes net and factor graph for different modes is - // equal for several values of {x0,x1}. 
- for (VectorValues vv : - {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + HybridBayesNet::shared_ptr eliminated = gfg.eliminateSequential(); + + for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN - HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), - gfg.error(hv1) / hbn.error(hv1), 1e-9); - } + const auto& expectedDiscretePosterior = hbn.discretePosterior(vv); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + // Equality of posteriors asserts that the factor graph is correct (same ratios for all modes) + EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); + + // This one asserts that HBN resulting from elimination is correct. + EXPECT(assert_equal(expectedDiscretePosterior, eliminated->discretePosterior(vv))); + } // Importance sampling run with 100k samples gives 50.095/49.905 // approximateDiscreteMarginal(hbn, hybridMotionModel, given); // Since no measurement on x1, we a 50/50 probability - auto p_m = bn->at(2)->asDiscrete(); + auto p_m = eliminated->at(2)->asDiscrete(); EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9); EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9); } @@ -221,24 +223,26 @@ TEST(HybridGaussianFactor, TwoStateModel2) { HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true); HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); + HybridBayesNet::shared_ptr eliminated = gfg.eliminateSequential(); // Check that ratio of Bayes net and factor graph for different modes is // equal for several values of {x0,x1}. 
- for (VectorValues vv : - {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN - HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), - gfg.error(hv1) / hbn.error(hv1), 1e-9); - } + const auto& expectedDiscretePosterior = hbn.discretePosterior(vv); - HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); + // Equality of posteriors asserts that the factor graph is correct (same ratios for all modes) + EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); + + // This one asserts that HBN resulting from elimination is correct. + EXPECT(assert_equal(expectedDiscretePosterior, eliminated->discretePosterior(vv))); + } // Values taken from an importance sampling run with 100k samples: // approximateDiscreteMarginal(hbn, hybridMotionModel, given); DiscreteConditional expected(m1, "48.3158/51.6842"); - EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002)); + EXPECT(assert_equal(expected, *(eliminated->at(2)->asDiscrete()), 0.002)); } { @@ -286,13 +290,11 @@ TEST(HybridGaussianFactor, TwoStateModel3) { // Check that ratio of Bayes net and factor graph for different modes is // equal for several values of {x0,x1}. 
- for (VectorValues vv : - {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), - gfg.error(hv1) / hbn.error(hv1), 1e-9); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), gfg.error(hv1) / hbn.error(hv1), 1e-9); } HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); @@ -316,13 +318,11 @@ TEST(HybridGaussianFactor, TwoStateModel3) { // Check that ratio of Bayes net and factor graph for different modes is // equal for several values of {x0,x1}. - for (VectorValues vv : - {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), - gfg.error(hv1) / hbn.error(hv1), 1e-9); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), gfg.error(hv1) / hbn.error(hv1), 1e-9); } HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); From 524161403a3d626c41f8f797bc74b596e128354e Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Tue, 8 Oct 2024 17:09:47 +0900 Subject: [PATCH 17/18] Fixed discreteEimination --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 6 +-- gtsam/hybrid/tests/testGaussianMixture.cpp | 54 +++++++++++---------- gtsam/hybrid/tests/testHybridEstimation.cpp | 2 - 3 files changed, 31 insertions(+), 31 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 
c2d84fdb83..f6b3a9eb75 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -229,12 +229,12 @@ static std::pair> discret // In this case, compute discrete probabilities. // TODO(frank): What about the scalar!? auto potential = [&](const auto& pair) -> double { - auto [factor, _] = pair; + auto [factor, scalar] = pair; // If the factor is null, it has been pruned, hence return potential of zero if (!factor) - return 0; + return 0.0; else - return exp(-factor->error(kEmpty)); + return exp(-scalar - factor->error(kEmpty)); }; DecisionTree potentials(gmf->factors(), potential); dfg.emplace_shared(gmf->discreteKeys(), potentials); diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 3256f5686c..6bd9a22a87 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -40,8 +40,7 @@ const DiscreteKey m(M(0), 2); const DiscreteValues m1Assignment{{M(0), 1}}; // Define a 50/50 prior on the mode -DiscreteConditional::shared_ptr mixing = - std::make_shared(m, "60/40"); +DiscreteConditional::shared_ptr mixing = std::make_shared(m, "60/40"); /// Gaussian density function double Gaussian(double mu, double sigma, double z) { @@ -53,24 +52,12 @@ double Gaussian(double mu, double sigma, double z) { * If sigma0 == sigma1, it simplifies to a sigmoid function. * Hardcodes 60/40 prior on mode. */ -double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, - double z) { +double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, double z) { const double p0 = 0.6 * Gaussian(mu0, sigma0, z); const double p1 = 0.4 * Gaussian(mu1, sigma1, z); return p1 / (p0 + p1); }; -/// Given \phi(m;z)\phi(m) use eliminate to obtain P(m|z). -DiscreteConditional SolveHFG(const HybridGaussianFactorGraph &hfg) { - return *hfg.eliminateSequential()->at(0)->asDiscrete(); -} - -/// Given p(z,m) and z, convert to HFG and solve. 
-DiscreteConditional SolveHBN(const HybridBayesNet &hbn, double z) { - VectorValues given{{Z(0), Vector1(z)}}; - return SolveHFG(hbn.toFactorGraph(given)); -} - /* * Test a Gaussian Mixture Model P(m)p(z|m) with same sigma. * The posterior, as a function of z, should be a sigmoid function. @@ -81,14 +68,14 @@ TEST(GaussianMixture, GaussianMixtureModel) { // Create a Gaussian mixture model p(z|m) with same sigma. HybridBayesNet gmm; - std::vector> parameters{{Vector1(mu0), sigma}, - {Vector1(mu1), sigma}}; + std::vector> parameters{{Vector1(mu0), sigma}, {Vector1(mu1), sigma}}; gmm.emplace_shared(m, Z(0), parameters); gmm.push_back(mixing); // At the halfway point between the means, we should get P(m|z)=0.5 double midway = mu1 - mu0; - auto pMid = SolveHBN(gmm, midway); + auto eliminationResult = gmm.toFactorGraph({{Z(0), Vector1(midway)}}).eliminateSequential(); + auto pMid = *eliminationResult->at(0)->asDiscrete(); EXPECT(assert_equal(DiscreteConditional(m, "60/40"), pMid)); // Everywhere else, the result should be a sigmoid. 
@@ -97,7 +84,8 @@ TEST(GaussianMixture, GaussianMixtureModel) { const double expected = prob_m_z(mu0, mu1, sigma, sigma, z); // Workflow 1: convert HBN to HFG and solve - auto posterior1 = SolveHBN(gmm, z); + auto eliminationResult1 = gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential(); + auto posterior1 = *eliminationResult1->at(0)->asDiscrete(); EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8); // Workflow 2: directly specify HFG and solve @@ -105,7 +93,8 @@ TEST(GaussianMixture, GaussianMixtureModel) { hfg1.emplace_shared( m, std::vector{Gaussian(mu0, sigma, z), Gaussian(mu1, sigma, z)}); hfg1.push_back(mixing); - auto posterior2 = SolveHFG(hfg1); + auto eliminationResult2 = hfg1.eliminateSequential(); + auto posterior2 = *eliminationResult2->at(0)->asDiscrete(); EXPECT_DOUBLES_EQUAL(expected, posterior2(m1Assignment), 1e-8); } } @@ -120,15 +109,27 @@ TEST(GaussianMixture, GaussianMixtureModel2) { // Create a Gaussian mixture model p(z|m) with same sigma. HybridBayesNet gmm; - std::vector> parameters{{Vector1(mu0), sigma0}, - {Vector1(mu1), sigma1}}; + std::vector> parameters{{Vector1(mu0), sigma0}, {Vector1(mu1), sigma1}}; gmm.emplace_shared(m, Z(0), parameters); gmm.push_back(mixing); // We get zMax=3.1333 by finding the maximum value of the function, at which // point the mode m==1 is about twice as probable as m==0. double zMax = 3.133; - auto pMax = SolveHBN(gmm, zMax); + const VectorValues vv{{Z(0), Vector1(zMax)}}; + auto gfg = gmm.toFactorGraph(vv); + + // Equality of posteriors asserts that the elimination is correct (same ratios for all modes) + const auto& expectedDiscretePosterior = gmm.discretePosterior(vv); + EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); + + // Eliminate the graph! 
+ auto eliminationResultMax = gfg.eliminateSequential(); + + // Equality of posteriors asserts that the elimination is correct (same ratios for all modes) + EXPECT(assert_equal(expectedDiscretePosterior, eliminationResultMax->discretePosterior(vv))); + + auto pMax = *eliminationResultMax->at(0)->asDiscrete(); EXPECT(assert_equal(DiscreteConditional(m, "42/58"), pMax, 1e-4)); // Everywhere else, the result should be a bell curve like function. @@ -137,7 +138,8 @@ TEST(GaussianMixture, GaussianMixtureModel2) { const double expected = prob_m_z(mu0, mu1, sigma0, sigma1, z); // Workflow 1: convert HBN to HFG and solve - auto posterior1 = SolveHBN(gmm, z); + auto eliminationResult1 = gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential(); + auto posterior1 = *eliminationResult1->at(0)->asDiscrete(); EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8); // Workflow 2: directly specify HFG and solve @@ -145,11 +147,11 @@ TEST(GaussianMixture, GaussianMixtureModel2) { hfg.emplace_shared( m, std::vector{Gaussian(mu0, sigma0, z), Gaussian(mu1, sigma1, z)}); hfg.push_back(mixing); - auto posterior2 = SolveHFG(hfg); + auto eliminationResult2 = hfg.eliminateSequential(); + auto posterior2 = *eliminationResult2->at(0)->asDiscrete(); EXPECT_DOUBLES_EQUAL(expected, posterior2(m1Assignment), 1e-8); } } - /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 88d8be0bc5..b2a909ac31 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -518,8 +518,6 @@ TEST(HybridEstimation, CorrectnessViaSampling) { // the normalizing term computed via the Bayes net determinant. const HybridValues sample = bn->sample(&rng); double expected_ratio = compute_ratio(sample); - // regression - EXPECT_DOUBLES_EQUAL(0.728588, expected_ratio, 1e-6); // 3. 
Do sampling constexpr int num_samples = 10; From 9f7ccbb33c29a69459054ddfa0e61ca0abcd4526 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Tue, 8 Oct 2024 17:57:53 +0900 Subject: [PATCH 18/18] Minimize formatting changes --- gtsam/hybrid/HybridGaussianConditional.cpp | 154 +++++++++--------- gtsam/hybrid/HybridGaussianFactor.cpp | 54 +++--- gtsam/hybrid/HybridGaussianFactor.h | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 149 +++++++++-------- gtsam/hybrid/HybridGaussianProductFactor.cpp | 9 +- gtsam/hybrid/HybridGaussianProductFactor.h | 30 ++-- gtsam/hybrid/HybridNonlinearFactorGraph.cpp | 6 +- gtsam/hybrid/HybridNonlinearFactorGraph.h | 4 +- gtsam/hybrid/tests/testGaussianMixture.cpp | 30 ++-- gtsam/hybrid/tests/testHybridBayesNet.cpp | 88 ++++++---- gtsam/hybrid/tests/testHybridBayesTree.cpp | 46 +++--- gtsam/hybrid/tests/testHybridEstimation.cpp | 2 - .../tests/testHybridGaussianConditional.cpp | 87 +++++----- .../tests/testHybridGaussianFactorGraph.cpp | 152 +++++++++-------- .../tests/testHybridGaussianProductFactor.cpp | 7 +- gtsam/hybrid/tests/testHybridMotionModel.cpp | 73 +++++---- .../tests/testHybridNonlinearFactorGraph.cpp | 1 - 17 files changed, 509 insertions(+), 385 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index c3c1b893ec..4dcb840341 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -40,7 +40,8 @@ namespace gtsam { * - nrFrontals: Optional size_t for number of frontal variables * - pairs: FactorValuePairs for storing conditionals with their negLogConstant * - conditionals: Conditionals for storing conditionals. TODO(frank): kill! 
- * - minNegLogConstant: minimum negLogConstant, computed here, subtracted in constructor + * - minNegLogConstant: minimum negLogConstant, computed here, subtracted in + * constructor */ struct HybridGaussianConditional::Helper { std::optional nrFrontals; @@ -53,7 +54,7 @@ struct HybridGaussianConditional::Helper { /// Construct from a vector of mean and sigma pairs, plus extra args. template - explicit Helper(const DiscreteKey& mode, const P& p, Args&&... args) { + explicit Helper(const DiscreteKey &mode, const P &p, Args &&...args) { nrFrontals = 1; minNegLogConstant = std::numeric_limits::infinity(); @@ -61,8 +62,9 @@ struct HybridGaussianConditional::Helper { std::vector gcs; fvs.reserve(p.size()); gcs.reserve(p.size()); - for (auto&& [mean, sigma] : p) { - auto gaussianConditional = GC::sharedMeanAndStddev(std::forward(args)..., mean, sigma); + for (auto &&[mean, sigma] : p) { + auto gaussianConditional = + GC::sharedMeanAndStddev(std::forward(args)..., mean, sigma); double value = gaussianConditional->negLogConstant(); minNegLogConstant = std::min(minNegLogConstant, value); fvs.emplace_back(gaussianConditional, value); @@ -74,8 +76,9 @@ struct HybridGaussianConditional::Helper { } /// Construct from tree of GaussianConditionals. 
- explicit Helper(const Conditionals& conditionals) - : conditionals(conditionals), minNegLogConstant(std::numeric_limits::infinity()) { + explicit Helper(const Conditionals &conditionals) + : conditionals(conditionals), + minNegLogConstant(std::numeric_limits::infinity()) { auto func = [this](const GC::shared_ptr& gc) -> GaussianFactorValuePair { if (!gc) return {nullptr, std::numeric_limits::infinity()}; if (!nrFrontals) nrFrontals = gc->nrFrontals(); @@ -93,67 +96,63 @@ struct HybridGaussianConditional::Helper { }; /* *******************************************************************************/ -HybridGaussianConditional::HybridGaussianConditional(const DiscreteKeys& discreteParents, - const Helper& helper) - : BaseFactor( - discreteParents, - FactorValuePairs( - helper.pairs, - [&](const GaussianFactorValuePair& pair) { // subtract minNegLogConstant - return GaussianFactorValuePair{pair.first, pair.second - helper.minNegLogConstant}; - })), +HybridGaussianConditional::HybridGaussianConditional( + const DiscreteKeys& discreteParents, const Helper& helper) + : BaseFactor(discreteParents, + FactorValuePairs(helper.pairs, + [&](const GaussianFactorValuePair& + pair) { // subtract minNegLogConstant + return GaussianFactorValuePair{ + pair.first, + pair.second - helper.minNegLogConstant}; + })), BaseConditional(*helper.nrFrontals), conditionals_(helper.conditionals), negLogConstant_(helper.minNegLogConstant) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey& discreteParent, - const std::vector& conditionals) + const DiscreteKey &discreteParent, + const std::vector &conditionals) : HybridGaussianConditional(DiscreteKeys{discreteParent}, Conditionals({discreteParent}, conditionals)) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey& discreteParent, - Key key, // - const std::vector>& parameters) + const DiscreteKey &discreteParent, Key key, // + const std::vector> ¶meters) : 
HybridGaussianConditional(DiscreteKeys{discreteParent}, Helper(discreteParent, parameters, key)) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey& discreteParent, - Key key, // - const Matrix& A, - Key parent, - const std::vector>& parameters) - : HybridGaussianConditional(DiscreteKeys{discreteParent}, - Helper(discreteParent, parameters, key, A, parent)) {} + const DiscreteKey &discreteParent, Key key, // + const Matrix &A, Key parent, + const std::vector> ¶meters) + : HybridGaussianConditional( + DiscreteKeys{discreteParent}, + Helper(discreteParent, parameters, key, A, parent)) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKey& discreteParent, - Key key, // - const Matrix& A1, - Key parent1, - const Matrix& A2, - Key parent2, - const std::vector>& parameters) - : HybridGaussianConditional(DiscreteKeys{discreteParent}, - Helper(discreteParent, parameters, key, A1, parent1, A2, parent2)) { -} + const DiscreteKey &discreteParent, Key key, // + const Matrix &A1, Key parent1, const Matrix &A2, Key parent2, + const std::vector> ¶meters) + : HybridGaussianConditional( + DiscreteKeys{discreteParent}, + Helper(discreteParent, parameters, key, A1, parent1, A2, parent2)) {} HybridGaussianConditional::HybridGaussianConditional( - const DiscreteKeys& discreteParents, - const HybridGaussianConditional::Conditionals& conditionals) + const DiscreteKeys &discreteParents, + const HybridGaussianConditional::Conditionals &conditionals) : HybridGaussianConditional(discreteParents, Helper(conditionals)) {} /* *******************************************************************************/ -const HybridGaussianConditional::Conditionals& HybridGaussianConditional::conditionals() const { +const HybridGaussianConditional::Conditionals & +HybridGaussianConditional::conditionals() const { return conditionals_; } /* *******************************************************************************/ size_t 
HybridGaussianConditional::nrComponents() const { size_t total = 0; - conditionals_.visit([&total](const GaussianFactor::shared_ptr& node) { + conditionals_.visit([&total](const GaussianFactor::shared_ptr &node) { if (node) total += 1; }); return total; @@ -161,19 +160,21 @@ size_t HybridGaussianConditional::nrComponents() const { /* *******************************************************************************/ GaussianConditional::shared_ptr HybridGaussianConditional::choose( - const DiscreteValues& discreteValues) const { - auto& ptr = conditionals_(discreteValues); + const DiscreteValues &discreteValues) const { + auto &ptr = conditionals_(discreteValues); if (!ptr) return nullptr; auto conditional = std::dynamic_pointer_cast(ptr); if (conditional) return conditional; else - throw std::logic_error("A HybridGaussianConditional unexpectedly contained a non-conditional"); + throw std::logic_error( + "A HybridGaussianConditional unexpectedly contained a non-conditional"); } /* *******************************************************************************/ -bool HybridGaussianConditional::equals(const HybridFactor& lf, double tol) const { - const This* e = dynamic_cast(&lf); +bool HybridGaussianConditional::equals(const HybridFactor &lf, + double tol) const { + const This *e = dynamic_cast(&lf); if (e == nullptr) return false; // This will return false if either conditionals_ is empty or e->conditionals_ @@ -182,26 +183,27 @@ bool HybridGaussianConditional::equals(const HybridFactor& lf, double tol) const // Check the base and the factors: return BaseFactor::equals(*e, tol) && - conditionals_.equals(e->conditionals_, [tol](const auto& f1, const auto& f2) { - return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol)); - }); + conditionals_.equals( + e->conditionals_, [tol](const auto &f1, const auto &f2) { + return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol)); + }); } /* *******************************************************************************/ -void 
HybridGaussianConditional::print(const std::string& s, const KeyFormatter& formatter) const { +void HybridGaussianConditional::print(const std::string &s, + const KeyFormatter &formatter) const { std::cout << (s.empty() ? "" : s + "\n"); BaseConditional::print("", formatter); std::cout << " Discrete Keys = "; - for (auto& dk : discreteKeys()) { + for (auto &dk : discreteKeys()) { std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } std::cout << std::endl << " logNormalizationConstant: " << -negLogConstant() << std::endl << std::endl; conditionals_.print( - "", - [&](Key k) { return formatter(k); }, - [&](const GaussianConditional::shared_ptr& gf) -> std::string { + "", [&](Key k) { return formatter(k); }, + [&](const GaussianConditional::shared_ptr &gf) -> std::string { RedirectCout rd; if (gf && !gf->empty()) { gf->print("", formatter); @@ -218,19 +220,20 @@ KeyVector HybridGaussianConditional::continuousParents() const { const auto range = parents(); KeyVector continuousParentKeys(range.begin(), range.end()); // Loop over all discrete keys: - for (const auto& discreteKey : discreteKeys()) { + for (const auto &discreteKey : discreteKeys()) { const Key key = discreteKey.first; // remove that key from continuousParentKeys: - continuousParentKeys.erase( - std::remove(continuousParentKeys.begin(), continuousParentKeys.end(), key), + continuousParentKeys.erase(std::remove(continuousParentKeys.begin(), + continuousParentKeys.end(), key), continuousParentKeys.end()); } return continuousParentKeys; } /* ************************************************************************* */ -bool HybridGaussianConditional::allFrontalsGiven(const VectorValues& given) const { - for (auto&& kv : given) { +bool HybridGaussianConditional::allFrontalsGiven( + const VectorValues &given) const { + for (auto &&kv : given) { if (given.find(kv.first) == given.end()) { return false; } @@ -240,7 +243,7 @@ bool HybridGaussianConditional::allFrontalsGiven(const VectorValues& 
given) cons /* ************************************************************************* */ std::shared_ptr HybridGaussianConditional::likelihood( - const VectorValues& given) const { + const VectorValues &given) const { if (!allFrontalsGiven(given)) { throw std::runtime_error( "HybridGaussianConditional::likelihood: given values are missing some " @@ -251,29 +254,32 @@ std::shared_ptr HybridGaussianConditional::likelihood( const KeyVector continuousParentKeys = continuousParents(); const HybridGaussianFactor::FactorValuePairs likelihoods( conditionals_, - [&](const GaussianConditional::shared_ptr& conditional) -> GaussianFactorValuePair { + [&](const GaussianConditional::shared_ptr &conditional) + -> GaussianFactorValuePair { const auto likelihood_m = conditional->likelihood(given); const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_; return {likelihood_m, Cgm_Kgcm}; }); - return std::make_shared(discreteParentKeys, likelihoods); + return std::make_shared(discreteParentKeys, + likelihoods); } /* ************************************************************************* */ -std::set DiscreteKeysAsSet(const DiscreteKeys& discreteKeys) { +std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) { std::set s(discreteKeys.begin(), discreteKeys.end()); return s; } /* *******************************************************************************/ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune( - const DecisionTreeFactor& discreteProbs) const { + const DecisionTreeFactor &discreteProbs) const { // Find keys in discreteProbs.keys() but not in this->keys(): std::set mine(this->keys().begin(), this->keys().end()); - std::set theirs(discreteProbs.keys().begin(), discreteProbs.keys().end()); + std::set theirs(discreteProbs.keys().begin(), + discreteProbs.keys().end()); std::vector diff; - std::set_difference( - theirs.begin(), theirs.end(), mine.begin(), mine.end(), std::back_inserter(diff)); + std::set_difference(theirs.begin(), 
theirs.end(), mine.begin(), mine.end(), + std::back_inserter(diff)); // Find maximum probability value for every combination of our keys. Ordering keys(diff); @@ -281,24 +287,26 @@ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune( // Check the max value for every combination of our keys. // If the max value is 0.0, we can prune the corresponding conditional. - auto pruner = - [&](const Assignment& choices, - const GaussianConditional::shared_ptr& conditional) -> GaussianConditional::shared_ptr { + auto pruner = [&](const Assignment &choices, + const GaussianConditional::shared_ptr &conditional) + -> GaussianConditional::shared_ptr { return (max->evaluate(choices) == 0.0) ? nullptr : conditional; }; auto pruned_conditionals = conditionals_.apply(pruner); - return std::make_shared(discreteKeys(), pruned_conditionals); + return std::make_shared(discreteKeys(), + pruned_conditionals); } /* *******************************************************************************/ -double HybridGaussianConditional::logProbability(const HybridValues& values) const { +double HybridGaussianConditional::logProbability( + const HybridValues &values) const { auto conditional = conditionals_(values.discrete()); return conditional->logProbability(values.continuous()); } /* *******************************************************************************/ -double HybridGaussianConditional::evaluate(const HybridValues& values) const { +double HybridGaussianConditional::evaluate(const HybridValues &values) const { auto conditional = conditionals_(values.discrete()); return conditional->evaluate(values.continuous()); } diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index 44a56910ed..afc818c4a7 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -50,12 +50,15 @@ struct HybridGaussianFactor::ConstructorHelper { } // Build the FactorValuePairs DecisionTree pairs = FactorValuePairs( - 
DecisionTree(discreteKeys, factors), [](const auto& f) { - return std::pair{f, f ? 0.0 : std::numeric_limits::infinity()}; + DecisionTree(discreteKeys, factors), + [](const auto& f) { + return std::pair{f, + f ? 0.0 : std::numeric_limits::infinity()}; }); } - /// Constructor for a single discrete key and a vector of GaussianFactorValuePairs + /// Constructor for a single discrete key and a vector of + /// GaussianFactorValuePairs ConstructorHelper(const DiscreteKey& discreteKey, const std::vector& factorPairs) : discreteKeys({discreteKey}) { @@ -71,8 +74,10 @@ struct HybridGaussianFactor::ConstructorHelper { pairs = FactorValuePairs(discreteKeys, factorPairs); } - /// Constructor for a vector of discrete keys and a vector of GaussianFactorValuePairs - ConstructorHelper(const DiscreteKeys& discreteKeys, const FactorValuePairs& factorPairs) + /// Constructor for a vector of discrete keys and a vector of + /// GaussianFactorValuePairs + ConstructorHelper(const DiscreteKeys& discreteKeys, + const FactorValuePairs& factorPairs) : discreteKeys(discreteKeys) { // Extract continuous keys from the first non-null factor // TODO: just stop after first non-null factor @@ -88,24 +93,27 @@ struct HybridGaussianFactor::ConstructorHelper { }; /* *******************************************************************************/ -HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper& helper) - : Base(helper.continuousKeys, helper.discreteKeys), factors_(helper.pairs) {} +HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper &helper) + : Base(helper.continuousKeys, helper.discreteKeys), + factors_(helper.pairs) {} HybridGaussianFactor::HybridGaussianFactor( - const DiscreteKey& discreteKey, const std::vector& factorPairs) - : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {} + const DiscreteKey &discreteKey, + const std::vector &factors) + : HybridGaussianFactor(ConstructorHelper(discreteKey, factors)) {} 
-HybridGaussianFactor::HybridGaussianFactor(const DiscreteKey& discreteKey, - const std::vector& factorPairs) +HybridGaussianFactor::HybridGaussianFactor( + const DiscreteKey &discreteKey, + const std::vector &factorPairs) : HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {} -HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys& discreteKeys, - const FactorValuePairs& factorPairs) - : HybridGaussianFactor(ConstructorHelper(discreteKeys, factorPairs)) {} +HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys, + const FactorValuePairs &factors) + : HybridGaussianFactor(ConstructorHelper(discreteKeys, factors)) {} /* *******************************************************************************/ -bool HybridGaussianFactor::equals(const HybridFactor& lf, double tol) const { - const This* e = dynamic_cast(&lf); +bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const { + const This *e = dynamic_cast(&lf); if (e == nullptr) return false; // This will return false if either factors_ is empty or e->factors_ is @@ -122,7 +130,8 @@ bool HybridGaussianFactor::equals(const HybridFactor& lf, double tol) const { } /* *******************************************************************************/ -void HybridGaussianFactor::print(const std::string& s, const KeyFormatter& formatter) const { +void HybridGaussianFactor::print(const std::string &s, + const KeyFormatter &formatter) const { std::cout << (s.empty() ? 
"" : s + "\n"); HybridFactor::print("", formatter); std::cout << "{\n"; @@ -148,7 +157,8 @@ void HybridGaussianFactor::print(const std::string& s, const KeyFormatter& forma } /* *******************************************************************************/ -GaussianFactorValuePair HybridGaussianFactor::operator()(const DiscreteValues& assignment) const { +GaussianFactorValuePair HybridGaussianFactor::operator()( + const DiscreteValues &assignment) const { return factors_(assignment); } @@ -156,15 +166,17 @@ GaussianFactorValuePair HybridGaussianFactor::operator()(const DiscreteValues& a HybridGaussianProductFactor HybridGaussianFactor::asProductFactor() const { // Implemented by creating a new DecisionTree where: // - The structure (keys and assignments) is preserved from factors_ - // - Each leaf converted to a GaussianFactorGraph with just the factor and its scalar. - return {{factors_, [](const auto& pair) -> std::pair { + // - Each leaf converted to a GaussianFactorGraph with just the factor and its + // scalar. + return {{factors_, + [](const auto& pair) -> std::pair { return {GaussianFactorGraph{pair.first}, pair.second}; }}}; } /* *******************************************************************************/ AlgebraicDecisionTree HybridGaussianFactor::errorTree( - const VectorValues& continuousValues) const { + const VectorValues &continuousValues) const { // functor to convert from sharedFactor to double error value. auto errorFunc = [&continuousValues](const auto& pair) { return pair.first ? 
pair.first->error(continuousValues) + pair.second diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 08debcf48f..48a8bf48de 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -191,4 +191,4 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { template <> struct traits : public Testable {}; -} // namespace gtsam +} // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 2d90f2dea9..26cdb8dd2c 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -55,21 +55,23 @@ template class EliminateableFactorGraph; using std::dynamic_pointer_cast; using OrphanWrapper = BayesTreeOrphanWrapper; -using Result = std::pair, GaussianFactor::shared_ptr>; +using Result = + std::pair, GaussianFactor::shared_ptr>; using ResultTree = DecisionTree>; static const VectorValues kEmpty; /* ************************************************************************ */ // Throw a runtime exception for method specified in string s, and factor f: -static void throwRuntimeError(const std::string& s, const std::shared_ptr& f) { +static void throwRuntimeError(const std::string& s, + const std::shared_ptr& f) { auto& fr = *f; - throw std::runtime_error(s + " not implemented for factor type " + demangle(typeid(fr).name()) + - "."); + throw std::runtime_error(s + " not implemented for factor type " + + demangle(typeid(fr).name()) + "."); } /* ************************************************************************ */ -const Ordering HybridOrdering(const HybridGaussianFactorGraph& graph) { +const Ordering HybridOrdering(const HybridGaussianFactorGraph &graph) { KeySet discrete_keys = graph.discreteKeySet(); const VariableIndex index(graph); return Ordering::ColamdConstrainedLast( @@ -77,20 +79,21 @@ const Ordering HybridOrdering(const HybridGaussianFactorGraph& graph) { } /* 
************************************************************************ */ -static void printFactor(const std::shared_ptr& factor, - const DiscreteValues& assignment, - const KeyFormatter& keyFormatter) { - if (auto hgf = std::dynamic_pointer_cast(factor)) { +static void printFactor(const std::shared_ptr &factor, + const DiscreteValues &assignment, + const KeyFormatter &keyFormatter) { + if (auto hgf = dynamic_pointer_cast(factor)) { if (assignment.empty()) hgf->print("HybridGaussianFactor:", keyFormatter); else - hgf->operator()(assignment).first->print("HybridGaussianFactor, component:", keyFormatter); - } else if (auto gf = std::dynamic_pointer_cast(factor)) { + hgf->operator()(assignment) + .first->print("HybridGaussianFactor, component:", keyFormatter); + } else if (auto gf = dynamic_pointer_cast(factor)) { factor->print("GaussianFactor:\n", keyFormatter); - } else if (auto df = std::dynamic_pointer_cast(factor)) { + } else if (auto df = dynamic_pointer_cast(factor)) { factor->print("DiscreteFactor:\n", keyFormatter); - } else if (auto hc = std::dynamic_pointer_cast(factor)) { + } else if (auto hc = dynamic_pointer_cast(factor)) { if (hc->isContinuous()) { factor->print("GaussianConditional:\n", keyFormatter); } else if (hc->isDiscrete()) { @@ -99,7 +102,9 @@ static void printFactor(const std::shared_ptr& factor, if (assignment.empty()) hc->print("HybridConditional:", keyFormatter); else - hc->asHybrid()->choose(assignment)->print("HybridConditional, component:\n", keyFormatter); + hc->asHybrid() + ->choose(assignment) + ->print("HybridConditional, component:\n", keyFormatter); } } else { factor->print("Unknown factor type\n", keyFormatter); @@ -128,15 +133,15 @@ void HybridGaussianFactorGraph::print(const std::string& s, /* ************************************************************************ */ void HybridGaussianFactorGraph::printErrors( - const HybridValues& values, - const std::string& str, - const KeyFormatter& keyFormatter, - const std::function& - 
printCondition) const { - std::cout << str << " size: " << size() << std::endl << std::endl; + const HybridValues &values, const std::string &str, + const KeyFormatter &keyFormatter, + const std::function + &printCondition) const { + std::cout << str << "size: " << size() << std::endl << std::endl; for (size_t i = 0; i < factors_.size(); i++) { - auto&& factor = factors_[i]; + auto &&factor = factors_[i]; if (factor == nullptr) { std::cout << "Factor " << i << ": nullptr\n"; continue; @@ -154,7 +159,8 @@ void HybridGaussianFactorGraph::printErrors( } /* ************************************************************************ */ -HybridGaussianProductFactor HybridGaussianFactorGraph::collectProductFactor() const { +HybridGaussianProductFactor HybridGaussianFactorGraph::collectProductFactor() + const { HybridGaussianProductFactor result; for (auto& f : factors_) { @@ -194,10 +200,11 @@ HybridGaussianProductFactor HybridGaussianFactorGraph::collectProductFactor() co } /* ************************************************************************ */ -static std::pair> continuousElimination( - const HybridGaussianFactorGraph& factors, const Ordering& frontalKeys) { +static std::pair> +continuousElimination(const HybridGaussianFactorGraph &factors, + const Ordering &frontalKeys) { GaussianFactorGraph gfg; - for (auto& f : factors) { + for (auto &f : factors) { if (auto gf = dynamic_pointer_cast(f)) { gfg.push_back(gf); } else if (auto orphan = dynamic_pointer_cast(f)) { @@ -217,20 +224,20 @@ static std::pair> continu } /* ************************************************************************ */ -static std::pair> discreteElimination( - const HybridGaussianFactorGraph& factors, const Ordering& frontalKeys) { +static std::pair> +discreteElimination(const HybridGaussianFactorGraph &factors, + const Ordering &frontalKeys) { DiscreteFactorGraph dfg; - for (auto& f : factors) { + for (auto &f : factors) { if (auto df = dynamic_pointer_cast(f)) { dfg.push_back(df); } else if 
(auto gmf = dynamic_pointer_cast(f)) { // Case where we have a HybridGaussianFactor with no continuous keys. // In this case, compute discrete probabilities. - // TODO(frank): What about the scalar!? auto potential = [&](const auto& pair) -> double { auto [factor, scalar] = pair; - // If the factor is null, it has been pruned, hence return potential of zero + // If factor is null, it has been pruned, hence return potential zero if (!factor) return 0.0; return exp(-scalar - factor->error(kEmpty)); }; @@ -262,9 +269,10 @@ static std::pair> discret * The residual error contains no keys, and only * depends on the discrete separator if present. */ -static std::shared_ptr createDiscreteFactor(const ResultTree& eliminationResults, - const DiscreteKeys& discreteSeparator) { - auto potential = [&](const auto& pair) -> double { +static std::shared_ptr createDiscreteFactor( + const ResultTree& eliminationResults, + const DiscreteKeys &discreteSeparator) { + auto potential = [&](const auto &pair) -> double { const auto& [conditional, factor] = pair.first; const double scalar = pair.second; if (conditional && factor) { @@ -276,7 +284,8 @@ static std::shared_ptr createDiscreteFactor(const ResultTree& eliminatio const double error = scalar + factor->error(kEmpty) - negLogK; return exp(-error); } else if (!conditional && !factor) { - // If the factor is null, it has been pruned, hence return potential of zero + // If the factor is null, it has been pruned, hence return potential of + // zero return 0.0; } else { throw std::runtime_error("createDiscreteFactor has mixed NULLs"); @@ -290,11 +299,12 @@ static std::shared_ptr createDiscreteFactor(const ResultTree& eliminatio /* *******************************************************************************/ // Create HybridGaussianFactor on the separator, taking care to correct // for conditional constants. 
-static std::shared_ptr createHybridGaussianFactor(const ResultTree& eliminationResults, - const DiscreteKeys& discreteSeparator) { +static std::shared_ptr createHybridGaussianFactor( + const ResultTree &eliminationResults, + const DiscreteKeys &discreteSeparator) { // Correct for the normalization constant used up by the conditional - auto correct = [&](const auto& pair) -> GaussianFactorValuePair { - const auto& [conditional, factor] = pair.first; + auto correct = [&](const auto &pair) -> GaussianFactorValuePair { + const auto &[conditional, factor] = pair.first; const double scalar = pair.second; if (conditional && factor) { const double negLogK = conditional->negLogConstant(); @@ -305,29 +315,32 @@ static std::shared_ptr createHybridGaussianFactor(const ResultTree& elim throw std::runtime_error("createHybridGaussianFactors has mixed NULLs"); } }; - DecisionTree newFactors(eliminationResults, correct); + DecisionTree newFactors(eliminationResults, + correct); return std::make_shared(discreteSeparator, newFactors); } /* *******************************************************************************/ /// Get the discrete keys from the HybridGaussianFactorGraph as DiscreteKeys. -static auto GetDiscreteKeys = [](const HybridGaussianFactorGraph& hfg) -> DiscreteKeys { +static auto GetDiscreteKeys = + [](const HybridGaussianFactorGraph &hfg) -> DiscreteKeys { const std::set discreteKeySet = hfg.discreteKeys(); return {discreteKeySet.begin(), discreteKeySet.end()}; }; /* *******************************************************************************/ std::pair> -HybridGaussianFactorGraph::eliminate(const Ordering& keys) const { +HybridGaussianFactorGraph::eliminate(const Ordering &keys) const { // Since we eliminate all continuous variables first, // the discrete separator will be *all* the discrete keys. 
DiscreteKeys discreteSeparator = GetDiscreteKeys(*this); // Collect all the factors to create a set of Gaussian factor graphs in a - // decision tree indexed by all discrete keys involved. Just like any hybrid factor, every - // assignment also has a scalar error, in this case the sum of all errors in the graph. This error - // is assignment-specific and accounts for any difference in noise models used. + // decision tree indexed by all discrete keys involved. Just like any hybrid + // factor, every assignment also has a scalar error, in this case the sum of + // all errors in the graph. This error is assignment-specific and accounts for + // any difference in noise models used. HybridGaussianProductFactor productFactor = collectProductFactor(); // Convert factor graphs with a nullptr to an empty factor graph. @@ -337,8 +350,8 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const { // This is the elimination method on the leaf nodes bool someContinuousLeft = false; - auto eliminate = - [&](const std::pair& pair) -> std::pair { + auto eliminate = [&](const std::pair& pair) + -> std::pair { const auto& [graph, scalar] = pair; if (graph.empty()) { @@ -346,7 +359,8 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const { } // Expensive elimination of product factor. - auto result = EliminatePreferCholesky(graph, keys); /// <<<<<< MOST COMPUTE IS HERE + auto result = + EliminatePreferCholesky(graph, keys); /// <<<<<< MOST COMPUTE IS HERE // Record whether there any continuous variables left someContinuousLeft |= !result.second->empty(); @@ -361,15 +375,16 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const { // If there are no more continuous parents we create a DiscreteFactor with the // error for each discrete choice. Otherwise, create a HybridGaussianFactor // on the separator, taking care to correct for conditional constants. - auto newFactor = someContinuousLeft - ? 
createHybridGaussianFactor(eliminationResults, discreteSeparator) - : createDiscreteFactor(eliminationResults, discreteSeparator); + auto newFactor = + someContinuousLeft + ? createHybridGaussianFactor(eliminationResults, discreteSeparator) + : createDiscreteFactor(eliminationResults, discreteSeparator); // Create the HybridGaussianConditional from the conditionals HybridGaussianConditional::Conditionals conditionals( eliminationResults, [](const auto& pair) { return pair.first.first; }); - auto hybridGaussian = - std::make_shared(discreteSeparator, conditionals); + auto hybridGaussian = std::make_shared( + discreteSeparator, conditionals); return {std::make_shared(hybridGaussian), newFactor}; } @@ -389,7 +404,8 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const { * be INCORRECT and there will be NO error raised. */ std::pair> // -EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys) { +EliminateHybrid(const HybridGaussianFactorGraph &factors, + const Ordering &keys) { // NOTE: Because we are in the Conditional Gaussian regime there are only // a few cases: // 1. continuous variable, make a hybrid Gaussian conditional if there are @@ -440,7 +456,7 @@ EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys) // 3. 
if not, we do hybrid elimination: bool only_discrete = true, only_continuous = true; - for (auto&& factor : factors) { + for (auto &&factor : factors) { if (auto hybrid_factor = std::dynamic_pointer_cast(factor)) { if (hybrid_factor->isDiscrete()) { only_continuous = false; @@ -451,9 +467,11 @@ EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys) only_discrete = false; break; } - } else if (auto cont_factor = std::dynamic_pointer_cast(factor)) { + } else if (auto cont_factor = + std::dynamic_pointer_cast(factor)) { only_discrete = false; - } else if (auto discrete_factor = std::dynamic_pointer_cast(factor)) { + } else if (auto discrete_factor = + std::dynamic_pointer_cast(factor)) { only_continuous = false; } } @@ -474,17 +492,17 @@ EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys) /* ************************************************************************ */ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( - const VectorValues& continuousValues) const { + const VectorValues &continuousValues) const { AlgebraicDecisionTree result(0.0); // Iterate over each factor. - for (auto& factor : factors_) { + for (auto &factor : factors_) { if (auto hf = std::dynamic_pointer_cast(factor)) { // Add errorTree for hybrid factors, includes HybridGaussianConditionals! 
result = result + hf->errorTree(continuousValues); } else if (auto df = std::dynamic_pointer_cast(factor)) { // If discrete, just add its errorTree as well result = result + df->errorTree(); - } else if (auto gf = std::dynamic_pointer_cast(factor)) { + } else if (auto gf = dynamic_pointer_cast(factor)) { // For a continuous only factor, just add its error result = result + gf->error(continuousValues); } else { @@ -495,7 +513,7 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::errorTree( } /* ************************************************************************ */ -double HybridGaussianFactorGraph::probPrime(const HybridValues& values) const { +double HybridGaussianFactorGraph::probPrime(const HybridValues &values) const { double error = this->error(values); // NOTE: The 0.5 term is handled by each factor return std::exp(-error); @@ -503,7 +521,7 @@ double HybridGaussianFactorGraph::probPrime(const HybridValues& values) const { /* ************************************************************************ */ AlgebraicDecisionTree HybridGaussianFactorGraph::discretePosterior( - const VectorValues& continuousValues) const { + const VectorValues &continuousValues) const { AlgebraicDecisionTree errors = this->errorTree(continuousValues); AlgebraicDecisionTree p = errors.apply([](double error) { // NOTE: The 0.5 term is handled by each factor @@ -513,18 +531,19 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::discretePosterior( } /* ************************************************************************ */ -GaussianFactorGraph HybridGaussianFactorGraph::choose(const DiscreteValues& assignment) const { +GaussianFactorGraph HybridGaussianFactorGraph::choose( + const DiscreteValues &assignment) const { GaussianFactorGraph gfg; - for (auto&& f : *this) { + for (auto &&f : *this) { if (auto gf = std::dynamic_pointer_cast(f)) { gfg.push_back(gf); } else if (auto gc = std::dynamic_pointer_cast(f)) { gfg.push_back(gf); } else if (auto hgf = std::dynamic_pointer_cast(f)) { 
gfg.push_back((*hgf)(assignment).first); - } else if (auto hgc = std::dynamic_pointer_cast(f)) { + } else if (auto hgc = dynamic_pointer_cast(f)) { gfg.push_back((*hgc)(assignment)); - } else if (auto hc = std::dynamic_pointer_cast(f)) { + } else if (auto hc = dynamic_pointer_cast(f)) { if (auto gc = hc->asGaussian()) gfg.push_back(gc); else if (auto hgc = hc->asHybrid()) diff --git a/gtsam/hybrid/HybridGaussianProductFactor.cpp b/gtsam/hybrid/HybridGaussianProductFactor.cpp index 2430750d1d..2e95ea8d11 100644 --- a/gtsam/hybrid/HybridGaussianProductFactor.cpp +++ b/gtsam/hybrid/HybridGaussianProductFactor.cpp @@ -59,7 +59,8 @@ HybridGaussianProductFactor& HybridGaussianProductFactor::operator+=( return *this; } -void HybridGaussianProductFactor::print(const std::string& s, const KeyFormatter& formatter) const { +void HybridGaussianProductFactor::print(const std::string& s, + const KeyFormatter& formatter) const { KeySet keys; auto printer = [&](const Y& y) { if (keys.empty()) keys = y.first.keys(); @@ -77,8 +78,10 @@ void HybridGaussianProductFactor::print(const std::string& s, const KeyFormatter HybridGaussianProductFactor HybridGaussianProductFactor::removeEmpty() const { auto emptyGaussian = [](const Y& y) { - bool hasNull = std::any_of( - y.first.begin(), y.first.end(), [](const GaussianFactor::shared_ptr& ptr) { return !ptr; }); + bool hasNull = + std::any_of(y.first.begin(), + y.first.end(), + [](const GaussianFactor::shared_ptr& ptr) { return !ptr; }); return hasNull ? 
Y{GaussianFactorGraph(), 0.0} : y; }; return {Base(*this, emptyGaussian)}; diff --git a/gtsam/hybrid/HybridGaussianProductFactor.h b/gtsam/hybrid/HybridGaussianProductFactor.h index 6d33daa1ba..9c2aee74ab 100644 --- a/gtsam/hybrid/HybridGaussianProductFactor.h +++ b/gtsam/hybrid/HybridGaussianProductFactor.h @@ -60,13 +60,16 @@ class HybridGaussianProductFactor ///@{ /// Add GaussianFactor into HybridGaussianProductFactor - HybridGaussianProductFactor operator+(const GaussianFactor::shared_ptr& factor) const; + HybridGaussianProductFactor operator+( + const GaussianFactor::shared_ptr& factor) const; /// Add HybridGaussianFactor into HybridGaussianProductFactor - HybridGaussianProductFactor operator+(const HybridGaussianFactor& factor) const; + HybridGaussianProductFactor operator+( + const HybridGaussianFactor& factor) const; /// Add-assign operator for GaussianFactor - HybridGaussianProductFactor& operator+=(const GaussianFactor::shared_ptr& factor); + HybridGaussianProductFactor& operator+=( + const GaussianFactor::shared_ptr& factor); /// Add-assign operator for HybridGaussianFactor HybridGaussianProductFactor& operator+=(const HybridGaussianFactor& factor); @@ -81,7 +84,8 @@ class HybridGaussianProductFactor * @param s Optional string to prepend * @param formatter Optional key formatter */ - void print(const std::string& s = "", const KeyFormatter& formatter = DefaultKeyFormatter) const; + void print(const std::string& s = "", + const KeyFormatter& formatter = DefaultKeyFormatter) const; /** * @brief Check if this HybridGaussianProductFactor is equal to another @@ -89,9 +93,11 @@ class HybridGaussianProductFactor * @param tol Tolerance for floating point comparisons * @return true if equal, false otherwise */ - bool equals(const HybridGaussianProductFactor& other, double tol = 1e-9) const { + bool equals(const HybridGaussianProductFactor& other, + double tol = 1e-9) const { return Base::equals(other, [tol](const Y& a, const Y& b) { - return 
a.first.equals(b.first, tol) && std::abs(a.second - b.second) < tol; + return a.first.equals(b.first, tol) && + std::abs(a.second - b.second) < tol; }); } @@ -102,12 +108,13 @@ class HybridGaussianProductFactor /** * @brief Remove empty GaussianFactorGraphs from the decision tree - * @return A new HybridGaussianProductFactor with empty GaussianFactorGraphs removed + * @return A new HybridGaussianProductFactor with empty GaussianFactorGraphs + * removed * * If any GaussianFactorGraph in the decision tree contains a nullptr, convert - * that leaf to an empty GaussianFactorGraph with zero scalar sum. This is needed because the - * DecisionTree will otherwise create a GaussianFactorGraph with a single (null) factor, which - * doesn't register as null. + * that leaf to an empty GaussianFactorGraph with zero scalar sum. This is + * needed because the DecisionTree will otherwise create a GaussianFactorGraph + * with a single (null) factor, which doesn't register as null. */ HybridGaussianProductFactor removeEmpty() const; @@ -116,6 +123,7 @@ class HybridGaussianProductFactor // Testable traits template <> -struct traits : public Testable {}; +struct traits + : public Testable {}; } // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp index 56b75d15ee..2f5031cf28 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp @@ -181,19 +181,19 @@ HybridGaussianFactorGraph::shared_ptr HybridNonlinearFactorGraph::linearize( /* ************************************************************************* */ AlgebraicDecisionTree HybridNonlinearFactorGraph::errorTree( - const Values& continuousValues) const { + const Values& values) const { AlgebraicDecisionTree result(0.0); // Iterate over each factor. for (auto& factor : factors_) { if (auto hnf = std::dynamic_pointer_cast(factor)) { // Compute factor error and add it. 
- result = result + hnf->errorTree(continuousValues); + result = result + hnf->errorTree(values); } else if (auto nf = std::dynamic_pointer_cast(factor)) { // If continuous only, get the (double) error // and add it to every leaf of the result - result = result + nf->error(continuousValues); + result = result + nf->error(values); } else if (auto df = std::dynamic_pointer_cast(factor)) { // If discrete, just add its errorTree as well diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h index dd18cfa601..f79f7b4524 100644 --- a/gtsam/hybrid/HybridNonlinearFactorGraph.h +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -98,10 +98,10 @@ class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { * * @note: Gaussian and hybrid Gaussian factors are not considered! * - * @param continuousValues Manifold values at which to compute the error. + * @param values Manifold values at which to compute the error. * @return AlgebraicDecisionTree */ - AlgebraicDecisionTree errorTree(const Values& continuousValues) const; + AlgebraicDecisionTree errorTree(const Values& values) const; /** * @brief Computer posterior P(M|X=x) when all continuous values X are given. diff --git a/gtsam/hybrid/tests/testGaussianMixture.cpp b/gtsam/hybrid/tests/testGaussianMixture.cpp index 6bd9a22a87..14bef5fbb4 100644 --- a/gtsam/hybrid/tests/testGaussianMixture.cpp +++ b/gtsam/hybrid/tests/testGaussianMixture.cpp @@ -40,7 +40,8 @@ const DiscreteKey m(M(0), 2); const DiscreteValues m1Assignment{{M(0), 1}}; // Define a 50/50 prior on the mode -DiscreteConditional::shared_ptr mixing = std::make_shared(m, "60/40"); +DiscreteConditional::shared_ptr mixing = + std::make_shared(m, "60/40"); /// Gaussian density function double Gaussian(double mu, double sigma, double z) { @@ -52,7 +53,8 @@ double Gaussian(double mu, double sigma, double z) { * If sigma0 == sigma1, it simplifies to a sigmoid function. * Hardcodes 60/40 prior on mode. 
*/ -double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, double z) { +double prob_m_z(double mu0, double mu1, double sigma0, double sigma1, + double z) { const double p0 = 0.6 * Gaussian(mu0, sigma0, z); const double p1 = 0.4 * Gaussian(mu1, sigma1, z); return p1 / (p0 + p1); @@ -68,13 +70,15 @@ TEST(GaussianMixture, GaussianMixtureModel) { // Create a Gaussian mixture model p(z|m) with same sigma. HybridBayesNet gmm; - std::vector> parameters{{Vector1(mu0), sigma}, {Vector1(mu1), sigma}}; + std::vector> parameters{{Vector1(mu0), sigma}, + {Vector1(mu1), sigma}}; gmm.emplace_shared(m, Z(0), parameters); gmm.push_back(mixing); // At the halfway point between the means, we should get P(m|z)=0.5 double midway = mu1 - mu0; - auto eliminationResult = gmm.toFactorGraph({{Z(0), Vector1(midway)}}).eliminateSequential(); + auto eliminationResult = + gmm.toFactorGraph({{Z(0), Vector1(midway)}}).eliminateSequential(); auto pMid = *eliminationResult->at(0)->asDiscrete(); EXPECT(assert_equal(DiscreteConditional(m, "60/40"), pMid)); @@ -84,7 +88,8 @@ TEST(GaussianMixture, GaussianMixtureModel) { const double expected = prob_m_z(mu0, mu1, sigma, sigma, z); // Workflow 1: convert HBN to HFG and solve - auto eliminationResult1 = gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential(); + auto eliminationResult1 = + gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential(); auto posterior1 = *eliminationResult1->at(0)->asDiscrete(); EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8); @@ -109,7 +114,8 @@ TEST(GaussianMixture, GaussianMixtureModel2) { // Create a Gaussian mixture model p(z|m) with same sigma. 
HybridBayesNet gmm; - std::vector> parameters{{Vector1(mu0), sigma0}, {Vector1(mu1), sigma1}}; + std::vector> parameters{{Vector1(mu0), sigma0}, + {Vector1(mu1), sigma1}}; gmm.emplace_shared(m, Z(0), parameters); gmm.push_back(mixing); @@ -119,15 +125,18 @@ TEST(GaussianMixture, GaussianMixtureModel2) { const VectorValues vv{{Z(0), Vector1(zMax)}}; auto gfg = gmm.toFactorGraph(vv); - // Equality of posteriors asserts that the elimination is correct (same ratios for all modes) + // Equality of posteriors asserts that the elimination is correct (same ratios + // for all modes) const auto& expectedDiscretePosterior = gmm.discretePosterior(vv); EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); // Eliminate the graph! auto eliminationResultMax = gfg.eliminateSequential(); - // Equality of posteriors asserts that the elimination is correct (same ratios for all modes) - EXPECT(assert_equal(expectedDiscretePosterior, eliminationResultMax->discretePosterior(vv))); + // Equality of posteriors asserts that the elimination is correct (same ratios + // for all modes) + EXPECT(assert_equal(expectedDiscretePosterior, + eliminationResultMax->discretePosterior(vv))); auto pMax = *eliminationResultMax->at(0)->asDiscrete(); EXPECT(assert_equal(DiscreteConditional(m, "42/58"), pMax, 1e-4)); @@ -138,7 +147,8 @@ TEST(GaussianMixture, GaussianMixtureModel2) { const double expected = prob_m_z(mu0, mu1, sigma0, sigma1, z); // Workflow 1: convert HBN to HFG and solve - auto eliminationResult1 = gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential(); + auto eliminationResult1 = + gmm.toFactorGraph({{Z(0), Vector1(z)}}).eliminateSequential(); auto posterior1 = *eliminationResult1->at(0)->asDiscrete(); EXPECT_DOUBLES_EQUAL(expected, posterior1(m1Assignment), 1e-8); diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 438bfd2670..d4192cb712 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ 
b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -168,8 +168,10 @@ TEST(HybridBayesNet, Tiny) { EXPECT(!pruned.equals(bayesNet)); // error - const double error0 = chosen0.error(vv) + gc0->negLogConstant() - px->negLogConstant() - log(0.4); - const double error1 = chosen1.error(vv) + gc1->negLogConstant() - px->negLogConstant() - log(0.6); + const double error0 = chosen0.error(vv) + gc0->negLogConstant() - + px->negLogConstant() - log(0.4); + const double error1 = chosen1.error(vv) + gc1->negLogConstant() - + px->negLogConstant() - log(0.6); // print errors: EXPECT_DOUBLES_EQUAL(error0, bayesNet.error(zero), 1e-9); EXPECT_DOUBLES_EQUAL(error1, bayesNet.error(one), 1e-9); @@ -190,7 +192,6 @@ TEST(HybridBayesNet, Tiny) { // toFactorGraph auto fg = bayesNet.toFactorGraph({{Z(0), Vector1(5.0)}}); EXPECT_LONGS_EQUAL(3, fg.size()); - GTSAM_PRINT(fg); // Create the product factor for eliminating x0: HybridGaussianFactorGraph factors_x0; @@ -204,9 +205,7 @@ TEST(HybridBayesNet, Tiny) { // Call eliminate and check scalar: auto result = factors_x0.eliminate({X(0)}); - GTSAM_PRINT(*result.first); auto df = std::dynamic_pointer_cast(result.second); - GTSAM_PRINT(df->errorTree()); // Check that the ratio of probPrime to evaluate is the same for all modes. std::vector ratio(2); @@ -228,13 +227,17 @@ TEST(HybridBayesNet, Tiny) { /* ****************************************************************************/ // Hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia). 
namespace different_sigmas { -const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0); +const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1), + Vector1(-4.0), 5.0); -const std::vector> parms{{Vector1(5), 2.0}, {Vector1(2), 3.0}}; +const std::vector> parms{{Vector1(5), 2.0}, + {Vector1(2), 3.0}}; const auto hgc = std::make_shared(Asia, X(1), parms); const auto prior = std::make_shared(Asia, "99/1"); -auto wrap = [](const auto& c) { return std::make_shared(c); }; +auto wrap = [](const auto& c) { + return std::make_shared(c); +}; const HybridBayesNet bayesNet{wrap(gc), wrap(hgc), wrap(prior)}; // Create values at which to evaluate. @@ -248,8 +251,8 @@ TEST(HybridBayesNet, evaluateHybrid) { const double conditionalProbability = gc->evaluate(values.continuous()); const double mixtureProbability = hgc->evaluate(values); - EXPECT_DOUBLES_EQUAL( - conditionalProbability * mixtureProbability * 0.99, bayesNet.evaluate(values), 1e-9); + EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99, + bayesNet.evaluate(values), 1e-9); } /* ****************************************************************************/ @@ -271,10 +274,14 @@ TEST(HybridBayesNet, Choose) { EXPECT_LONGS_EQUAL(4, gbn.size()); - EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment), *gbn.at(0))); - EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment), *gbn.at(1))); - EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment), *gbn.at(2))); - EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment), *gbn.at(3))); + EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment), + *gbn.at(0))); + EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment), + *gbn.at(1))); + EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment), + *gbn.at(2))); + EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment), + *gbn.at(3))); } /* 
****************************************************************************/ @@ -311,7 +318,8 @@ TEST(HybridBayesNet, OptimizeAssignment) { TEST(HybridBayesNet, Optimize) { Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1"); - HybridBayesNet::shared_ptr hybridBayesNet = s.linearizedFactorGraph.eliminateSequential(); + HybridBayesNet::shared_ptr hybridBayesNet = + s.linearizedFactorGraph.eliminateSequential(); HybridValues delta = hybridBayesNet->optimize(); @@ -338,7 +346,8 @@ TEST(HybridBayesNet, Pruning) { // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1) Switching s(3); - HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential(); + HybridBayesNet::shared_ptr posterior = + s.linearizedFactorGraph.eliminateSequential(); EXPECT_LONGS_EQUAL(5, posterior->size()); // Optimize @@ -364,9 +373,12 @@ TEST(HybridBayesNet, Pruning) { logProbability += posterior->at(0)->asHybrid()->logProbability(hybridValues); logProbability += posterior->at(1)->asHybrid()->logProbability(hybridValues); logProbability += posterior->at(2)->asHybrid()->logProbability(hybridValues); - logProbability += posterior->at(3)->asDiscrete()->logProbability(hybridValues); - logProbability += posterior->at(4)->asDiscrete()->logProbability(hybridValues); - EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues), 1e-9); + logProbability += + posterior->at(3)->asDiscrete()->logProbability(hybridValues); + logProbability += + posterior->at(4)->asDiscrete()->logProbability(hybridValues); + EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues), + 1e-9); // Check agreement with discrete posterior // double density = exp(logProbability); @@ -387,7 +399,8 @@ TEST(HybridBayesNet, Pruning) { TEST(HybridBayesNet, Prune) { Switching s(4); - HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential(); + HybridBayesNet::shared_ptr posterior = + s.linearizedFactorGraph.eliminateSequential(); 
EXPECT_LONGS_EQUAL(7, posterior->size()); HybridValues delta = posterior->optimize(); @@ -404,7 +417,8 @@ TEST(HybridBayesNet, Prune) { TEST(HybridBayesNet, UpdateDiscreteConditionals) { Switching s(4); - HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential(); + HybridBayesNet::shared_ptr posterior = + s.linearizedFactorGraph.eliminateSequential(); EXPECT_LONGS_EQUAL(7, posterior->size()); DiscreteConditional joint; @@ -416,7 +430,8 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { auto prunedDecisionTree = joint.prune(maxNrLeaves); #ifdef GTSAM_DT_MERGING - EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/, prunedDecisionTree.nrLeaves()); + EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/, + prunedDecisionTree.nrLeaves()); #else EXPECT_LONGS_EQUAL(8 /*full tree*/, prunedDecisionTree.nrLeaves()); #endif @@ -424,14 +439,16 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { // regression // NOTE(Frank): I had to include *three* non-zeroes here now. DecisionTreeFactor::ADT potentials( - s.modes, std::vector{0, 0, 0, 0.28739288, 0, 0.43106901, 0, 0.2815381}); + s.modes, + std::vector{0, 0, 0, 0.28739288, 0, 0.43106901, 0, 0.2815381}); DiscreteConditional expectedConditional(3, s.modes, potentials); // Prune! 
auto pruned = posterior->prune(maxNrLeaves); // Functor to verify values against the expectedConditional - auto checker = [&](const Assignment& assignment, double probability) -> double { + auto checker = [&](const Assignment& assignment, + double probability) -> double { // typecast so we can use this to get probability value DiscreteValues choices(assignment); if (prunedDecisionTree(choices) == 0) { @@ -446,7 +463,8 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { CHECK(pruned.at(0)->asDiscrete()); auto pruned_discrete_conditionals = pruned.at(0)->asDiscrete(); auto discrete_conditional_tree = - std::dynamic_pointer_cast(pruned_discrete_conditionals); + std::dynamic_pointer_cast( + pruned_discrete_conditionals); // The checker functor verifies the values for us. discrete_conditional_tree->apply(checker); @@ -460,10 +478,13 @@ TEST(HybridBayesNet, Sampling) { auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0)); nfg.emplace_shared>(X(0), 0.0, noise_model); - auto zero_motion = std::make_shared>(X(0), X(1), 0, noise_model); - auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); + auto zero_motion = + std::make_shared>(X(0), X(1), 0, noise_model); + auto one_motion = + std::make_shared>(X(0), X(1), 1, noise_model); nfg.emplace_shared( - DiscreteKey(M(0), 2), std::vector{zero_motion, one_motion}); + DiscreteKey(M(0), 2), + std::vector{zero_motion, one_motion}); DiscreteKey mode(M(0), 2); nfg.emplace_shared(mode, "1/1"); @@ -535,17 +556,18 @@ TEST(HybridBayesNet, ErrorTreeWithConditional) { hbn.emplace_shared(x0, Vector1(0.0), I_1x1, prior_model); // Add measurement P(z0 | x0) - hbn.emplace_shared(z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model); + hbn.emplace_shared(z0, Vector1(0.0), -I_1x1, x0, I_1x1, + measurement_model); // Add hybrid motion model double mu = 0.0; double sigma0 = 1e2, sigma1 = 1e-2; auto model0 = noiseModel::Isotropic::Sigma(1, sigma0); auto model1 = noiseModel::Isotropic::Sigma(1, sigma1); - auto c0 = - 
make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model0), - c1 = - make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1); + auto c0 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, + x0, -I_1x1, model0), + c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, + x0, -I_1x1, model1); DiscreteKey m1(M(2), 2); hbn.emplace_shared(m1, std::vector{c0, c1}); diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp index f31846cb3a..319c6d52b0 100644 --- a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -59,7 +59,7 @@ std::vector components(Key key) { return {std::make_shared(key, I_3x3, Z_3x1), std::make_shared(key, I_3x3, Vector3::Ones())}; } -} // namespace two +} // namespace two /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { @@ -142,9 +142,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { } /* ************************************************************************* */ -void dotPrint(const HybridGaussianFactorGraph::shared_ptr &hfg, - const HybridBayesTree::shared_ptr &hbt, - const Ordering &ordering) { +void dotPrint(const HybridGaussianFactorGraph::shared_ptr& hfg, + const HybridBayesTree::shared_ptr& hbt, + const Ordering& ordering) { DotWriter dw; dw.positionHints['c'] = 2; dw.positionHints['x'] = 1; @@ -179,13 +179,15 @@ TEST(HybridGaussianFactorGraph, Switching) { std::vector naturalX(N); std::iota(naturalX.begin(), naturalX.end(), 1); std::vector ordX; - std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX), - [](int x) { return X(x); }); + std::transform( + naturalX.begin(), naturalX.end(), std::back_inserter(ordX), [](int x) { + return X(x); + }); auto [ndX, lvls] = makeBinaryOrdering(ordX); std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering)); // TODO(dellaert): this has no effect! 
- for (auto &l : lvls) { + for (auto& l : lvls) { l = -l; } } @@ -193,8 +195,10 @@ TEST(HybridGaussianFactorGraph, Switching) { std::vector naturalC(N - 1); std::iota(naturalC.begin(), naturalC.end(), 1); std::vector ordC; - std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return M(x); }); + std::transform( + naturalC.begin(), naturalC.end(), std::back_inserter(ordC), [](int x) { + return M(x); + }); // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); const auto [ndC, lvls] = makeBinaryOrdering(ordC); @@ -233,13 +237,15 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { std::vector naturalX(N); std::iota(naturalX.begin(), naturalX.end(), 1); std::vector ordX; - std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX), - [](int x) { return X(x); }); + std::transform( + naturalX.begin(), naturalX.end(), std::back_inserter(ordX), [](int x) { + return X(x); + }); auto [ndX, lvls] = makeBinaryOrdering(ordX); std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering)); // TODO(dellaert): this has no effect! - for (auto &l : lvls) { + for (auto& l : lvls) { l = -l; } } @@ -247,8 +253,10 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { std::vector naturalC(N - 1); std::iota(naturalC.begin(), naturalC.end(), 1); std::vector ordC; - std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return M(x); }); + std::transform( + naturalC.begin(), naturalC.end(), std::back_inserter(ordC), [](int x) { + return M(x); + }); // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); const auto [ndC, lvls] = makeBinaryOrdering(ordC); @@ -408,8 +416,7 @@ TEST(HybridBayesTree, OptimizeAssignment) { // Create ordering. 
Ordering ordering; - for (size_t k = 0; k < s.K; k++) - ordering.push_back(X(k)); + for (size_t k = 0; k < s.K; k++) ordering.push_back(X(k)); const auto [hybridBayesNet, remainingFactorGraph] = s.linearizedFactorGraph.eliminatePartialSequential(ordering); @@ -451,21 +458,20 @@ TEST(HybridBayesTree, Optimize) { // Create ordering. Ordering ordering; - for (size_t k = 0; k < s.K; k++) - ordering.push_back(X(k)); + for (size_t k = 0; k < s.K; k++) ordering.push_back(X(k)); const auto [hybridBayesNet, remainingFactorGraph] = s.linearizedFactorGraph.eliminatePartialSequential(ordering); DiscreteFactorGraph dfg; - for (auto &&f : *remainingFactorGraph) { + for (auto&& f : *remainingFactorGraph) { auto discreteFactor = dynamic_pointer_cast(f); assert(discreteFactor); dfg.push_back(discreteFactor); } // Add the probabilities for each branch - DiscreteKeys discrete_keys = {m0, m1, m2}; + DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}}; vector probs = {0.012519475, 0.041280228, 0.075018647, 0.081663656, 0.037152205, 0.12248971, 0.07349729, 0.08}; dfg.emplace_shared(discrete_keys, probs); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 80812a3d8b..6206e5f459 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -37,8 +37,6 @@ // Include for test suite #include -#include - #include "Switching.h" using namespace std; diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 5631ac3219..e29c485afd 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -51,8 +51,10 @@ namespace equal_constants { // Create a simple HybridGaussianConditional const double commonSigma = 2.0; const std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma), - 
GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma)}; + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), + commonSigma), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), + commonSigma)}; const HybridGaussianConditional hybrid_conditional(mode, conditionals); } // namespace equal_constants @@ -77,8 +79,8 @@ TEST(HybridGaussianConditional, LogProbability) { using namespace equal_constants; for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - EXPECT_DOUBLES_EQUAL( - conditionals[mode]->logProbability(vv), hybrid_conditional.logProbability(hv), 1e-8); + EXPECT_DOUBLES_EQUAL(conditionals[mode]->logProbability(vv), + hybrid_conditional.logProbability(hv), 1e-8); } } @@ -90,7 +92,8 @@ TEST(HybridGaussianConditional, Error) { // Check result. DiscreteKeys discrete_keys{mode}; - std::vector leaves = {conditionals[0]->error(vv), conditionals[1]->error(vv)}; + std::vector leaves = {conditionals[0]->error(vv), + conditionals[1]->error(vv)}; AlgebraicDecisionTree expected(discrete_keys, leaves); EXPECT(assert_equal(expected, actual, 1e-6)); @@ -98,7 +101,8 @@ TEST(HybridGaussianConditional, Error) { // Check for non-tree version. for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv), hybrid_conditional.error(hv), 1e-8); + EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv), + hybrid_conditional.error(hv), 1e-8); } } @@ -113,8 +117,10 @@ TEST(HybridGaussianConditional, Likelihood) { // Check that the hybrid conditional error and the likelihood error are the // same. 
- EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), 1e-8); - EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), + 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), + 1e-8); // Check that likelihood error is as expected, i.e., just the errors of the // individual likelihoods, in the `equal_constants` case. @@ -128,7 +134,8 @@ TEST(HybridGaussianConditional, Likelihood) { std::vector ratio(2); for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); + ratio[mode] = + std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); } EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } @@ -138,8 +145,10 @@ namespace mode_dependent_constants { // Create a HybridGaussianConditional with mode-dependent noise models. // 0 is low-noise, 1 is high-noise. const std::vector conditionals{ - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 0.5), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0)}; + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), + 0.5), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), + 3.0)}; const HybridGaussianConditional hybrid_conditional(mode, conditionals); } // namespace mode_dependent_constants @@ -171,8 +180,9 @@ TEST(HybridGaussianConditional, Error2) { // Expected error is e(X) + log(sqrt(|2πΣ|)). // We normalize log(sqrt(|2πΣ|)) with min(negLogConstant) // so it is non-negative. 
- std::vector leaves = {conditionals[0]->error(vv) + negLogConstant0 - minErrorConstant, - conditionals[1]->error(vv) + negLogConstant1 - minErrorConstant}; + std::vector leaves = { + conditionals[0]->error(vv) + negLogConstant0 - minErrorConstant, + conditionals[1]->error(vv) + negLogConstant1 - minErrorConstant}; AlgebraicDecisionTree expected(discrete_keys, leaves); EXPECT(assert_equal(expected, actual, 1e-6)); @@ -180,10 +190,10 @@ TEST(HybridGaussianConditional, Error2) { // Check for non-tree version. for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - EXPECT_DOUBLES_EQUAL( - conditionals[mode]->error(vv) + conditionals[mode]->negLogConstant() - minErrorConstant, - hybrid_conditional.error(hv), - 1e-8); + EXPECT_DOUBLES_EQUAL(conditionals[mode]->error(vv) + + conditionals[mode]->negLogConstant() - + minErrorConstant, + hybrid_conditional.error(hv), 1e-8); } } @@ -198,8 +208,10 @@ TEST(HybridGaussianConditional, Likelihood2) { // Check that the hybrid conditional error and the likelihood error are as // expected, this invariant is the same as the equal noise case: - EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), 1e-8); - EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv0), likelihood->error(hv0), + 1e-8); + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), likelihood->error(hv1), + 1e-8); // Check the detailed JacobianFactor calculation for mode==1. 
{ @@ -208,18 +220,20 @@ TEST(HybridGaussianConditional, Likelihood2) { const auto jf1 = std::dynamic_pointer_cast(gf1); CHECK(jf1); - // Check that the JacobianFactor error with constants is equal to the conditional error: - EXPECT_DOUBLES_EQUAL( - hybrid_conditional.error(hv1), - jf1->error(hv1) + conditionals[1]->negLogConstant() - hybrid_conditional.negLogConstant(), - 1e-8); + // Check that the JacobianFactor error with constants is equal to the + // conditional error: + EXPECT_DOUBLES_EQUAL(hybrid_conditional.error(hv1), + jf1->error(hv1) + conditionals[1]->negLogConstant() - + hybrid_conditional.negLogConstant(), + 1e-8); } // Check that the ratio of probPrime to evaluate is the same for all modes. std::vector ratio(2); for (size_t mode : {0, 1}) { const HybridValues hv{vv, {{M(0), mode}}}; - ratio[mode] = std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); + ratio[mode] = + std::exp(-likelihood->error(hv)) / hybrid_conditional.evaluate(hv); } EXPECT_DOUBLES_EQUAL(ratio[0], ratio[1], 1e-8); } @@ -232,7 +246,8 @@ TEST(HybridGaussianConditional, Prune) { DiscreteKeys modes{{M(1), 2}, {M(2), 2}}; std::vector gcs; for (size_t i = 0; i < 4; i++) { - gcs.push_back(GaussianConditional::sharedMeanAndStddev(Z(0), Vector1(i + 1), i + 1)); + gcs.push_back( + GaussianConditional::sharedMeanAndStddev(Z(0), Vector1(i + 1), i + 1)); } auto empty = std::make_shared(); HybridGaussianConditional::Conditionals conditionals(modes, gcs); @@ -252,14 +267,8 @@ TEST(HybridGaussianConditional, Prune) { } } { - const std::vector potentials{0, - 0, - 0.5, - 0, // - 0, - 0, - 0.5, - 0}; + const std::vector potentials{0, 0, 0.5, 0, // + 0, 0, 0.5, 0}; const DecisionTreeFactor decisionTreeFactor(keys, potentials); const auto pruned = hgc.prune(decisionTreeFactor); @@ -268,14 +277,8 @@ TEST(HybridGaussianConditional, Prune) { EXPECT_LONGS_EQUAL(2, pruned->nrComponents()); } { - const std::vector potentials{0.2, - 0, - 0.3, - 0, // - 0, - 0, - 0.5, - 0}; + const std::vector 
potentials{0.2, 0, 0.3, 0, // + 0, 0, 0.5, 0}; const DecisionTreeFactor decisionTreeFactor(keys, potentials); const auto pruned = hgc.prune(decisionTreeFactor); diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 3b5a6a80c5..9fdc1aaea5 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -46,7 +46,6 @@ #include "Switching.h" #include "TinyHybridExample.h" -#include "gtsam/linear/GaussianFactorGraph.h" using namespace std; using namespace gtsam; @@ -74,7 +73,8 @@ TEST(HybridGaussianFactorGraph, Creation) { HybridGaussianConditional gm( m0, {std::make_shared(X(0), Z_3x1, I_3x3, X(1), I_3x3), - std::make_shared(X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)}); + std::make_shared( + X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)}); hfg.add(gm); EXPECT_LONGS_EQUAL(2, hfg.size()); @@ -118,7 +118,8 @@ TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) { auto factor = std::dynamic_pointer_cast(result.second); CHECK(factor); // regression test - EXPECT(assert_equal(DecisionTreeFactor{m1, "15.74961 15.74961"}, *factor, 1e-5)); + EXPECT( + assert_equal(DecisionTreeFactor{m1, "15.74961 15.74961"}, *factor, 1e-5)); } /* ************************************************************************* */ @@ -180,7 +181,8 @@ TEST(HybridBayesNet, Switching) { EXPECT_LONGS_EQUAL(4, graph.size()); // Create some continuous and discrete values - const VectorValues continuousValues{{X(0), Vector1(0.1)}, {X(1), Vector1(1.2)}}; + const VectorValues continuousValues{{X(0), Vector1(0.1)}, + {X(1), Vector1(1.2)}}; const DiscreteValues modeZero{{M(0), 0}}, modeOne{{M(0), 1}}; // Get the hybrid gaussian factor and check it is as expected @@ -213,7 +215,8 @@ TEST(HybridBayesNet, Switching) { // Check errorTree AlgebraicDecisionTree actualErrors = graph.errorTree(continuousValues); // Create expected error tree - const AlgebraicDecisionTree 
expectedErrors(M(0), expectedError0, expectedError1); + const AlgebraicDecisionTree expectedErrors( + M(0), expectedError0, expectedError1); // Check that the actual error tree matches the expected one EXPECT(assert_equal(expectedErrors, actualErrors, 1e-5)); @@ -226,9 +229,11 @@ TEST(HybridBayesNet, Switching) { EXPECT_DOUBLES_EQUAL(std::exp(-expectedError1), probPrime1, 1e-5); // Check discretePosterior - const AlgebraicDecisionTree graphPosterior = graph.discretePosterior(continuousValues); + const AlgebraicDecisionTree graphPosterior = + graph.discretePosterior(continuousValues); const double sum = probPrime0 + probPrime1; - const AlgebraicDecisionTree expectedPosterior(M(0), probPrime0 / sum, probPrime1 / sum); + const AlgebraicDecisionTree expectedPosterior( + M(0), probPrime0 / sum, probPrime1 / sum); EXPECT(assert_equal(expectedPosterior, graphPosterior, 1e-5)); // Make the clique of factors connected to x0: @@ -267,29 +272,37 @@ TEST(HybridBayesNet, Switching) { EXPECT(std::dynamic_pointer_cast(factor)); auto phi_x1_m = std::dynamic_pointer_cast(factor); EXPECT_LONGS_EQUAL(2, phi_x1_m->keys().size()); // x1, m0 - // Check that the scalars incorporate the negative log constant of the conditional - EXPECT_DOUBLES_EQUAL( - scalar0 - (*p_x0_given_x1_m)(modeZero)->negLogConstant(), (*phi_x1_m)(modeZero).second, 1e-9); - EXPECT_DOUBLES_EQUAL( - scalar1 - (*p_x0_given_x1_m)(modeOne)->negLogConstant(), (*phi_x1_m)(modeOne).second, 1e-9); - - // Check that the conditional and remaining factor are consistent for both modes + // Check that the scalars incorporate the negative log constant of the + // conditional + EXPECT_DOUBLES_EQUAL(scalar0 - (*p_x0_given_x1_m)(modeZero)->negLogConstant(), + (*phi_x1_m)(modeZero).second, + 1e-9); + EXPECT_DOUBLES_EQUAL(scalar1 - (*p_x0_given_x1_m)(modeOne)->negLogConstant(), + (*phi_x1_m)(modeOne).second, + 1e-9); + + // Check that the conditional and remaining factor are consistent for both + // modes for (auto&& mode : {modeZero, 
modeOne}) { const auto gc = (*p_x0_given_x1_m)(mode); const auto [gf, scalar] = (*phi_x1_m)(mode); - // The error of the original factors should equal the sum of errors of the conditional and - // remaining factor, modulo the normalization constant of the conditional. + // The error of the original factors should equal the sum of errors of the + // conditional and remaining factor, modulo the normalization constant of + // the conditional. double originalError = factors_x0.error({continuousValues, mode}); - const double actualError = - gc->negLogConstant() + gc->error(continuousValues) + gf->error(continuousValues) + scalar; + const double actualError = gc->negLogConstant() + + gc->error(continuousValues) + + gf->error(continuousValues) + scalar; EXPECT_DOUBLES_EQUAL(originalError, actualError, 1e-9); } // Create a clique for x1 HybridGaussianFactorGraph factors_x1; - factors_x1.push_back(factor); // Use the remaining factor from previous elimination - factors_x1.push_back(graph.at(2)); // Add the factor for x1 from the original graph + factors_x1.push_back( + factor); // Use the remaining factor from previous elimination + factors_x1.push_back( + graph.at(2)); // Add the factor for x1 from the original graph // Test collectProductFactor for x1 clique auto productFactor_x1 = factors_x1.collectProductFactor(); @@ -323,16 +336,18 @@ TEST(HybridBayesNet, Switching) { auto phi_x1 = std::dynamic_pointer_cast(factor_x1); CHECK(phi_x1); EXPECT_LONGS_EQUAL(1, phi_x1->keys().size()); // m0 - // We can't really check the error of the decision tree factor phi_x1, because the continuos - // factor whose error(kEmpty) we need is not available.. + // We can't really check the error of the decision tree factor phi_x1, because + // the continuos factor whose error(kEmpty) we need is not available.. - // However, we can still check the total error for the clique factors_x1 and the elimination - // results are equal, modulo -again- the negative log constant of the conditional. 
+ // However, we can still check the total error for the clique factors_x1 and + // the elimination results are equal, modulo -again- the negative log constant + // of the conditional. for (auto&& mode : {modeZero, modeOne}) { auto gc_x1 = (*p_x1_given_m)(mode); double originalError_x1 = factors_x1.error({continuousValues, mode}); - const double actualError = - gc_x1->negLogConstant() + gc_x1->error(continuousValues) + phi_x1->error(mode); + const double actualError = gc_x1->negLogConstant() + + gc_x1->error(continuousValues) + + phi_x1->error(mode); EXPECT_DOUBLES_EQUAL(originalError_x1, actualError, 1e-9); } @@ -340,9 +355,11 @@ TEST(HybridBayesNet, Switching) { auto hybridBayesNet = graph.eliminateSequential(); CHECK(hybridBayesNet); - // Check that the posterior P(M|X=continuousValues) from the Bayes net is the same as the - // same posterior from the graph. This is a sanity check that the elimination is done correctly. - AlgebraicDecisionTree bnPosterior = hybridBayesNet->discretePosterior(continuousValues); + // Check that the posterior P(M|X=continuousValues) from the Bayes net is the + // same as the same posterior from the graph. This is a sanity check that the + // elimination is done correctly. + AlgebraicDecisionTree bnPosterior = + hybridBayesNet->discretePosterior(continuousValues); EXPECT(assert_equal(graphPosterior, bnPosterior)); } @@ -367,9 +384,11 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) { // Check that the probability prime is the exponential of the error EXPECT(assert_equal(graph.probPrime(delta), exp(-error), 1e-7)); - // Check that the posterior P(M|X=continuousValues) from the Bayes net is the same as the - // same posterior from the graph. This is a sanity check that the elimination is done correctly. - const AlgebraicDecisionTree graphPosterior = graph.discretePosterior(delta.continuous()); + // Check that the posterior P(M|X=continuousValues) from the Bayes net is the + // same as the same posterior from the graph. 
This is a sanity check that the + // elimination is done correctly. + const AlgebraicDecisionTree graphPosterior = + graph.discretePosterior(delta.continuous()); const AlgebraicDecisionTree bnPosterior = hybridBayesNet->discretePosterior(delta.continuous()); EXPECT(assert_equal(graphPosterior, bnPosterior)); @@ -491,7 +510,8 @@ TEST(HybridGaussianFactorGraph, Conditionals) { expected_continuous.insert(X(1), 1); expected_continuous.insert(X(2), 2); expected_continuous.insert(X(3), 4); - Values result_continuous = switching.linearizationPoint.retract(result.continuous()); + Values result_continuous = + switching.linearizationPoint.retract(result.continuous()); EXPECT(assert_equal(expected_continuous, result_continuous)); DiscreteValues expected_discrete; @@ -519,8 +539,10 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) { // Check discrete posterior at optimum HybridValues delta = hybridBayesNet->optimize(); - AlgebraicDecisionTree graphPosterior = graph.discretePosterior(delta.continuous()); - AlgebraicDecisionTree bnPosterior = hybridBayesNet->discretePosterior(delta.continuous()); + AlgebraicDecisionTree graphPosterior = + graph.discretePosterior(delta.continuous()); + AlgebraicDecisionTree bnPosterior = + hybridBayesNet->discretePosterior(delta.continuous()); EXPECT(assert_equal(graphPosterior, bnPosterior)); graph = HybridGaussianFactorGraph(); @@ -579,11 +601,9 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) { /* ****************************************************************************/ // Check that the factor graph unnormalized probability is proportional to the // Bayes net probability for the given measurements. 
-bool ratioTest(const HybridBayesNet& bn, - const VectorValues& measurements, - const HybridGaussianFactorGraph& fg, - size_t num_samples = 100) { - auto compute_ratio = [&](HybridValues* sample) -> double { +bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, + const HybridGaussianFactorGraph &fg, size_t num_samples = 100) { + auto compute_ratio = [&](HybridValues *sample) -> double { sample->update(measurements); // update sample with given measurements: return bn.evaluate(*sample) / fg.probPrime(*sample); }; @@ -602,11 +622,9 @@ bool ratioTest(const HybridBayesNet& bn, /* ****************************************************************************/ // Check that the bayes net unnormalized probability is proportional to the // Bayes net probability for the given measurements. -bool ratioTest(const HybridBayesNet& bn, - const VectorValues& measurements, - const HybridBayesNet& posterior, - size_t num_samples = 100) { - auto compute_ratio = [&](HybridValues* sample) -> double { +bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements, + const HybridBayesNet &posterior, size_t num_samples = 100) { + auto compute_ratio = [&](HybridValues *sample) -> double { sample->update(measurements); // update sample with given measurements: return bn.evaluate(*sample) / posterior.evaluate(*sample); }; @@ -640,10 +658,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { // Create hybrid Gaussian factor on X(0). 
using tiny::mode; // regression, but mean checked to be 5.0 in both cases: - const auto conditional0 = - std::make_shared(X(0), Vector1(14.1421), I_1x1 * 2.82843), - conditional1 = - std::make_shared(X(0), Vector1(10.1379), I_1x1 * 2.02759); + const auto conditional0 = std::make_shared( + X(0), Vector1(14.1421), I_1x1 * 2.82843), + conditional1 = std::make_shared( + X(0), Vector1(10.1379), I_1x1 * 2.02759); expectedBayesNet.emplace_shared( mode, std::vector{conditional0, conditional1}); @@ -671,7 +689,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { bn.emplace_shared(m1, Z(0), I_1x1, X(0), parms); // Create prior on X(0). - bn.push_back(GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5)); + bn.push_back( + GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5)); // Add prior on m1. bn.emplace_shared(m1, "1/1"); @@ -689,10 +708,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { // Create hybrid Gaussian factor on X(0). // regression, but mean checked to be 5.0 in both cases: - const auto conditional0 = - std::make_shared(X(0), Vector1(10.1379), I_1x1 * 2.02759), - conditional1 = - std::make_shared(X(0), Vector1(14.1421), I_1x1 * 2.82843); + const auto conditional0 = std::make_shared( + X(0), Vector1(10.1379), I_1x1 * 2.02759), + conditional1 = std::make_shared( + X(0), Vector1(14.1421), I_1x1 * 2.82843); expectedBayesNet.emplace_shared( m1, std::vector{conditional0, conditional1}); @@ -725,10 +744,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { // Create hybrid Gaussian factor on X(0). 
using tiny::mode; // regression, but mean checked to be 5.0 in both cases: - const auto conditional0 = - std::make_shared(X(0), Vector1(17.3205), I_1x1 * 3.4641), - conditional1 = - std::make_shared(X(0), Vector1(10.274), I_1x1 * 2.0548); + const auto conditional0 = std::make_shared( + X(0), Vector1(17.3205), I_1x1 * 3.4641), + conditional1 = std::make_shared( + X(0), Vector1(10.274), I_1x1 * 2.0548); expectedBayesNet.emplace_shared( mode, std::vector{conditional0, conditional1}); @@ -772,25 +791,27 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { // NOTE: we add reverse topological so we can sample from the Bayes net.: // Add measurements: - std::vector> measurementModels{{Z_1x1, 3}, {Z_1x1, 0.5}}; + std::vector> measurementModels{{Z_1x1, 3}, + {Z_1x1, 0.5}}; for (size_t t : {0, 1, 2}) { // Create hybrid Gaussian factor on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; - bn.emplace_shared( - noise_mode_t, Z(t), I_1x1, X(t), measurementModels); + bn.emplace_shared(noise_mode_t, Z(t), I_1x1, + X(t), measurementModels); // Create prior on discrete mode N(t): bn.emplace_shared(noise_mode_t, "20/80"); } // Add motion models. TODO(frank): why are they exactly the same? 
- std::vector> motionModels{{Z_1x1, 0.2}, {Z_1x1, 0.2}}; + std::vector> motionModels{{Z_1x1, 0.2}, + {Z_1x1, 0.2}}; for (size_t t : {2, 1}) { // Create hybrid Gaussian factor on X(t) conditioned on X(t-1) // and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; - bn.emplace_shared( - motion_model_t, X(t), I_1x1, X(t - 1), motionModels); + bn.emplace_shared(motion_model_t, X(t), I_1x1, + X(t - 1), motionModels); // Create prior on motion model M(t): bn.emplace_shared(motion_model_t, "40/60"); @@ -803,7 +824,8 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { EXPECT_LONGS_EQUAL(6, bn.sample().continuous().size()); // Create measurements consistent with moving right every time: - const VectorValues measurements{{Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}}; + const VectorValues measurements{ + {Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}}; const HybridGaussianFactorGraph fg = bn.toFactorGraph(measurements); // Factor graph is: diff --git a/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp index 017df14a55..f41c5f0aa2 100644 --- a/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianProductFactor.cpp @@ -54,12 +54,15 @@ const auto f22 = std::make_shared(X(1), A1, X(3), A3, b); const HybridGaussianFactor hybridFactorB(m2, {{f20, 20}, {f21, 21}, {f22, 22}}); // Simulate a pruned hybrid factor, in this case m2==1 is nulled out. 
-const HybridGaussianFactor prunedFactorB(m2, {{f20, 20}, {nullptr, 1000}, {f22, 22}}); +const HybridGaussianFactor prunedFactorB( + m2, {{f20, 20}, {nullptr, 1000}, {f22, 22}}); } // namespace examples /* ************************************************************************* */ // Constructor -TEST(HybridGaussianProductFactor, Construct) { HybridGaussianProductFactor product; } +TEST(HybridGaussianProductFactor, Construct) { + HybridGaussianProductFactor product; +} /* ************************************************************************* */ // Add two Gaussian factors and check only one leaf in tree diff --git a/gtsam/hybrid/tests/testHybridMotionModel.cpp b/gtsam/hybrid/tests/testHybridMotionModel.cpp index bcffbe6282..16a2bd476d 100644 --- a/gtsam/hybrid/tests/testHybridMotionModel.cpp +++ b/gtsam/hybrid/tests/testHybridMotionModel.cpp @@ -46,25 +46,25 @@ using symbol_shorthand::Z; DiscreteKey m1(M(1), 2); -void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) { +void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) { auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma); - hbn.emplace_shared( - z_key, Vector1(0.0), I_1x1, x_key, -I_1x1, measurement_model); + hbn.emplace_shared(z_key, Vector1(0.0), I_1x1, x_key, + -I_1x1, measurement_model); } /// Create hybrid motion model p(x1 | x0, m1) -static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(double mu0, - double mu1, - double sigma0, - double sigma1) { +static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( + double mu0, double mu1, double sigma0, double sigma1) { std::vector> motionModels{{Vector1(mu0), sigma0}, {Vector1(mu1), sigma1}}; - return std::make_shared(m1, X(1), I_1x1, X(0), motionModels); + return std::make_shared(m1, X(1), I_1x1, X(0), + motionModels); } /// Create two state Bayes network with 1 or two measurement models -HybridBayesNet CreateBayesNet(const HybridGaussianConditional::shared_ptr& 
hybridMotionModel, - bool add_second_measurement = false) { +HybridBayesNet CreateBayesNet( + const HybridGaussianConditional::shared_ptr &hybridMotionModel, + bool add_second_measurement = false) { HybridBayesNet hbn; // Add measurement model p(z0 | x0) @@ -86,16 +86,15 @@ HybridBayesNet CreateBayesNet(const HybridGaussianConditional::shared_ptr& hybri /// Approximate the discrete marginal P(m1) using importance sampling std::pair approximateDiscreteMarginal( - const HybridBayesNet& hbn, - const HybridGaussianConditional::shared_ptr& hybridMotionModel, - const VectorValues& given, - size_t N = 100000) { + const HybridBayesNet &hbn, + const HybridGaussianConditional::shared_ptr &hybridMotionModel, + const VectorValues &given, size_t N = 100000) { /// Create importance sampling network q(x0,x1,m) = p(x1|x0,m1) q(x0) P(m1), /// using q(x0) = N(z0, sigmaQ) to sample x0. HybridBayesNet q; q.push_back(hybridMotionModel); // Add hybrid motion model q.emplace_shared(GaussianConditional::FromMeanAndStddev( - X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0 + X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0 q.emplace_shared(m1, "50/50"); // Discrete prior. 
// Do importance sampling @@ -195,16 +194,20 @@ TEST(HybridGaussianFactor, TwoStateModel2) { HybridBayesNet::shared_ptr eliminated = gfg.eliminateSequential(); - for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN const auto& expectedDiscretePosterior = hbn.discretePosterior(vv); - // Equality of posteriors asserts that the factor graph is correct (same ratios for all modes) - EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); + // Equality of posteriors asserts that the factor graph is correct (same + // ratios for all modes) + EXPECT( + assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); // This one asserts that HBN resulting from elimination is correct. - EXPECT(assert_equal(expectedDiscretePosterior, eliminated->discretePosterior(vv))); + EXPECT(assert_equal(expectedDiscretePosterior, + eliminated->discretePosterior(vv))); } // Importance sampling run with 100k samples gives 50.095/49.905 @@ -227,16 +230,20 @@ TEST(HybridGaussianFactor, TwoStateModel2) { // Check that ratio of Bayes net and factor graph for different modes is // equal for several values of {x0,x1}. 
- for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN const auto& expectedDiscretePosterior = hbn.discretePosterior(vv); - // Equality of posteriors asserts that the factor graph is correct (same ratios for all modes) - EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); + // Equality of posteriors asserts that the factor graph is correct (same + // ratios for all modes) + EXPECT( + assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv))); // This one asserts that HBN resulting from elimination is correct. - EXPECT(assert_equal(expectedDiscretePosterior, eliminated->discretePosterior(vv))); + EXPECT(assert_equal(expectedDiscretePosterior, + eliminated->discretePosterior(vv))); } // Values taken from an importance sampling run with 100k samples: @@ -290,11 +297,13 @@ TEST(HybridGaussianFactor, TwoStateModel3) { // Check that ratio of Bayes net and factor graph for different modes is // equal for several values of {x0,x1}. 
- for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), gfg.error(hv1) / hbn.error(hv1), 1e-9); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); } HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); @@ -318,11 +327,13 @@ TEST(HybridGaussianFactor, TwoStateModel3) { // Check that ratio of Bayes net and factor graph for different modes is // equal for several values of {x0,x1}. - for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, - VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { + for (VectorValues vv : + {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}}, + VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) { vv.insert(given); // add measurements for HBN HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}}); - EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), gfg.error(hv1) / hbn.error(hv1), 1e-9); + EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), + gfg.error(hv1) / hbn.error(hv1), 1e-9); } HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 5f939e5893..b5486c6cdb 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -34,7 +34,6 @@ #include #include "Switching.h" -#include "Test.h" // Include for test suite #include