diff --git a/gtsam/discrete/discrete.i b/gtsam/discrete/discrete.i
index b3a12a8d52..fa98f36fa5 100644
--- a/gtsam/discrete/discrete.i
+++ b/gtsam/discrete/discrete.i
@@ -219,6 +219,16 @@ class DiscreteBayesTree {
 };
 
 #include
+
+class DiscreteLookupTable : gtsam::DiscreteConditional {
+  DiscreteLookupTable(size_t nFrontals, const gtsam::DiscreteKeys& keys,
+                      const gtsam::DecisionTreeFactor::ADT& potentials);
+  void print(
+      const std::string& s = "Discrete Lookup Table: ",
+      const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const;
+  size_t argmax(const gtsam::DiscreteValues& parentsValues) const;
+};
+
 class DiscreteLookupDAG {
   DiscreteLookupDAG();
   void push_back(const gtsam::DiscreteLookupTable* table);
diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index b04800d21e..6816dfbf67 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -119,11 +119,36 @@ void GaussianMixture::print(const std::string &s,
       "", [&](Key k) { return formatter(k); },
       [&](const GaussianConditional::shared_ptr &gf) -> std::string {
         RedirectCout rd;
-        if (!gf->empty())
+        if (gf && !gf->empty())
           gf->print("", formatter);
         else
           return {"nullptr"};
         return rd.str();
       });
 }
+
+/* *******************************************************************************/
+void GaussianMixture::prune(const DecisionTreeFactor &decisionTree) {
+  // Functional which loops over all assignments and creates a set of
+  // GaussianConditionals
+  auto pruner = [&decisionTree](
+                    const Assignment<Key> &choices,
+                    const GaussianConditional::shared_ptr &conditional)
+      -> GaussianConditional::shared_ptr {
+    // typecast so we can use this to get the probability value
+    DiscreteValues values(choices);
+
+    if (decisionTree(values) == 0.0) {
+      // empty aka null pointer
+      boost::shared_ptr<GaussianConditional> null;
+      return null;
+    } else {
+      return conditional;
+    }
+  };
+
+  auto pruned_conditionals = conditionals_.apply(pruner);
+  conditionals_.root_ = pruned_conditionals.root_;
+}
+
 }  // namespace gtsam
diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h
index fc1eb0f060..75deb4d553 100644
--- a/gtsam/hybrid/GaussianMixture.h
+++ b/gtsam/hybrid/GaussianMixture.h
@@ -21,6 +21,7 @@
 
 #include
 #include
+#include
 #include
 #include
 #include
@@ -30,11 +31,14 @@ namespace gtsam {
 
 /**
  * @brief A conditional of gaussian mixtures indexed by discrete variables, as
- * part of a Bayes Network.
+ * part of a Bayes Network. This is the result of the elimination of a
+ * continuous variable in a hybrid scheme, such that the remaining variables
+ * are discrete+continuous.
  *
- * Represents the conditional density P(X | M, Z) where X is a continuous random
- * variable, M is the selection of discrete variables corresponding to a subset
- * of the Gaussian variables and Z is parent of this node
+ * Represents the conditional density P(X | M, Z) where X is the set of
+ * continuous random variables, M is the selection of discrete variables
+ * corresponding to a subset of the Gaussian variables, and Z are the parents
+ * of this node.
  *
 * The probability P(x|y,z,...)
is proportional to * \f$ \sum_i k_i \exp - \frac{1}{2} |R_i x - (d_i - S_i y - T_i z - ...)|^2 \f$ @@ -118,7 +122,7 @@ class GTSAM_EXPORT GaussianMixture /// Test equality with base HybridFactor bool equals(const HybridFactor &lf, double tol = 1e-9) const override; - /* print utility */ + /// Print utility void print( const std::string &s = "GaussianMixture\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; @@ -128,6 +132,15 @@ class GTSAM_EXPORT GaussianMixture /// Getter for the underlying Conditionals DecisionTree const Conditionals &conditionals(); + /** + * @brief Prune the decision tree of Gaussian factors as per the discrete + * `decisionTree`. + * + * @param decisionTree A pruned decision tree of discrete keys where the + * leaves are probabilities. + */ + void prune(const DecisionTreeFactor &decisionTree); + /** * @brief Merge the Gaussian Factor Graphs in `this` and `sum` while * maintaining the decision tree structure. diff --git a/gtsam/hybrid/GaussianMixtureFactor.cpp b/gtsam/hybrid/GaussianMixtureFactor.cpp index 8f832d8eab..9b5be188a6 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.cpp +++ b/gtsam/hybrid/GaussianMixtureFactor.cpp @@ -51,7 +51,7 @@ GaussianMixtureFactor GaussianMixtureFactor::FromFactors( void GaussianMixtureFactor::print(const std::string &s, const KeyFormatter &formatter) const { HybridFactor::print(s, formatter); - std::cout << "]{\n"; + std::cout << "{\n"; factors_.print( "", [&](Key k) { return formatter(k); }, [&](const GaussianFactor::shared_ptr &gf) -> std::string { diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h index 6c90ee6a71..a0a51af55e 100644 --- a/gtsam/hybrid/GaussianMixtureFactor.h +++ b/gtsam/hybrid/GaussianMixtureFactor.h @@ -22,7 +22,7 @@ #include #include -#include +#include #include namespace gtsam { @@ -108,7 +108,7 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { bool equals(const HybridFactor &lf, double tol = 1e-9) const override; void print( - const std::string &s = "HybridFactor\n", + const std::string &s = "GaussianMixtureFactor\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 1292711d89..4665a3136c 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -10,7 +10,132 @@ * @file HybridBayesNet.cpp * @brief A bayes net of Gaussian Conditionals indexed by discrete keys. * @author Fan Jiang + * @author Varun Agrawal + * @author Shangjie Xue * @date January 2022 */ #include +#include +#include + +namespace gtsam { + +/* ************************************************************************* */ +/// Return the DiscreteKey vector as a set. +static std::set DiscreteKeysAsSet(const DiscreteKeys &dkeys) { + std::set s; + s.insert(dkeys.begin(), dkeys.end()); + return s; +} + +/* ************************************************************************* */ +HybridBayesNet HybridBayesNet::prune( + const DecisionTreeFactor::shared_ptr &discreteFactor) const { + /* To Prune, we visitWith every leaf in the GaussianMixture. + * For each leaf, using the assignment we can check the discrete decision tree + * for 0.0 probability, then just set the leaf to a nullptr. + * + * We can later check the GaussianMixture for just nullptrs. 
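+   *
+   * For example (sketch): if discreteFactor is over modes (m1, m2) and
+   * (*discreteFactor)({m1=0, m2=1}) == 0.0, the GaussianConditional leaf
+   * stored under that assignment becomes a nullptr, while leaves with
+   * nonzero probability are kept unchanged.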
+ */ + + HybridBayesNet prunedBayesNetFragment; + + // Functional which loops over all assignments and create a set of + // GaussianConditionals + auto pruner = [&](const Assignment &choices, + const GaussianConditional::shared_ptr &conditional) + -> GaussianConditional::shared_ptr { + // typecast so we can use this to get probability value + DiscreteValues values(choices); + + if ((*discreteFactor)(values) == 0.0) { + // empty aka null pointer + boost::shared_ptr null; + return null; + } else { + return conditional; + } + }; + + // Go through all the conditionals in the + // Bayes Net and prune them as per discreteFactor. + for (size_t i = 0; i < this->size(); i++) { + HybridConditional::shared_ptr conditional = this->at(i); + + GaussianMixture::shared_ptr gaussianMixture = + boost::dynamic_pointer_cast(conditional->inner()); + + if (gaussianMixture) { + // We may have mixtures with less discrete keys than discreteFactor so we + // skip those since the label assignment does not exist. + auto gmKeySet = DiscreteKeysAsSet(gaussianMixture->discreteKeys()); + auto dfKeySet = DiscreteKeysAsSet(discreteFactor->discreteKeys()); + if (gmKeySet != dfKeySet) { + // Add the gaussianMixture which doesn't have to be pruned. + prunedBayesNetFragment.push_back( + boost::make_shared(gaussianMixture)); + continue; + } + + // Run the pruning to get a new, pruned tree + GaussianMixture::Conditionals prunedTree = + gaussianMixture->conditionals().apply(pruner); + + DiscreteKeys discreteKeys = gaussianMixture->discreteKeys(); + // reverse keys to get a natural ordering + std::reverse(discreteKeys.begin(), discreteKeys.end()); + + // Convert from boost::iterator_range to KeyVector + // so we can pass it to constructor. + KeyVector frontals(gaussianMixture->frontals().begin(), + gaussianMixture->frontals().end()), + parents(gaussianMixture->parents().begin(), + gaussianMixture->parents().end()); + + // Create the new gaussian mixture and add it to the bayes net. + auto prunedGaussianMixture = boost::make_shared( + frontals, parents, discreteKeys, prunedTree); + + // Type-erase and add to the pruned Bayes Net fragment. 
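+      // (Wrapping in HybridConditional keeps the Bayes net type-erased, so
+      // Gaussian, discrete, and mixture conditionals share one container.)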
+ prunedBayesNetFragment.push_back( + boost::make_shared(prunedGaussianMixture)); + + } else { + // Add the non-GaussianMixture conditional + prunedBayesNetFragment.push_back(conditional); + } + } + + return prunedBayesNetFragment; +} + +/* ************************************************************************* */ +GaussianMixture::shared_ptr HybridBayesNet::atGaussian(size_t i) const { + return boost::dynamic_pointer_cast(factors_.at(i)->inner()); +} + +/* ************************************************************************* */ +DiscreteConditional::shared_ptr HybridBayesNet::atDiscrete(size_t i) const { + return boost::dynamic_pointer_cast( + factors_.at(i)->inner()); +} + +/* ************************************************************************* */ +GaussianBayesNet HybridBayesNet::choose( + const DiscreteValues &assignment) const { + GaussianBayesNet gbn; + for (size_t idx = 0; idx < size(); idx++) { + GaussianMixture gm = *this->atGaussian(idx); + gbn.push_back(gm(assignment)); + } + return gbn; +} + +/* *******************************************************************************/ +HybridValues HybridBayesNet::optimize() const { + auto dag = HybridLookupDAG::FromBayesNet(*this); + return dag.argmax(); +} + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h index 43eead2801..0d2dc36424 100644 --- a/gtsam/hybrid/HybridBayesNet.h +++ b/gtsam/hybrid/HybridBayesNet.h @@ -17,8 +17,11 @@ #pragma once +#include #include +#include #include +#include namespace gtsam { @@ -36,6 +39,39 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet { /** Construct empty bayes net */ HybridBayesNet() = default; + + /// Prune the Hybrid Bayes Net given the discrete decision tree. + HybridBayesNet prune( + const DecisionTreeFactor::shared_ptr &discreteFactor) const; + + /// Add HybridConditional to Bayes Net + using Base::add; + + /// Add a discrete conditional to the Bayes Net. + void add(const DiscreteKey &key, const std::string &table) { + push_back( + HybridConditional(boost::make_shared(key, table))); + } + + /// Get a specific Gaussian mixture by index `i`. + GaussianMixture::shared_ptr atGaussian(size_t i) const; + + /// Get a specific discrete conditional by index `i`. + DiscreteConditional::shared_ptr atDiscrete(size_t i) const; + + /** + * @brief Get the Gaussian Bayes Net which corresponds to a specific discrete + * value assignment. + * + * @param assignment The discrete value assignment for the discrete keys. + * @return GaussianBayesNet + */ + GaussianBayesNet choose(const DiscreteValues &assignment) const; + + /// Solve the HybridBayesNet by back-substitution. + /// TODO(Shangjie) do we need to create a HybridGaussianBayesNet class, and + /// put this method there? + HybridValues optimize() const; }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h index 0b89ca8c4b..02a4a11e5e 100644 --- a/gtsam/hybrid/HybridBayesTree.h +++ b/gtsam/hybrid/HybridBayesTree.h @@ -76,7 +76,10 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree { /** * @brief Class for Hybrid Bayes tree orphan subtrees. * - * This does special stuff for the hybrid case + * This object stores parent keys in our base type factor so that + * eliminating those parent keys will pull this subtree into the + * elimination. + * This does special stuff for the hybrid case. 
* * @tparam CLIQUE */ diff --git a/gtsam/hybrid/HybridFactor.cpp b/gtsam/hybrid/HybridFactor.cpp index 8df2d524ff..a9fe62cf13 100644 --- a/gtsam/hybrid/HybridFactor.cpp +++ b/gtsam/hybrid/HybridFactor.cpp @@ -78,7 +78,8 @@ bool HybridFactor::equals(const HybridFactor &lf, double tol) const { const This *e = dynamic_cast(&lf); return e != nullptr && Base::equals(*e, tol) && isDiscrete_ == e->isDiscrete_ && isContinuous_ == e->isContinuous_ && - isHybrid_ == e->isHybrid_ && nrContinuous_ == e->nrContinuous_; + isHybrid_ == e->isHybrid_ && continuousKeys_ == e->continuousKeys_ && + discreteKeys_ == e->discreteKeys_; } /* ************************************************************************ */ @@ -88,17 +89,23 @@ void HybridFactor::print(const std::string &s, if (isContinuous_) std::cout << "Continuous "; if (isDiscrete_) std::cout << "Discrete "; if (isHybrid_) std::cout << "Hybrid "; - for (size_t c=0; c 0 ? "; " : ""); } } - for(auto && discreteKey: discreteKeys_) { - std::cout << formatter(discreteKey.first) << " "; + for (size_t d = 0; d < discreteKeys_.size(); d++) { + std::cout << formatter(discreteKeys_.at(d).first); + if (d < discreteKeys_.size() - 1) { + std::cout << " "; + } + } + std::cout << "]"; } } // namespace gtsam diff --git a/gtsam/hybrid/HybridFactor.h b/gtsam/hybrid/HybridFactor.h index 3e91e24877..13dc2e6e69 100644 --- a/gtsam/hybrid/HybridFactor.h +++ b/gtsam/hybrid/HybridFactor.h @@ -50,8 +50,8 @@ class GTSAM_EXPORT HybridFactor : public Factor { size_t nrContinuous_ = 0; protected: + // Set of DiscreteKeys for this factor. DiscreteKeys discreteKeys_; - /// Record continuous keys for book-keeping KeyVector continuousKeys_; @@ -120,10 +120,13 @@ class GTSAM_EXPORT HybridFactor : public Factor { bool isHybrid() const { return isHybrid_; } /// Return the number of continuous variables in this factor. - size_t nrContinuous() const { return nrContinuous_; } + size_t nrContinuous() const { return continuousKeys_.size(); } + + /// Return the discrete keys for this factor. + const DiscreteKeys &discreteKeys() const { return discreteKeys_; } - /// Return vector of discrete keys. - DiscreteKeys discreteKeys() const { return discreteKeys_; } + /// Return only the continuous keys for this factor. + const KeyVector &continuousKeys() const { return continuousKeys_; } /// @} }; diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h new file mode 100644 index 0000000000..fc730f0c97 --- /dev/null +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -0,0 +1,140 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridFactorGraph.h + * @brief Hybrid factor graph base class that uses type erasure + * @author Varun Agrawal + * @date May 28, 2022 + */ + +#pragma once + +#include +#include +#include +#include +#include + +#include +namespace gtsam { + +using SharedFactor = boost::shared_ptr; + +/** + * Hybrid Factor Graph + * ----------------------- + * This is the base hybrid factor graph. + * Everything inside needs to be hybrid factor or hybrid conditional. 
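+ *
+ * A minimal usage sketch (hypothetical discrete key m1 and factor pointer):
+ *   HybridFactorGraph graph;
+ *   graph.emplace_discrete<DecisionTreeFactor>(DiscreteKey(m1, 2), "0.4 0.6");
+ *   graph.push_hybrid(mixtureFactor);  // boost::shared_ptr<MixtureFactor>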
+ */ +class HybridFactorGraph : public FactorGraph { + public: + using Base = FactorGraph; + using This = HybridFactorGraph; ///< this class + using shared_ptr = boost::shared_ptr; ///< shared_ptr to This + + using Values = gtsam::Values; ///< backwards compatibility + using Indices = KeyVector; ///> map from keys to values + + protected: + /// Check if FACTOR type is derived from DiscreteFactor. + template + using IsDiscrete = typename std::enable_if< + std::is_base_of::value>::type; + + /// Check if FACTOR type is derived from HybridFactor. + template + using IsHybrid = typename std::enable_if< + std::is_base_of::value>::type; + + public: + /// @name Constructors + /// @{ + + /// Default constructor + HybridFactorGraph() = default; + + /** + * Implicit copy/downcast constructor to override explicit template container + * constructor. In BayesTree this is used for: + * `cachedSeparatorMarginal_.reset(*separatorMarginal)` + * */ + template + HybridFactorGraph(const FactorGraph& graph) : Base(graph) {} + + /// @} + + // Allow use of selected FactorGraph methods: + using Base::empty; + using Base::reserve; + using Base::size; + using Base::operator[]; + using Base::add; + using Base::push_back; + using Base::resize; + + /** + * Add a discrete factor *pointer* to the internal discrete graph + * @param discreteFactor - boost::shared_ptr to the factor to add + */ + template + IsDiscrete push_discrete( + const boost::shared_ptr& discreteFactor) { + Base::push_back(boost::make_shared(discreteFactor)); + } + + /** + * Add a discrete-continuous (Hybrid) factor *pointer* to the graph + * @param hybridFactor - boost::shared_ptr to the factor to add + */ + template + IsHybrid push_hybrid(const boost::shared_ptr& hybridFactor) { + Base::push_back(hybridFactor); + } + + /// delete emplace_shared. + template + void emplace_shared(Args&&... args) = delete; + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsDiscrete emplace_discrete(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_discrete(factor); + } + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsHybrid emplace_hybrid(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_hybrid(factor); + } + + /** + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. + * + * @param sharedFactor The factor to add to this factor graph. + */ + void push_back(const SharedFactor& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_discrete(p); + } + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_hybrid(p); + } + } +}; + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 2a92c717c2..e645e63e50 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -37,6 +37,8 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { using This = HybridGaussianFactor; using shared_ptr = boost::shared_ptr; + HybridGaussianFactor() = default; + // Explicit conversion from a shared ptr of GF explicit HybridGaussianFactor(GaussianFactor::shared_ptr other); @@ -52,7 +54,7 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { /// GTSAM print utility. 
void print( - const std::string &s = "HybridFactor\n", + const std::string &s = "HybridGaussianFactor\n", const KeyFormatter &formatter = DefaultKeyFormatter) const override; /// @} diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 88730cae95..c024c1255f 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -98,6 +98,12 @@ GaussianMixtureFactor::Sum sumFrontals( } else if (f->isContinuous()) { deferredFactors.push_back( boost::dynamic_pointer_cast(f)->inner()); + + } else if (f->isDiscrete()) { + // Don't do anything for discrete-only factors + // since we want to eliminate continuous values only. + continue; + } else { // We need to handle the case where the object is actually an // BayesTreeOrphanWrapper! @@ -106,8 +112,8 @@ GaussianMixtureFactor::Sum sumFrontals( if (!orphan) { auto &fr = *f; throw std::invalid_argument( - std::string("factor is discrete in continuous elimination") + - typeid(fr).name()); + std::string("factor is discrete in continuous elimination ") + + demangle(typeid(fr).name())); } } } @@ -158,7 +164,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } } - auto result = EliminateDiscrete(dfg, frontalKeys); + auto result = EliminateForMPE(dfg, frontalKeys); return {boost::make_shared(result.first), boost::make_shared(result.second)}; @@ -178,6 +184,19 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // sum out frontals, this is the factor on the separator GaussianMixtureFactor::Sum sum = sumFrontals(factors); + // If a tree leaf contains nullptr, + // convert that leaf to an empty GaussianFactorGraph. + // Needed since the DecisionTree will otherwise create + // a GFG with a single (null) factor. + auto emptyGaussian = [](const GaussianFactorGraph &gfg) { + bool hasNull = + std::any_of(gfg.begin(), gfg.end(), + [](const GaussianFactor::shared_ptr &ptr) { return !ptr; }); + + return hasNull ? GaussianFactorGraph() : gfg; + }; + sum = GaussianMixtureFactor::Sum(sum, emptyGaussian); + using EliminationPair = GaussianFactorGraph::EliminationResult; KeyVector keysOfEliminated; // Not the ordering @@ -189,7 +208,10 @@ hybridElimination(const HybridGaussianFactorGraph &factors, if (graph.empty()) { return {nullptr, nullptr}; } - auto result = EliminatePreferCholesky(graph, frontalKeys); + std::pair, + boost::shared_ptr> + result = EliminatePreferCholesky(graph, frontalKeys); + if (keysOfEliminated.empty()) { keysOfEliminated = result.first->keys(); // Initialize the keysOfEliminated to be the @@ -229,14 +251,27 @@ hybridElimination(const HybridGaussianFactorGraph &factors, boost::make_shared(discreteFactor)}; } else { - // Create a resulting DCGaussianMixture on the separator. + // Create a resulting GaussianMixtureFactor on the separator. auto factor = boost::make_shared( KeyVector(continuousSeparator.begin(), continuousSeparator.end()), discreteSeparator, separatorFactors); return {boost::make_shared(conditional), factor}; } } -/* ************************************************************************ */ +/* ************************************************************************ + * Function to eliminate variables **under the following assumptions**: + * 1. When the ordering is fully continuous, and the graph only contains + * continuous and hybrid factors + * 2. When the ordering is fully discrete, and the graph only contains discrete + * factors + * + * Any usage outside of this is considered incorrect. 
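+ *
+ * For example (sketch, X from symbol_shorthand): a fully continuous ordering
+ * over a graph of continuous and hybrid factors is a valid use:
+ *   Ordering frontalKeys;
+ *   frontalKeys.push_back(X(1));
+ *   auto result = EliminateHybrid(graph, frontalKeys);
+ *   // result.first: conditional, result.second: factor on the separator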
+ * + * \warning This function is not meant to be used with arbitrary hybrid factor + * graphs. For example, if there exists continuous parents, and one tries to + * eliminate a discrete variable (as specified in the ordering), the result will + * be INCORRECT and there will be NO error raised. + */ std::pair // EliminateHybrid(const HybridGaussianFactorGraph &factors, const Ordering &frontalKeys) { diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 0188aa652c..9367103305 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -19,9 +19,12 @@ #pragma once #include +#include +#include #include #include #include +#include namespace gtsam { @@ -53,10 +56,9 @@ struct EliminationTraits { typedef HybridBayesNet BayesNetType; ///< Type of Bayes net from sequential elimination typedef HybridEliminationTree - EliminationTreeType; ///< Type of elimination tree - typedef HybridBayesTree BayesTreeType; ///< Type of Bayes tree - typedef HybridJunctionTree - JunctionTreeType; ///< Type of Junction tree + EliminationTreeType; ///< Type of elimination tree + typedef HybridBayesTree BayesTreeType; ///< Type of Bayes tree + typedef HybridJunctionTree JunctionTreeType; ///< Type of Junction tree /// The default dense elimination function static std::pair, boost::shared_ptr > @@ -72,10 +74,16 @@ struct EliminationTraits { * Everything inside needs to be hybrid factor or hybrid conditional. */ class GTSAM_EXPORT HybridGaussianFactorGraph - : public FactorGraph, + : public HybridFactorGraph, public EliminateableFactorGraph { + protected: + /// Check if FACTOR type is derived from GaussianFactor. + template + using IsGaussian = typename std::enable_if< + std::is_base_of::value>::type; + public: - using Base = FactorGraph; + using Base = HybridFactorGraph; using This = HybridGaussianFactorGraph; ///< this class using BaseEliminateable = EliminateableFactorGraph; ///< for elimination @@ -100,7 +108,13 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// @} - using FactorGraph::add; + using Base::empty; + using Base::reserve; + using Base::size; + using Base::operator[]; + using Base::add; + using Base::push_back; + using Base::resize; /// Add a Jacobian factor to the factor graph. void add(JacobianFactor&& factor); @@ -113,6 +127,39 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// Add a DecisionTreeFactor as a shared ptr. void add(boost::shared_ptr factor); + + /** + * Add a gaussian factor *pointer* to the internal gaussian factor graph + * @param gaussianFactor - boost::shared_ptr to the factor to add + */ + template + IsGaussian push_gaussian( + const boost::shared_ptr& gaussianFactor) { + Base::push_back(boost::make_shared(gaussianFactor)); + } + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsGaussian emplace_gaussian(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_gaussian(factor); + } + + /** + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. + * + * @param sharedFactor The factor to add to this factor graph. 
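+   * @note A GaussianFactor pointer is wrapped in a HybridGaussianFactor;
+   * all other factor types are forwarded to HybridFactorGraph::push_back.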
+ */ + void push_back(const SharedFactor& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_gaussian(p); + } else { + Base::push_back(sharedFactor); + } + } }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianISAM.cpp b/gtsam/hybrid/HybridGaussianISAM.cpp index 7783a88ddc..23a95c021d 100644 --- a/gtsam/hybrid/HybridGaussianISAM.cpp +++ b/gtsam/hybrid/HybridGaussianISAM.cpp @@ -41,6 +41,7 @@ HybridGaussianISAM::HybridGaussianISAM(const HybridBayesTree& bayesTree) void HybridGaussianISAM::updateInternal( const HybridGaussianFactorGraph& newFactors, HybridBayesTree::Cliques* orphans, + const boost::optional& ordering, const HybridBayesTree::Eliminate& function) { // Remove the contaminated part of the Bayes tree BayesNetType bn; @@ -74,16 +75,21 @@ void HybridGaussianISAM::updateInternal( std::copy(allDiscrete.begin(), allDiscrete.end(), std::back_inserter(newKeysDiscreteLast)); - // KeyVector new - // Get an ordering where the new keys are eliminated last const VariableIndex index(factors); - const Ordering ordering = Ordering::ColamdConstrainedLast( - index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()), - true); + Ordering elimination_ordering; + if (ordering) { + elimination_ordering = *ordering; + } else { + elimination_ordering = Ordering::ColamdConstrainedLast( + index, + KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()), + true); + } // eliminate all factors (top, added, orphans) into a new Bayes tree - auto bayesTree = factors.eliminateMultifrontal(ordering, function, index); + HybridBayesTree::shared_ptr bayesTree = + factors.eliminateMultifrontal(elimination_ordering, function, index); // Re-add into Bayes tree data structures this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(), @@ -93,9 +99,61 @@ void HybridGaussianISAM::updateInternal( /* ************************************************************************* */ void HybridGaussianISAM::update(const HybridGaussianFactorGraph& newFactors, + const boost::optional& ordering, const HybridBayesTree::Eliminate& function) { Cliques orphans; - this->updateInternal(newFactors, &orphans, function); + this->updateInternal(newFactors, &orphans, ordering, function); +} + +/* ************************************************************************* */ +/** + * @brief Check if `b` is a subset of `a`. + * Non-const since they need to be sorted. + * + * @param a KeyVector + * @param b KeyVector + * @return True if the keys of b is a subset of a, else false. + */ +bool IsSubset(KeyVector a, KeyVector b) { + std::sort(a.begin(), a.end()); + std::sort(b.begin(), b.end()); + return std::includes(a.begin(), a.end(), b.begin(), b.end()); +} + +/* ************************************************************************* */ +void HybridGaussianISAM::prune(const Key& root, const size_t maxNrLeaves) { + auto decisionTree = boost::dynamic_pointer_cast( + this->clique(root)->conditional()->inner()); + DecisionTreeFactor prunedDiscreteFactor = decisionTree->prune(maxNrLeaves); + decisionTree->root_ = prunedDiscreteFactor.root_; + + std::vector prunedKeys; + for (auto&& clique : nodes()) { + // The cliques can be repeated for each frontal so we record it in + // prunedKeys and check if we have already pruned a particular clique. 
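+    // (nodes() maps every key to its clique, so a clique with k frontal keys
+    // is visited k times in this loop.)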
+    if (std::find(prunedKeys.begin(), prunedKeys.end(), clique.first) !=
+        prunedKeys.end()) {
+      continue;
+    }
+
+    // Add all the keys of the current clique to be pruned to prunedKeys
+    for (auto&& key : clique.second->conditional()->frontals()) {
+      prunedKeys.push_back(key);
+    }
+
+    // Convert parents() to a KeyVector for comparison
+    KeyVector parents;
+    for (auto&& parent : clique.second->conditional()->parents()) {
+      parents.push_back(parent);
+    }
+
+    if (IsSubset(parents, decisionTree->keys())) {
+      auto gaussianMixture = boost::dynamic_pointer_cast<GaussianMixture>(
+          clique.second->conditional()->inner());
+
+      gaussianMixture->prune(prunedDiscreteFactor);
+    }
+  }
+}
 }  // namespace gtsam
diff --git a/gtsam/hybrid/HybridGaussianISAM.h b/gtsam/hybrid/HybridGaussianISAM.h
index d5b6271da7..4fc206582d 100644
--- a/gtsam/hybrid/HybridGaussianISAM.h
+++ b/gtsam/hybrid/HybridGaussianISAM.h
@@ -48,6 +48,7 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM {
   void updateInternal(
       const HybridGaussianFactorGraph& newFactors,
       HybridBayesTree::Cliques* orphans,
+      const boost::optional<Ordering>& ordering = boost::none,
       const HybridBayesTree::Eliminate& function =
           HybridBayesTree::EliminationTraitsType::DefaultEliminate);
@@ -59,8 +60,17 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM {
+   * @param ordering Optional ordering for elimination.
    * @param function Elimination function.
    */
   void update(const HybridGaussianFactorGraph& newFactors,
+              const boost::optional<Ordering>& ordering = boost::none,
               const HybridBayesTree::Eliminate& function =
                   HybridBayesTree::EliminationTraitsType::DefaultEliminate);
+
+  /**
+   * @brief Prune the discrete conditional at the root clique down to
+   * `maxNumberLeaves` leaves, and prune the Gaussian mixtures in the
+   * matching cliques accordingly.
+   *
+   * @param root The root key in the discrete conditional decision tree.
+   * @param maxNumberLeaves The maximum number of leaves to keep.
+   */
+  void prune(const Key& root, const size_t maxNumberLeaves);
 };
 
 /// traits
diff --git a/gtsam/hybrid/HybridLookupDAG.cpp b/gtsam/hybrid/HybridLookupDAG.cpp
new file mode 100644
index 0000000000..a322a81776
--- /dev/null
+++ b/gtsam/hybrid/HybridLookupDAG.cpp
@@ -0,0 +1,76 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file HybridLookupDAG.cpp
+ * @date Aug, 2022
+ * @author Shangjie Xue
+ */
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+
+using std::pair;
+using std::vector;
+
+namespace gtsam {
+
+/* ************************************************************************** */
+void HybridLookupTable::argmaxInPlace(HybridValues* values) const {
+  // For a discrete conditional, use the argmaxInPlace() method in
+  // DiscreteLookupTable.
+  if (isDiscrete()) {
+    boost::static_pointer_cast<DiscreteLookupTable>(inner_)->argmaxInPlace(
+        &(values->discrete));
+  } else if (isContinuous()) {
+    // For a Gaussian conditional, use the solve() method in
+    // GaussianConditional.
+    values->continuous.insert(
+        boost::static_pointer_cast<GaussianConditional>(inner_)->solve(
+            values->continuous));
+  } else if (isHybrid()) {
+    // For a hybrid conditional, since children should not contain discrete
+    // variables, we can condition on the discrete variables in the parents and
+    // solve the resulting GaussianConditional.
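+    // e.g. for P(x1 | m1): conditionals()(values->discrete) selects the
+    // GaussianConditional for the mode already assigned by a parent, and
+    // solve() then back-substitutes the known continuous parents.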
+    auto conditional =
+        boost::static_pointer_cast<GaussianMixture>(inner_)->conditionals()(
+            values->discrete);
+    values->continuous.insert(conditional->solve(values->continuous));
+  }
+}
+
+/* ************************************************************************** */
+HybridLookupDAG HybridLookupDAG::FromBayesNet(const HybridBayesNet& bayesNet) {
+  HybridLookupDAG dag;
+  for (auto&& conditional : bayesNet) {
+    HybridLookupTable hlt(*conditional);
+    dag.push_back(hlt);
+  }
+  return dag;
+}
+
+/* ************************************************************************** */
+HybridValues HybridLookupDAG::argmax(HybridValues result) const {
+  // Argmax each node in turn in topological sort order (parents first).
+  for (auto lookupTable : boost::adaptors::reverse(*this))
+    lookupTable->argmaxInPlace(&result);
+  return result;
+}
+
+}  // namespace gtsam
diff --git a/gtsam/hybrid/HybridLookupDAG.h b/gtsam/hybrid/HybridLookupDAG.h
new file mode 100644
index 0000000000..cc1c58c58f
--- /dev/null
+++ b/gtsam/hybrid/HybridLookupDAG.h
@@ -0,0 +1,119 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file HybridLookupDAG.h
+ * @date Aug, 2022
+ * @author Shangjie Xue
+ */
+
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+#include
+
+namespace gtsam {
+
+/**
+ * @brief HybridLookupTable for max-product.
+ *
+ * Similar to DiscreteLookupTable, inherits from hybrid conditional for
+ * convenience. Is used in the max-product algorithm.
+ */
+class GTSAM_EXPORT HybridLookupTable : public HybridConditional {
+ public:
+  using Base = HybridConditional;
+  using This = HybridLookupTable;
+  using shared_ptr = boost::shared_ptr<This>;
+  using BaseConditional = Conditional;
+
+  /**
+   * @brief Construct a new HybridLookupTable object from a HybridConditional.
+   *
+   * @param conditional input hybrid conditional
+   */
+  HybridLookupTable(HybridConditional& conditional) : Base(conditional) {}
+
+  /**
+   * @brief Calculate assignment for frontal variables that maximizes value.
+   * @param parentsValues (in/out) Known assignments for the parents.
+   */
+  void argmaxInPlace(HybridValues* parentsValues) const;
+};
+
+/** A DAG made from hybrid lookup tables, as defined above. Similar to
+ * DiscreteLookupDAG. */
+class GTSAM_EXPORT HybridLookupDAG : public BayesNet<HybridLookupTable> {
+ public:
+  using Base = BayesNet<HybridLookupTable>;
+  using This = HybridLookupDAG;
+  using shared_ptr = boost::shared_ptr<This>;
+
+  /// @name Standard Constructors
+  /// @{
+
+  /// Construct empty DAG.
+  HybridLookupDAG() {}
+
+  /// Create from BayesNet with LookupTables
+  static HybridLookupDAG FromBayesNet(const HybridBayesNet& bayesNet);
+
+  /// Destructor
+  virtual ~HybridLookupDAG() {}
+
+  /// @}
+
+  /// @name Standard Interface
+  /// @{
+
+  /** Add a DiscreteLookupTable */
+  template <typename... Args>
+  void add(Args&&... args) {
+    emplace_shared<HybridLookupTable>(std::forward<Args>(args)...);
+  }
+
+  /**
+   * @brief argmax by back-substitution, optionally given certain variables.
+   *
+   * Assumes the DAG is reverse topologically sorted, i.e. last
+   * conditional will be optimized first *and* that the
+   * DAG does not contain any conditionals for the given variables.
If the DAG + * resulted from eliminating a factor graph, this is true for the elimination + * ordering. + * + * @return given assignment extended w. optimal assignment for all variables. + */ + HybridValues argmax(HybridValues given = HybridValues()) const; + /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE& ar, const unsigned int /*version*/) { + ar& BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } +}; + +// traits +template <> +struct traits : public Testable {}; + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactor.cpp b/gtsam/hybrid/HybridNonlinearFactor.cpp new file mode 100644 index 0000000000..5a1833d397 --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactor.cpp @@ -0,0 +1,41 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactor.cpp + * @date May 28, 2022 + * @author Varun Agrawal + */ + +#include + +#include + +namespace gtsam { + +/* ************************************************************************* */ +HybridNonlinearFactor::HybridNonlinearFactor( + const NonlinearFactor::shared_ptr &other) + : Base(other->keys()), inner_(other) {} + +/* ************************************************************************* */ +bool HybridNonlinearFactor::equals(const HybridFactor &lf, double tol) const { + return Base::equals(lf, tol); +} + +/* ************************************************************************* */ +void HybridNonlinearFactor::print(const std::string &s, + const KeyFormatter &formatter) const { + HybridFactor::print(s, formatter); + inner_->print("\n", formatter); +}; + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h new file mode 100644 index 0000000000..7776347b36 --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -0,0 +1,62 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactor.h + * @date May 28, 2022 + * @author Varun Agrawal + */ + +#pragma once + +#include +#include +#include + +namespace gtsam { + +/** + * A HybridNonlinearFactor is a layer over NonlinearFactor so that we do not + * have a diamond inheritance. + */ +class HybridNonlinearFactor : public HybridFactor { + private: + NonlinearFactor::shared_ptr inner_; + + public: + using Base = HybridFactor; + using This = HybridNonlinearFactor; + using shared_ptr = boost::shared_ptr; + + // Explicit conversion from a shared ptr of NonlinearFactor + explicit HybridNonlinearFactor(const NonlinearFactor::shared_ptr &other); + + /// @name Testable + /// @{ + + /// Check equality. + virtual bool equals(const HybridFactor &lf, double tol) const override; + + /// GTSAM print utility. 
+  void print(
+      const std::string &s = "HybridNonlinearFactor\n",
+      const KeyFormatter &formatter = DefaultKeyFormatter) const override;
+
+  /// @}
+
+  NonlinearFactor::shared_ptr inner() const { return inner_; }
+
+  /// Linearize to a HybridGaussianFactor at the linearization point `c`.
+  boost::shared_ptr<HybridGaussianFactor> linearize(const Values &c) const {
+    return boost::make_shared<HybridGaussianFactor>(inner_->linearize(c));
+  }
+};
+}  // namespace gtsam
diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.cpp b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp
new file mode 100644
index 0000000000..a6abce15aa
--- /dev/null
+++ b/gtsam/hybrid/HybridNonlinearFactorGraph.cpp
@@ -0,0 +1,94 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file HybridNonlinearFactorGraph.cpp
+ * @brief Nonlinear hybrid factor graph that uses type erasure
+ * @author Varun Agrawal
+ * @date May 28, 2022
+ */
+
+#include
+
+namespace gtsam {
+
+/* ************************************************************************* */
+void HybridNonlinearFactorGraph::add(
+    boost::shared_ptr<NonlinearFactor> factor) {
+  FactorGraph::add(boost::make_shared<HybridNonlinearFactor>(factor));
+}
+
+/* ************************************************************************* */
+void HybridNonlinearFactorGraph::print(const std::string& s,
+                                       const KeyFormatter& keyFormatter) const {
+  std::cout << (s.empty() ? "" : s + " ") << std::endl;
+  std::cout << "size: " << size() << std::endl;
+  for (size_t i = 0; i < factors_.size(); i++) {
+    std::stringstream ss;
+    ss << "factor " << i << ": ";
+    if (factors_[i]) {
+      factors_[i]->print(ss.str(), keyFormatter);
+      std::cout << std::endl;
+    }
+  }
+}
+
+/* ************************************************************************* */
+HybridGaussianFactorGraph HybridNonlinearFactorGraph::linearize(
+    const Values& continuousValues) const {
+  // create an empty linear FG
+  HybridGaussianFactorGraph linearFG;
+
+  linearFG.reserve(size());
+
+  // linearize all hybrid factors
+  for (auto&& factor : factors_) {
+    // First check if it is a valid factor
+    if (factor) {
+      // Check if the factor is a hybrid factor.
+      // It can be either a nonlinear MixtureFactor or a linear
+      // GaussianMixtureFactor.
+      if (factor->isHybrid()) {
+        // Check if it is a nonlinear mixture factor
+        if (auto nlmf = boost::dynamic_pointer_cast<MixtureFactor>(factor)) {
+          linearFG.push_back(nlmf->linearize(continuousValues));
+        } else {
+          linearFG.push_back(factor);
+        }
+
+        // Now check if the factor is a continuous only factor.
+      } else if (factor->isContinuous()) {
+        // In this case, we check if factor->inner() is nonlinear since
+        // HybridFactors wrap over continuous factors.
+        auto nlhf = boost::dynamic_pointer_cast<HybridNonlinearFactor>(factor);
+        if (auto nlf =
+                boost::dynamic_pointer_cast<NonlinearFactor>(nlhf->inner())) {
+          auto hgf = boost::make_shared<HybridGaussianFactor>(
+              nlf->linearize(continuousValues));
+          linearFG.push_back(hgf);
+        } else {
+          linearFG.push_back(factor);
+        }
+        // Finally if nothing else, we are discrete-only which doesn't need
+        // linearization.
+ } else { + linearFG.push_back(factor); + } + + } else { + linearFG.push_back(GaussianFactor::shared_ptr()); + } + } + return linearFG; +} + +} // namespace gtsam diff --git a/gtsam/hybrid/HybridNonlinearFactorGraph.h b/gtsam/hybrid/HybridNonlinearFactorGraph.h new file mode 100644 index 0000000000..2ddb8bcea2 --- /dev/null +++ b/gtsam/hybrid/HybridNonlinearFactorGraph.h @@ -0,0 +1,134 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HybridNonlinearFactorGraph.h + * @brief Nonlinear hybrid factor graph that uses type erasure + * @author Varun Agrawal + * @date May 28, 2022 + */ + +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +#include +namespace gtsam { + +/** + * Nonlinear Hybrid Factor Graph + * ----------------------- + * This is the non-linear version of a hybrid factor graph. + * Everything inside needs to be hybrid factor or hybrid conditional. + */ +class GTSAM_EXPORT HybridNonlinearFactorGraph : public HybridFactorGraph { + protected: + /// Check if FACTOR type is derived from NonlinearFactor. + template + using IsNonlinear = typename std::enable_if< + std::is_base_of::value>::type; + + public: + using Base = HybridFactorGraph; + using This = HybridNonlinearFactorGraph; ///< this class + using shared_ptr = boost::shared_ptr; ///< shared_ptr to This + + using Values = gtsam::Values; ///< backwards compatibility + using Indices = KeyVector; ///> map from keys to values + + /// @name Constructors + /// @{ + + HybridNonlinearFactorGraph() = default; + + /** + * Implicit copy/downcast constructor to override explicit template container + * constructor. In BayesTree this is used for: + * `cachedSeparatorMarginal_.reset(*separatorMarginal)` + * */ + template + HybridNonlinearFactorGraph(const FactorGraph& graph) + : Base(graph) {} + + /// @} + + // Allow use of selected FactorGraph methods: + using Base::empty; + using Base::reserve; + using Base::size; + using Base::operator[]; + using Base::add; + using Base::resize; + + /** + * Add a nonlinear factor *pointer* to the internal nonlinear factor graph + * @param nonlinearFactor - boost::shared_ptr to the factor to add + */ + template + IsNonlinear push_nonlinear( + const boost::shared_ptr& nonlinearFactor) { + Base::push_back(boost::make_shared(nonlinearFactor)); + } + + /// Construct a factor and add (shared pointer to it) to factor graph. + template + IsNonlinear emplace_nonlinear(Args&&... args) { + auto factor = boost::allocate_shared( + Eigen::aligned_allocator(), std::forward(args)...); + push_nonlinear(factor); + } + + /** + * @brief Add a single factor shared pointer to the hybrid factor graph. + * Dynamically handles the factor type and assigns it to the correct + * underlying container. + * + * @tparam FACTOR The factor type template + * @param sharedFactor The factor to add to this factor graph. + */ + template + void push_back(const boost::shared_ptr& sharedFactor) { + if (auto p = boost::dynamic_pointer_cast(sharedFactor)) { + push_nonlinear(p); + } else { + Base::push_back(sharedFactor); + } + } + + /// Add a nonlinear factor as a shared ptr. + void add(boost::shared_ptr factor); + + /// Print the factor graph. 
+  void print(
+      const std::string& s = "HybridNonlinearFactorGraph",
+      const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override;
+
+  /**
+   * @brief Linearize all the continuous factors in the
+   * HybridNonlinearFactorGraph.
+   *
+   * @param continuousValues: Dictionary of continuous values.
+   * @return HybridGaussianFactorGraph
+   */
+  HybridGaussianFactorGraph linearize(const Values& continuousValues) const;
+};
+
+template <>
+struct traits<HybridNonlinearFactorGraph>
+    : public Testable<HybridNonlinearFactorGraph> {};
+
+}  // namespace gtsam
diff --git a/gtsam/hybrid/HybridValues.h b/gtsam/hybrid/HybridValues.h
new file mode 100644
index 0000000000..5e1bd41646
--- /dev/null
+++ b/gtsam/hybrid/HybridValues.h
@@ -0,0 +1,127 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file HybridValues.h
+ * @date Jul 28, 2022
+ * @author Shangjie Xue
+ */
+
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+
+namespace gtsam {
+
+/**
+ * HybridValues represents a collection of DiscreteValues and VectorValues.
+ * It is typically used to store the variables of a HybridGaussianFactorGraph.
+ * Optimizing a HybridGaussianBayesNet returns this class.
+ */
+class GTSAM_EXPORT HybridValues {
+ public:
+  // DiscreteValues stores the discrete components of the HybridValues.
+  DiscreteValues discrete;
+
+  // VectorValues stores the continuous components of the HybridValues.
+  VectorValues continuous;
+
+  // Default constructor creates an empty HybridValues.
+  HybridValues() : discrete(), continuous() {}
+
+  // Construct from DiscreteValues and VectorValues.
+  HybridValues(const DiscreteValues& dv, const VectorValues& cv)
+      : discrete(dv), continuous(cv) {}
+
+  // print required by Testable for unit testing
+  void print(const std::string& s = "HybridValues",
+             const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
+    std::cout << s << ": \n";
+    discrete.print("  Discrete", keyFormatter);  // print discrete components
+    continuous.print("  Continuous",
+                     keyFormatter);  // print continuous components
+  }
+
+  // equals required by Testable for unit testing
+  bool equals(const HybridValues& other, double tol = 1e-9) const {
+    return discrete.equals(other.discrete, tol) &&
+           continuous.equals(other.continuous, tol);
+  }
+
+  // Check whether a variable with key \c j exists in DiscreteValues.
+  bool existsDiscrete(Key j) { return (discrete.find(j) != discrete.end()); }
+
+  // Check whether a variable with key \c j exists in VectorValues.
+  bool existsVector(Key j) { return continuous.exists(j); }
+
+  // Check whether a variable with key \c j exists.
+  bool exists(Key j) { return existsDiscrete(j) || existsVector(j); }
+
+  /** Insert a discrete \c value with key \c j. Replaces the existing value if
+   * the key \c j is already used.
+   * @param value The discrete value to be inserted.
+   * @param j The index with which the value will be associated. */
+  void insert(Key j, int value) { discrete[j] = value; }
+
+  /** Insert a vector \c value with key \c j. Throws an invalid_argument
+   * exception if the key \c j is already used.
+   * @param value The vector to be inserted.
+   * @param j The index with which the value will be associated.
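+   *
+   * Example (sketch, hypothetical key): hv.insert(X(1), Vector1(0.5));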
+   */
+  void insert(Key j, const Vector& value) { continuous.insert(j, value); }
+
+  // TODO(Shangjie)- update() and insert_or_assign(), similar to Values.h
+
+  /**
+   * Read/write access to the discrete value with key \c j, throws
+   * std::out_of_range if \c j does not exist.
+   */
+  size_t& atDiscrete(Key j) { return discrete.at(j); }
+
+  /**
+   * Read/write access to the vector value with key \c j, throws
+   * std::out_of_range if \c j does not exist.
+   */
+  Vector& at(Key j) { return continuous.at(j); }
+
+  /// @name Wrapper support
+  /// @{
+
+  /**
+   * @brief Output as an html table.
+   *
+   * @param keyFormatter function that formats keys.
+   * @return string html output.
+   */
+  std::string html(
+      const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
+    std::stringstream ss;
+    ss << this->discrete.html(keyFormatter);
+    ss << this->continuous.html(keyFormatter);
+    return ss.str();
+  }
+
+  /// @}
+};
+
+// traits
+template <>
+struct traits<HybridValues> : public Testable<HybridValues> {};
+
+}  // namespace gtsam
diff --git a/gtsam/hybrid/MixtureFactor.h b/gtsam/hybrid/MixtureFactor.h
new file mode 100644
index 0000000000..3cd21e32e5
--- /dev/null
+++ b/gtsam/hybrid/MixtureFactor.h
@@ -0,0 +1,244 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file MixtureFactor.h
+ * @brief Nonlinear Mixture factor of continuous and discrete.
+ * @author Kevin Doherty, kdoherty@mit.edu
+ * @author Varun Agrawal
+ * @date December 2021
+ */
+
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+#include
+#include
+
+namespace gtsam {
+
+/**
+ * @brief Implementation of a discrete conditional mixture factor.
+ *
+ * Implements a joint discrete-continuous factor where the discrete variable
+ * serves to "select" a mixture component corresponding to a NonlinearFactor
+ * type of measurement.
+ *
+ * This class stores all factors as HybridFactors, which can then be typecast
+ * to one of (NonlinearFactor, GaussianFactor) so that the correct operation
+ * is performed.
+ */
+class MixtureFactor : public HybridFactor {
+ public:
+  using Base = HybridFactor;
+  using This = MixtureFactor;
+  using shared_ptr = boost::shared_ptr<This>;
+  using sharedFactor = boost::shared_ptr<NonlinearFactor>;
+
+  /**
+   * @brief typedef for DecisionTree which has Keys as node labels and
+   * NonlinearFactor as leaf nodes.
+   */
+  using Factors = DecisionTree<Key, sharedFactor>;
+
+ private:
+  /// Decision tree of nonlinear factors indexed by discrete keys.
+  Factors factors_;
+  bool normalized_;
+
+ public:
+  MixtureFactor() = default;
+
+  /**
+   * @brief Construct from Decision tree.
+   *
+   * @param keys Vector of keys for continuous factors.
+   * @param discreteKeys Vector of discrete keys.
+   * @param factors Decision tree of shared factors.
+   * @param normalized Flag indicating if the factor error is already
+   * normalized.
+   */
+  MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
+                const Factors& factors, bool normalized = false)
+      : Base(keys, discreteKeys), factors_(factors), normalized_(normalized) {}
+
+  /**
+   * @brief Convenience constructor that generates the underlying factor
+   * decision tree for us.
+ * + * Here it is important that the vector of factors has the correct number of + * elements based on the number of discrete keys and the cardinality of the + * keys, so that the decision tree is constructed appropriately. + * + * @tparam FACTOR The type of the factor shared pointers being passed in. Will + * be typecast to NonlinearFactor shared pointers. + * @param keys Vector of keys for continuous factors. + * @param discreteKeys Vector of discrete keys. + * @param factors Vector of shared pointers to factors. + * @param normalized Flag indicating if the factor error is already + * normalized. + */ + template + MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys, + const std::vector>& factors, + bool normalized = false) + : Base(keys, discreteKeys), normalized_(normalized) { + std::vector nonlinear_factors; + for (auto&& f : factors) { + nonlinear_factors.push_back( + boost::dynamic_pointer_cast(f)); + } + factors_ = Factors(discreteKeys, nonlinear_factors); + } + + ~MixtureFactor() = default; + + /** + * @brief Compute error of factor given both continuous and discrete values. + * + * @param continuousVals The continuous Values. + * @param discreteVals The discrete Values. + * @return double The error of this factor. + */ + double error(const Values& continuousVals, + const DiscreteValues& discreteVals) const { + // Retrieve the factor corresponding to the assignment in discreteVals. + auto factor = factors_(discreteVals); + // Compute the error for the selected factor + const double factorError = factor->error(continuousVals); + if (normalized_) return factorError; + return factorError + + this->nonlinearFactorLogNormalizingConstant(factor, continuousVals); + } + + size_t dim() const { + // TODO(Varun) + throw std::runtime_error("MixtureFactor::dim not implemented."); + } + + /// Testable + /// @{ + + /// print to stdout + void print( + const std::string& s = "MixtureFactor", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override { + std::cout << (s.empty() ? "" : s + " "); + Base::print("", keyFormatter); + std::cout << "\nMixtureFactor\n"; + auto valueFormatter = [](const sharedFactor& v) { + if (v) { + return (boost::format("Nonlinear factor on %d keys") % v->size()).str(); + } else { + return std::string("nullptr"); + } + }; + factors_.print("", keyFormatter, valueFormatter); + } + + /// Check equality + bool equals(const HybridFactor& other, double tol = 1e-9) const override { + // We attempt a dynamic cast from HybridFactor to MixtureFactor. If it + // fails, return false. + if (!dynamic_cast(&other)) return false; + + // If the cast is successful, we'll properly construct a MixtureFactor + // object from `other` + const MixtureFactor& f(static_cast(other)); + + // Ensure that this MixtureFactor and `f` have the same `factors_`. + auto compare = [tol](const sharedFactor& a, const sharedFactor& b) { + return traits::Equals(*a, *b, tol); + }; + if (!factors_.equals(f.factors_, compare)) return false; + + // If everything above passes, and the keys_, discreteKeys_ and normalized_ + // member variables are identical, return true. + return (std::equal(keys_.begin(), keys_.end(), f.keys().begin()) && + (discreteKeys_ == f.discreteKeys_) && + (normalized_ == f.normalized_)); + } + + /// @} + + /// Linearize specific nonlinear factors based on the assignment in + /// discreteValues. 
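+  /// Example (sketch): linearize(values, DiscreteValues{{m1, 0}}) picks the
+  /// factor stored under assignment m1=0 and linearizes it at `values`.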
+  GaussianFactor::shared_ptr linearize(
+      const Values& continuousVals, const DiscreteValues& discreteVals) const {
+    auto factor = factors_(discreteVals);
+    return factor->linearize(continuousVals);
+  }
+
+  /// Linearize all the continuous factors to get a GaussianMixtureFactor.
+  boost::shared_ptr<GaussianMixtureFactor> linearize(
+      const Values& continuousVals) const {
+    // functional to linearize each factor in the decision tree
+    auto linearizeDT = [continuousVals](const sharedFactor& factor) {
+      return factor->linearize(continuousVals);
+    };
+
+    DecisionTree<Key, GaussianFactor::shared_ptr> linearized_factors(
+        factors_, linearizeDT);
+
+    return boost::make_shared<GaussianMixtureFactor>(
+        continuousKeys_, discreteKeys_, linearized_factors);
+  }
+
+  /**
+   * If the component factors are not already normalized, we want to compute
+   * their normalizing constants so that the resulting joint distribution is
+   * appropriately computed. Remember, this is the _negative_ normalizing
+   * constant for the measurement likelihood (since we are minimizing the
+   * _negative_ log-likelihood).
+   */
+  double nonlinearFactorLogNormalizingConstant(const sharedFactor& factor,
+                                               const Values& values) const {
+    // Information matrix (inverse covariance matrix) for the factor.
+    Matrix infoMat;
+
+    // If this is a NoiseModelFactor, we'll use its noiseModel;
+    // otherwise noiseModelFactor will be nullptr.
+    if (auto noiseModelFactor =
+            boost::dynamic_pointer_cast<NoiseModelFactor>(factor)) {
+      // If dynamic cast to NoiseModelFactor succeeded, see if the noise model
+      // is Gaussian
+      auto noiseModel = noiseModelFactor->noiseModel();
+
+      auto gaussianNoiseModel =
+          boost::dynamic_pointer_cast<noiseModel::Gaussian>(noiseModel);
+      if (gaussianNoiseModel) {
+        // If the noise model is Gaussian, retrieve the information matrix
+        infoMat = gaussianNoiseModel->information();
+      } else {
+        // If the factor is not a Gaussian factor, we'll linearize it to get
+        // something with a normalized noise model
+        // TODO(kevin): does this make sense to do? I think maybe not in
+        // general? Should we just yell at the user?
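+        // Fallback: linearize at the given values and read the information
+        // matrix off the resulting Gaussian factor.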
+ auto gaussianFactor = factor->linearize(values); + infoMat = gaussianFactor->information(); + } + } + + // Compute the (negative) log of the normalizing constant + return -(factor->dim() * log(2.0 * M_PI) / 2.0) - + (log(infoMat.determinant()) / 2.0); + } +}; + +} // namespace gtsam diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index bbe1e2400d..207f3ff635 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -4,6 +4,22 @@ namespace gtsam { +#include +class HybridValues { + gtsam::DiscreteValues discrete; + gtsam::VectorValues continuous; + HybridValues(); + HybridValues(const gtsam::DiscreteValues &dv, const gtsam::VectorValues &cv); + void print(string s = "HybridValues", + const gtsam::KeyFormatter& keyFormatter = + gtsam::DefaultKeyFormatter) const; + bool equals(const gtsam::HybridValues& other, double tol) const; + void insert(gtsam::Key j, int value); + void insert(gtsam::Key j, const gtsam::Vector& value); + size_t& atDiscrete(gtsam::Key j); + gtsam::Vector& at(gtsam::Key j); +}; + #include virtual class HybridFactor { void print(string s = "HybridFactor\n", @@ -84,6 +100,7 @@ class HybridBayesNet { size_t size() const; gtsam::KeySet keys() const; const gtsam::HybridConditional* at(size_t i) const; + gtsam::HybridValues optimize() const; void print(string s = "HybridBayesNet\n", const gtsam::KeyFormatter& keyFormatter = gtsam::DefaultKeyFormatter) const; diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index c081b8e87e..52ef0439eb 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -18,20 +18,39 @@ #include #include +#include #include #include +#include +#include #include #include +#include +#include +#include -#pragma once +#include -using gtsam::symbol_shorthand::C; -using gtsam::symbol_shorthand::X; +#pragma once namespace gtsam { + +using symbol_shorthand::C; +using symbol_shorthand::M; +using symbol_shorthand::X; + +/** + * @brief Create a switching system chain. A switching system is a continuous + * system which depends on a discrete mode at each time step of the chain. + * + * @param n The number of chain elements. + * @param keyFunc The functional to help specify the continuous key. + * @param dKeyFunc The functional to help specify the discrete key. + * @return HybridGaussianFactorGraph::shared_ptr + */ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( size_t n, std::function keyFunc = X, - std::function dKeyFunc = C) { + std::function dKeyFunc = M) { HybridGaussianFactorGraph hfg; hfg.add(JacobianFactor(keyFunc(1), I_3x3, Z_3x1)); @@ -54,9 +73,19 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( return boost::make_shared(std::move(hfg)); } +/** + * @brief Return the ordering as a binary tree such that all parent nodes are + * above their children. + * + * This will result in a nested dissection Bayes tree after elimination. + * + * @param input The original ordering. 
+ * @return std::pair> + */ inline std::pair> makeBinaryOrdering( std::vector &input) { KeyVector new_order; + std::vector levels(input.size()); std::function::iterator, std::vector::iterator, int)> @@ -79,8 +108,107 @@ inline std::pair> makeBinaryOrdering( bsg(input.begin(), input.end(), 0); std::reverse(new_order.begin(), new_order.end()); - // std::reverse(levels.begin(), levels.end()); + return {new_order, levels}; } +/* *************************************************************************** + */ +using MotionModel = BetweenFactor; +// using MotionMixture = MixtureFactor; + +// Test fixture with switching network. +struct Switching { + size_t K; + DiscreteKeys modes; + HybridNonlinearFactorGraph nonlinearFactorGraph; + HybridGaussianFactorGraph linearizedFactorGraph; + Values linearizationPoint; + + /// Create with given number of time steps. + Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1) + : K(K) { + using symbol_shorthand::M; + using symbol_shorthand::X; + + // Create DiscreteKeys for binary K modes, modes[0] will not be used. + for (size_t k = 0; k <= K; k++) { + modes.emplace_back(M(k), 2); + } + + // Create hybrid factor graph. + // Add a prior on X(1). + auto prior = boost::make_shared>( + X(1), 0, noiseModel::Isotropic::Sigma(1, prior_sigma)); + nonlinearFactorGraph.push_nonlinear(prior); + + // Add "motion models". + for (size_t k = 1; k < K; k++) { + KeyVector keys = {X(k), X(k + 1)}; + auto motion_models = motionModels(k); + std::vector components; + for (auto &&f : motion_models) { + components.push_back(boost::dynamic_pointer_cast(f)); + } + nonlinearFactorGraph.emplace_hybrid( + keys, DiscreteKeys{modes[k]}, components); + } + + // Add measurement factors + auto measurement_noise = noiseModel::Isotropic::Sigma(1, 0.1); + for (size_t k = 2; k <= K; k++) { + nonlinearFactorGraph.emplace_nonlinear>( + X(k), 1.0 * (k - 1), measurement_noise); + } + + // Add "mode chain" + addModeChain(&nonlinearFactorGraph); + + // Create the linearization point. 
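+    // Each X(k) is initialized to k, so consecutive states differ by 1.0,
+    // i.e. the linearization point follows the "moving" motion hypothesis.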
+ for (size_t k = 1; k <= K; k++) { + linearizationPoint.insert(X(k), static_cast(k)); + } + + linearizedFactorGraph = nonlinearFactorGraph.linearize(linearizationPoint); + } + + // Create motion models for a given time step + static std::vector motionModels(size_t k, + double sigma = 1.0) { + using symbol_shorthand::M; + using symbol_shorthand::X; + + auto noise_model = noiseModel::Isotropic::Sigma(1, sigma); + auto still = + boost::make_shared(X(k), X(k + 1), 0.0, noise_model), + moving = + boost::make_shared(X(k), X(k + 1), 1.0, noise_model); + return {still, moving}; + } + + // Add "mode chain" to HybridNonlinearFactorGraph + void addModeChain(HybridNonlinearFactorGraph *fg) { + auto prior = boost::make_shared(modes[1], "1/1"); + fg->push_discrete(prior); + for (size_t k = 1; k < K - 1; k++) { + auto parents = {modes[k]}; + auto conditional = boost::make_shared( + modes[k + 1], parents, "1/2 3/2"); + fg->push_discrete(conditional); + } + } + + // Add "mode chain" to HybridGaussianFactorGraph + void addModeChain(HybridGaussianFactorGraph *fg) { + auto prior = boost::make_shared(modes[1], "1/1"); + fg->push_discrete(prior); + for (size_t k = 1; k < K - 1; k++) { + auto parents = {modes[k]}; + auto conditional = boost::make_shared( + modes[k + 1], parents, "1/2 3/2"); + fg->push_discrete(conditional); + } + } +}; + } // namespace gtsam diff --git a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp index 552bb18f59..e4bd0e0843 100644 --- a/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp +++ b/gtsam/hybrid/tests/testGaussianHybridFactorGraph.cpp @@ -17,6 +17,7 @@ #include #include +#include #include #include #include @@ -30,6 +31,7 @@ #include #include #include +#include #include #include #include @@ -52,32 +54,36 @@ using namespace boost::assign; using namespace std; using namespace gtsam; -using gtsam::symbol_shorthand::C; using gtsam::symbol_shorthand::D; +using gtsam::symbol_shorthand::M; using gtsam::symbol_shorthand::X; using gtsam::symbol_shorthand::Y; /* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, creation) { - HybridConditional test; +TEST(HybridGaussianFactorGraph, Creation) { + HybridConditional conditional; HybridGaussianFactorGraph hfg; - hfg.add(HybridGaussianFactor(JacobianFactor(0, I_3x3, Z_3x1))); + hfg.add(HybridGaussianFactor(JacobianFactor(X(0), I_3x3, Z_3x1))); + + // Define a gaussian mixture conditional P(x0|x1, c0) and add it to the factor + // graph + GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}), + GaussianMixture::Conditionals( + M(0), + boost::make_shared( + X(0), Z_3x1, I_3x3, X(1), I_3x3), + boost::make_shared( + X(0), Vector3::Ones(), I_3x3, X(1), I_3x3))); + hfg.add(gm); - GaussianMixture clgc( - {X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{C(0), 2}), - GaussianMixture::Conditionals( - C(0), - boost::make_shared(X(0), Z_3x1, I_3x3, X(1), - I_3x3), - boost::make_shared(X(0), Vector3::Ones(), I_3x3, - X(1), I_3x3))); - GTSAM_PRINT(clgc); + EXPECT_LONGS_EQUAL(2, hfg.size()); } /* ************************************************************************* */ -TEST(HybridGaussianFactorGraph, eliminate) { +TEST(HybridGaussianFactorGraph, EliminateSequential) { + // Test elimination of a single variable. 
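+  // Eliminating key 0 from the single prior factor below should give a Bayes
+  // net with one Gaussian conditional.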
   HybridGaussianFactorGraph hfg;
 
   hfg.add(HybridGaussianFactor(JacobianFactor(0, I_3x3, Z_3x1)));
 
@@ -88,13 +94,15 @@
 }
 
 /* ************************************************************************* */
-TEST(HybridGaussianFactorGraph, eliminateMultifrontal) {
+TEST(HybridGaussianFactorGraph, EliminateMultifrontal) {
+  // Test multifrontal elimination
   HybridGaussianFactorGraph hfg;
 
-  DiscreteKey c(C(1), 2);
+  DiscreteKey m(M(1), 2);
 
+  // Add priors on x0 and m1
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
-  hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8})));
+  hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8})));
 
   Ordering ordering;
   ordering.push_back(X(0));
@@ -108,117 +116,111 @@
 TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
   HybridGaussianFactorGraph hfg;
 
-  DiscreteKey c1(C(1), 2);
+  DiscreteKey m1(M(1), 2);
 
+  // Add prior on x0
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
+  // Add factor between x0 and x1
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
+  // Add a gaussian mixture factor ϕ(x1, m1)
   DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-      C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
+      M(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
 
-  hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
+  hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt));
 
   auto result =
-      hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)}));
+      hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {M(1)}));
 
   auto dc = result->at(2)->asDiscreteConditional();
   DiscreteValues dv;
-  dv[C(1)] = 0;
-  EXPECT_DOUBLES_EQUAL(0.6225, dc->operator()(dv), 1e-3);
+  dv[M(1)] = 0;
+  EXPECT_DOUBLES_EQUAL(1, dc->operator()(dv), 1e-3);
 }
 
 /* ************************************************************************* */
 TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
   HybridGaussianFactorGraph hfg;
 
-  DiscreteKey c1(C(1), 2);
+  DiscreteKey m1(M(1), 2);
 
+  // Add prior on x0
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
+  // Add factor between x0 and x1
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
-  DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-      C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
-      boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
+  std::vector<GaussianFactor::shared_ptr> factors = {
+      boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
+      boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())};
+  hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors));
 
-  hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
-  // hfg.add(GaussianMixtureFactor({X(0)}, {c1}, dt));
-  hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c1, {2, 8})));
-  hfg.add(HybridDiscreteFactor(
-      DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4")));
-  // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(2), 2}, {C(3), 2}}, "1
-  // 2 3 4"))); hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(3), 2},
-  // {C(1), 2}}, "1 2 2 1")));
+  // Discrete probability table for m1
+  hfg.add(DecisionTreeFactor(m1, {2, 8}));
+  // Joint discrete probability table for m1, m2
+  hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
 
-  auto result = hfg.eliminateSequential(
-      Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)}));
+  HybridBayesNet::shared_ptr result = hfg.eliminateSequential(
+      Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)}));
 
-  GTSAM_PRINT(*result);
+  // There are 4 variables (2 continuous + 2 discrete) in the bayes net.
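+  // With the continuous variables eliminated first, the net factors roughly
+  // as P(x0 | x1) P(x1 | m1) P(m1 | m2) P(m2), one conditional per variable.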
+  EXPECT_LONGS_EQUAL(4, result->size());
 }
 
 /* ************************************************************************* */
 TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
   HybridGaussianFactorGraph hfg;
 
-  DiscreteKey c1(C(1), 2);
+  DiscreteKey m1(M(1), 2);
 
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
-  // DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-  //     C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
-  //     boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
-
-  // hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));
   hfg.add(GaussianMixtureFactor::FromFactors(
-      {X(1)}, {{C(1), 2}},
+      {X(1)}, {{M(1), 2}},
       {boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
        boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));
 
-  // hfg.add(GaussianMixtureFactor({X(0)}, {c1}, dt));
-  hfg.add(DecisionTreeFactor(c1, {2, 8}));
-  hfg.add(DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4"));
-  // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(2), 2}, {C(3), 2}}, "1
-  // 2 3 4"))); hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(3), 2},
-  // {C(1), 2}}, "1 2 2 1")));
+  hfg.add(DecisionTreeFactor(m1, {2, 8}));
+  hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
 
-  auto result = hfg.eliminateMultifrontal(
-      Ordering::ColamdConstrainedLast(hfg, {C(1), C(2)}));
+  HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal(
+      Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)}));
 
-  GTSAM_PRINT(*result);
-  GTSAM_PRINT(*result->marginalFactor(C(2)));
+  // The bayes tree should have 3 cliques
+  EXPECT_LONGS_EQUAL(3, result->size());
+  // GTSAM_PRINT(*result);
+  // GTSAM_PRINT(*result->marginalFactor(M(2)));
 }
 
 /* ************************************************************************* */
 TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
   HybridGaussianFactorGraph hfg;
 
-  DiscreteKey c(C(1), 2);
+  DiscreteKey m(M(1), 2);
 
+  // Prior on x0
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
+  // Factor between x0-x1
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
+  // Decision tree with different modes on x1
   DecisionTree<Key, GaussianFactor::shared_ptr> dt(
-      C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
+      M(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));
 
-  hfg.add(GaussianMixtureFactor({X(1)}, {c}, dt));
-  hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8})));
-  // hfg.add(HybridDiscreteFactor(DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1
-  // 2 3 4")));
+  // Hybrid factor P(x1|m1)
+  hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt));
+  // Prior factor on m1
+  hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8})));
 
-  auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {C(1)});
+  // Get a constrained ordering keeping m1 last
+  auto ordering_full = Ordering::ColamdConstrainedLast(hfg, {M(1)});
 
+  // Returns a Hybrid Bayes Tree with distribution P(x0|x1)P(x1|m1)P(m1)
   HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full);
 
-  GTSAM_PRINT(*hbt);
-  /*
-  Explanation: the Junction tree will need to reeliminate to get to the marginal
-  on X(1), which is not possible because it involves eliminating discrete before
-  continuous. The solution to this, however, is in Murphy02. TLDR is that this
-  is 1. expensive and 2. inexact. neverless it is doable. And I believe that we
-  should do this.
- */ + EXPECT_LONGS_EQUAL(3, hbt->size()); } /* ************************************************************************* */ @@ -233,66 +235,72 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1)); { - // DecisionTree dt( - // C(0), boost::make_shared(X(0), I_3x3, Z_3x1), - // boost::make_shared(X(0), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor::FromFactors( - {X(0)}, {{C(0), 2}}, + {X(0)}, {{M(0), 2}}, {boost::make_shared(X(0), I_3x3, Z_3x1), boost::make_shared(X(0), I_3x3, Vector3::Ones())})); DecisionTree dt1( - C(1), boost::make_shared(X(2), I_3x3, Z_3x1), + M(1), boost::make_shared(X(2), I_3x3, Z_3x1), boost::make_shared(X(2), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(2)}, {{C(1), 2}}, dt1)); + hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1)); } - // hfg.add(HybridDiscreteFactor(DecisionTreeFactor(c, {2, 8}))); hfg.add(HybridDiscreteFactor( - DecisionTreeFactor({{C(1), 2}, {C(2), 2}}, "1 2 3 4"))); + DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"))); hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1)); hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1)); { DecisionTree dt( - C(3), boost::make_shared(X(3), I_3x3, Z_3x1), + M(3), boost::make_shared(X(3), I_3x3, Z_3x1), boost::make_shared(X(3), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(3)}, {{C(3), 2}}, dt)); + hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, dt)); DecisionTree dt1( - C(2), boost::make_shared(X(5), I_3x3, Z_3x1), + M(2), boost::make_shared(X(5), I_3x3, Z_3x1), boost::make_shared(X(5), I_3x3, Vector3::Ones())); - hfg.add(GaussianMixtureFactor({X(5)}, {{C(2), 2}}, dt1)); + hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1)); } auto ordering_full = - Ordering::ColamdConstrainedLast(hfg, {C(0), C(1), C(2), C(3)}); - - GTSAM_PRINT(ordering_full); + Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)}); HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full); - GTSAM_PRINT(*hbt); - - GTSAM_PRINT(*remaining); + // 9 cliques in the bayes tree and 0 remaining variables to eliminate. + EXPECT_LONGS_EQUAL(9, hbt->size()); + EXPECT_LONGS_EQUAL(0, remaining->size()); - hbt->dot(std::cout); /* - Explanation: the Junction tree will need to reeliminate to get to the marginal - on X(1), which is not possible because it involves eliminating discrete before - continuous. The solution to this, however, is in Murphy02. TLDR is that this - is 1. expensive and 2. inexact. neverless it is doable. And I believe that we - should do this. + (Fan) Explanation: the Junction tree will need to reeliminate to get to the + marginal on X(1), which is not possible because it involves eliminating + discrete before continuous. The solution to this, however, is in Murphy02. + TLDR is that this is 1. expensive and 2. inexact. nevertheless it is doable. + And I believe that we should do this. 
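+  (Murphy02 presumably refers to Kevin Murphy's 2002 work on dynamic
+  Bayesian networks and switching Kalman filters, where such mixtures are
+  kept tractable by collapsing or pruning components.)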
*/ } +void dotPrint(const HybridGaussianFactorGraph::shared_ptr &hfg, + const HybridBayesTree::shared_ptr &hbt, + const Ordering &ordering) { + DotWriter dw; + dw.positionHints['c'] = 2; + dw.positionHints['x'] = 1; + std::cout << hfg->dot(DefaultKeyFormatter, dw); + std::cout << "\n"; + hbt->dot(std::cout); + + std::cout << "\n"; + std::cout << hfg->eliminateSequential(ordering)->dot(DefaultKeyFormatter, dw); +} + /* ************************************************************************* */ // TODO(fan): make a graph like Varun's paper one TEST(HybridGaussianFactorGraph, Switching) { @@ -303,13 +311,13 @@ TEST(HybridGaussianFactorGraph, Switching) { // X(3), X(7) // X(2), X(8) // X(1), X(4), X(6), X(9) - // C(5) will be the center, C(1-4), C(6-8) - // C(3), C(7) - // C(1), C(4), C(2), C(6), C(8) + // M(5) will be the center, M(1-4), M(6-8) + // M(3), M(7) + // M(1), M(4), M(2), M(6), M(8) // auto ordering_full = // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), // X(5), - // C(1), C(4), C(2), C(6), C(8), C(3), C(7), C(5)}); + // M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)}); KeyVector ordering; { @@ -326,79 +334,32 @@ TEST(HybridGaussianFactorGraph, Switching) { for (auto &l : lvls) { l = -l; } - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); - std::cout << "\n"; } { std::vector naturalC(N - 1); std::iota(naturalC.begin(), naturalC.end(), 1); std::vector ordC; std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return C(x); }); + [](int x) { return M(x); }); KeyVector ndC; std::vector lvls; // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); std::tie(ndC, lvls) = makeBinaryOrdering(ordC); std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering)); - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); } auto ordering_full = Ordering(ordering); - // auto ordering_full = - // Ordering(); - - // for (int i = 1; i <= 9; i++) { - // ordering_full.push_back(X(i)); - // } - - // for (int i = 1; i < 9; i++) { - // ordering_full.push_back(C(i)); - // } - - // auto ordering_full = - // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), - // X(5), - // C(1), C(2), C(3), C(4), C(5), C(6), C(7), C(8)}); - // GTSAM_PRINT(*hfg); - GTSAM_PRINT(ordering_full); + // GTSAM_PRINT(ordering_full); HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full); - // GTSAM_PRINT(*hbt); - - // GTSAM_PRINT(*remaining); - - { - DotWriter dw; - dw.positionHints['c'] = 2; - dw.positionHints['x'] = 1; - std::cout << hfg->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; - hbt->dot(std::cout); - } - - { - DotWriter dw; - // dw.positionHints['c'] = 2; - // dw.positionHints['x'] = 1; - std::cout << "\n"; - std::cout << hfg->eliminateSequential(ordering_full) - ->dot(DefaultKeyFormatter, dw); - } - /* - Explanation: the Junction tree will need to reeliminate to get to the marginal - on X(1), which is not possible because it involves eliminating discrete before - continuous. The solution to this, however, is in Murphy02. TLDR is that this - is 1. expensive and 2. inexact. neverless it is doable. And I believe that we - should do this. - */ - hbt->marginalFactor(C(11))->print("HBT: "); + // 12 cliques in the bayes tree and 0 remaining variables to eliminate. 
+ EXPECT_LONGS_EQUAL(12, hbt->size()); + EXPECT_LONGS_EQUAL(0, remaining->size()); } /* ************************************************************************* */ @@ -411,13 +372,13 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { // X(3), X(7) // X(2), X(8) // X(1), X(4), X(6), X(9) - // C(5) will be the center, C(1-4), C(6-8) - // C(3), C(7) - // C(1), C(4), C(2), C(6), C(8) + // M(5) will be the center, M(1-4), M(6-8) + // M(3), M(7) + // M(1), M(4), M(2), M(6), M(8) // auto ordering_full = // Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7), // X(5), - // C(1), C(4), C(2), C(6), C(8), C(3), C(7), C(5)}); + // M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)}); KeyVector ordering; { @@ -434,67 +395,37 @@ TEST(HybridGaussianFactorGraph, SwitchingISAM) { for (auto &l : lvls) { l = -l; } - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); - std::cout << "\n"; } { std::vector naturalC(N - 1); std::iota(naturalC.begin(), naturalC.end(), 1); std::vector ordC; std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC), - [](int x) { return C(x); }); + [](int x) { return M(x); }); KeyVector ndC; std::vector lvls; // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering)); std::tie(ndC, lvls) = makeBinaryOrdering(ordC); std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering)); - std::copy(lvls.begin(), lvls.end(), - std::ostream_iterator(std::cout, ",")); } auto ordering_full = Ordering(ordering); - // GTSAM_PRINT(*hfg); - GTSAM_PRINT(ordering_full); - HybridBayesTree::shared_ptr hbt; HybridGaussianFactorGraph::shared_ptr remaining; std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full); - // GTSAM_PRINT(*hbt); - - // GTSAM_PRINT(*remaining); - - { - DotWriter dw; - dw.positionHints['c'] = 2; - dw.positionHints['x'] = 1; - std::cout << hfg->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; - hbt->dot(std::cout); - } - - { - DotWriter dw; - // dw.positionHints['c'] = 2; - // dw.positionHints['x'] = 1; - std::cout << "\n"; - std::cout << hfg->eliminateSequential(ordering_full) - ->dot(DefaultKeyFormatter, dw); - } - auto new_fg = makeSwitchingChain(12); auto isam = HybridGaussianISAM(*hbt); - { - HybridGaussianFactorGraph factorGraph; - factorGraph.push_back(new_fg->at(new_fg->size() - 2)); - factorGraph.push_back(new_fg->at(new_fg->size() - 1)); - isam.update(factorGraph); - std::cout << isam.dot(); - isam.marginalFactor(C(11))->print(); - } + // Run an ISAM update. 
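+  // Only the two newest factors of the longer 12-step chain are added here,
+  // mimicking measurements arriving incrementally.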
+ HybridGaussianFactorGraph factorGraph; + factorGraph.push_back(new_fg->at(new_fg->size() - 2)); + factorGraph.push_back(new_fg->at(new_fg->size() - 1)); + isam.update(factorGraph); + + // ISAM should have 12 factors after the last update + EXPECT_LONGS_EQUAL(12, isam.size()); } /* ************************************************************************* */ @@ -517,23 +448,8 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { ordX.emplace_back(Y(i)); } - // { - // KeyVector ndX; - // std::vector lvls; - // std::tie(ndX, lvls) = makeBinaryOrdering(naturalX); - // std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering)); - // std::copy(lvls.begin(), lvls.end(), - // std::ostream_iterator(std::cout, ",")); - // std::cout << "\n"; - - // for (size_t i = 0; i < N; i++) { - // ordX.emplace_back(X(ndX[i])); - // ordX.emplace_back(Y(ndX[i])); - // } - // } - for (size_t i = 1; i <= N - 1; i++) { - ordX.emplace_back(C(i)); + ordX.emplace_back(M(i)); } for (size_t i = 1; i <= N - 1; i++) { ordX.emplace_back(D(i)); @@ -545,8 +461,8 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { dw.positionHints['c'] = 0; dw.positionHints['d'] = 3; dw.positionHints['y'] = 2; - std::cout << hfg->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; + // std::cout << hfg->dot(DefaultKeyFormatter, dw); + // std::cout << "\n"; } { @@ -555,10 +471,10 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { // dw.positionHints['c'] = 0; // dw.positionHints['d'] = 3; dw.positionHints['x'] = 1; - std::cout << "\n"; + // std::cout << "\n"; // std::cout << hfg->eliminateSequential(Ordering(ordX)) // ->dot(DefaultKeyFormatter, dw); - hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout); + // hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout); } Ordering ordering_partial; @@ -566,25 +482,46 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { ordering_partial.emplace_back(X(i)); ordering_partial.emplace_back(Y(i)); } + HybridBayesNet::shared_ptr hbn; + HybridGaussianFactorGraph::shared_ptr remaining; + std::tie(hbn, remaining) = + hfg->eliminatePartialSequential(ordering_partial); + + EXPECT_LONGS_EQUAL(14, hbn->size()); + EXPECT_LONGS_EQUAL(11, remaining->size()); + { - HybridBayesNet::shared_ptr hbn; - HybridGaussianFactorGraph::shared_ptr remaining; - std::tie(hbn, remaining) = - hfg->eliminatePartialSequential(ordering_partial); - - // remaining->print(); - { - DotWriter dw; - dw.positionHints['x'] = 1; - dw.positionHints['c'] = 0; - dw.positionHints['d'] = 3; - dw.positionHints['y'] = 2; - std::cout << remaining->dot(DefaultKeyFormatter, dw); - std::cout << "\n"; - } + DotWriter dw; + dw.positionHints['x'] = 1; + dw.positionHints['c'] = 0; + dw.positionHints['d'] = 3; + dw.positionHints['y'] = 2; + // std::cout << remaining->dot(DefaultKeyFormatter, dw); + // std::cout << "\n"; } } +TEST(HybridGaussianFactorGraph, optimize) { + HybridGaussianFactorGraph hfg; + + DiscreteKey c1(C(1), 2); + + hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1)); + hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1)); + + DecisionTree dt( + C(1), boost::make_shared(X(1), I_3x3, Z_3x1), + boost::make_shared(X(1), I_3x3, Vector3::Ones())); + + hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt)); + + auto result = + hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)})); + + HybridValues hv = result->optimize(); + + EXPECT(assert_equal(hv.atDiscrete(C(1)), int(0))); +} /* ************************************************************************* */ int main() { TestResult tr; diff --git 
a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp index 36477218b9..cb9068c303 100644 --- a/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp +++ b/gtsam/hybrid/tests/testGaussianMixtureFactor.cpp @@ -74,7 +74,7 @@ TEST(GaussianMixtureFactor, Sum) { // Check that number of keys is 3 EXPECT_LONGS_EQUAL(3, mixtureFactorA.keys().size()); - // Check that number of discrete keys is 1 // TODO(Frank): should not exist? + // Check that number of discrete keys is 1 EXPECT_LONGS_EQUAL(1, mixtureFactorA.discreteKeys().size()); // Create sum of two mixture factors: it will be a decision tree now on both @@ -104,7 +104,7 @@ TEST(GaussianMixtureFactor, Printing) { GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); std::string expected = - R"(Hybrid x1 x2; 1 ]{ + R"(Hybrid [x1 x2; 1]{ Choice(1) 0 Leaf : A[x1] = [ diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp new file mode 100644 index 0000000000..f3db839557 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -0,0 +1,93 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridBayesNet.cpp + * @brief Unit tests for HybridBayesNet + * @author Varun Agrawal + * @author Fan Jiang + * @author Frank Dellaert + * @date December 2021 + */ + +#include + +#include "Switching.h" + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::M; +using symbol_shorthand::X; + +static const DiscreteKey Asia(0, 2); + +/* ****************************************************************************/ +// Test creation +TEST(HybridBayesNet, Creation) { + HybridBayesNet bayesNet; + + bayesNet.add(Asia, "99/1"); + + DiscreteConditional expected(Asia, "99/1"); + + CHECK(bayesNet.atDiscrete(0)); + auto& df = *bayesNet.atDiscrete(0); + EXPECT(df.equals(expected)); +} + +/* ****************************************************************************/ +// Test choosing an assignment of conditionals +TEST(HybridBayesNet, Choose) { + Switching s(4); + + Ordering ordering; + for (auto&& kvp : s.linearizationPoint) { + ordering += kvp.key; + } + + HybridBayesNet::shared_ptr hybridBayesNet; + HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; + std::tie(hybridBayesNet, remainingFactorGraph) = + s.linearizedFactorGraph.eliminatePartialSequential(ordering); + + DiscreteValues assignment; + assignment[M(1)] = 1; + assignment[M(2)] = 1; + assignment[M(3)] = 0; + + GaussianBayesNet gbn = hybridBayesNet->choose(assignment); + + EXPECT_LONGS_EQUAL(4, gbn.size()); + + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atGaussian(0)))(assignment), + *gbn.at(0))); + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atGaussian(1)))(assignment), + *gbn.at(1))); + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atGaussian(2)))(assignment), + *gbn.at(2))); + EXPECT(assert_equal(*(*boost::dynamic_pointer_cast( + hybridBayesNet->atGaussian(3)))(assignment), + *gbn.at(3))); +} + +/* ************************************************************************* */ 
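+// For reference, choose(assignment) fixes every discrete key to the given
+// mode and returns the corresponding pure-Gaussian Bayes net, which can then
+// be solved with the usual Gaussian machinery. A minimal sketch, reusing the
+// names from the test above:
+//
+//   DiscreteValues assignment;
+//   assignment[M(1)] = 1;
+//   assignment[M(2)] = 1;
+//   assignment[M(3)] = 0;
+//   GaussianBayesNet gbn = hybridBayesNet->choose(assignment);
+//   VectorValues delta = gbn.optimize();  // continuous solution for this mode
+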
+int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridFactorGraph.cpp b/gtsam/hybrid/tests/testHybridFactorGraph.cpp new file mode 100644 index 0000000000..f5b4ec0b19 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridFactorGraph.cpp @@ -0,0 +1,45 @@ +/* ---------------------------------------------------------------------------- + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + * See LICENSE for the license information + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridFactorGraph.cpp + * @brief Unit tests for HybridFactorGraph + * @author Varun Agrawal + * @date May 2021 + */ + +#include +#include +#include +#include +#include +#include +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::L; +using symbol_shorthand::M; +using symbol_shorthand::X; + +/* **************************************************************************** + * Test that any linearizedFactorGraph gaussian factors are appended to the + * existing gaussian factor graph in the hybrid factor graph. + */ +TEST(HybridFactorGraph, Constructor) { + // Initialize the hybrid factor graph + HybridFactorGraph fg; +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridIncremental.cpp b/gtsam/hybrid/tests/testHybridIncremental.cpp new file mode 100644 index 0000000000..4449aba0b3 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridIncremental.cpp @@ -0,0 +1,563 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridIncremental.cpp + * @brief Unit tests for incremental inference + * @author Fan Jiang, Varun Agrawal, Frank Dellaert + * @date Jan 2021 + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "Switching.h" + +// Include for test suite +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::L; +using symbol_shorthand::M; +using symbol_shorthand::W; +using symbol_shorthand::X; +using symbol_shorthand::Y; +using symbol_shorthand::Z; + +/* ****************************************************************************/ +// Test if we can perform elimination incrementally. 
+TEST(HybridGaussianElimination, IncrementalElimination) {
+  Switching switching(3);
+  HybridGaussianISAM isam;
+  HybridGaussianFactorGraph graph1;
+
+  // Create initial factor graph
+  //    *    *    *
+  //    |    |    |
+  //   X1 -*- X2 -*- X3
+  //    \*-M1-*/
+  graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
+  graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
+  graph1.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
+  graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
+
+  // Run update step
+  isam.update(graph1);
+
+  // Check that after update we have 3 hybrid Bayes net nodes:
+  // P(X1 | X2, M1), P(X2, X3 | M1, M2), and P(M1, M2)
+  EXPECT_LONGS_EQUAL(3, isam.size());
+  EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector{X(1)});
+  EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)}));
+  EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
+  EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)}));
+
+  /********************************************************/
+  // New factor graph for incremental update.
+  HybridGaussianFactorGraph graph2;
+
+  graph2.push_back(switching.linearizedFactorGraph.at(3));  // P(X2)
+  graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X3)
+  graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M1, M2)
+
+  isam.update(graph2);
+
+  // Check that after the second update we still have 3 hybrid Bayes net
+  // nodes, with X(2) and X(3) now sharing a clique:
+  // P(X2, X3 | M1, M2)
+  EXPECT_LONGS_EQUAL(3, isam.size());
+  EXPECT(isam[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
+  EXPECT(isam[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)}));
+}
+
+/* ****************************************************************************/
+// Test if we can incrementally do the inference
+TEST(HybridGaussianElimination, IncrementalInference) {
+  Switching switching(3);
+  HybridGaussianISAM isam;
+  HybridGaussianFactorGraph graph1;
+
+  // Create initial factor graph
+  //    *    *    *
+  //    |    |    |
+  //   X1 -*- X2 -*- X3
+  //         |    |
+  //        *-M1 - * - M2
+  graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
+  graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
+  graph1.push_back(switching.linearizedFactorGraph.at(3));  // P(X2)
+  graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
+
+  // Run update step
+  isam.update(graph1);
+
+  auto discreteConditional_m1 =
+      isam[M(1)]->conditional()->asDiscreteConditional();
+  EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)}));
+
+  /********************************************************/
+  // New factor graph for incremental update.
+  HybridGaussianFactorGraph graph2;
+
+  graph2.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
+  graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X3)
+  graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M1, M2)
+
+  isam.update(graph2);
+
+  /********************************************************/
+  // Run batch elimination so we can compare results.
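+  // The ordering below contains only the continuous variables, so the
+  // discrete modes M(1), M(2) are left in the remaining factor graph.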
+ Ordering ordering; + ordering += X(1); + ordering += X(2); + ordering += X(3); + + // Now we calculate the actual factors using full elimination + HybridBayesTree::shared_ptr expectedHybridBayesTree; + HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph; + std::tie(expectedHybridBayesTree, expectedRemainingGraph) = + switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); + + // The densities on X(1) should be the same + auto x1_conditional = + dynamic_pointer_cast(isam[X(1)]->conditional()->inner()); + auto actual_x1_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(1)]->conditional()->inner()); + EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional)); + + // The densities on X(2) should be the same + auto x2_conditional = + dynamic_pointer_cast(isam[X(2)]->conditional()->inner()); + auto actual_x2_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional)); + + // The densities on X(3) should be the same + auto x3_conditional = + dynamic_pointer_cast(isam[X(3)]->conditional()->inner()); + auto actual_x3_conditional = dynamic_pointer_cast( + (*expectedHybridBayesTree)[X(2)]->conditional()->inner()); + EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional)); + + // We only perform manual continuous elimination for 0,0. + // The other discrete probabilities on M(2) are calculated the same way + Ordering discrete_ordering; + discrete_ordering += M(1); + discrete_ordering += M(2); + HybridBayesTree::shared_ptr discreteBayesTree = + expectedRemainingGraph->eliminateMultifrontal(discrete_ordering); + + DiscreteValues m00; + m00[M(1)] = 0, m00[M(2)] = 0; + DiscreteConditional decisionTree = + *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional(); + double m00_prob = decisionTree(m00); + + auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional(); + + // Test if the probability values are as expected with regression tests. + DiscreteValues assignment; + EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5)); + assignment[M(1)] = 0; + assignment[M(2)] = 0; + EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 1; + assignment[M(2)] = 0; + EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 0; + assignment[M(2)] = 1; + EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5)); + assignment[M(1)] = 1; + assignment[M(2)] = 1; + EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5)); + + // Check if the clique conditional generated from incremental elimination + // matches that of batch elimination. 
+ auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); + auto expectedConditional = dynamic_pointer_cast( + (*expectedChordal)[M(2)]->conditional()->inner()); + auto actualConditional = dynamic_pointer_cast( + isam[M(2)]->conditional()->inner()); + EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); +} + +/* ****************************************************************************/ +// Test if we can approximately do the inference +TEST(HybridGaussianElimination, Approx_inference) { + Switching switching(4); + HybridGaussianISAM incrementalHybrid; + HybridGaussianFactorGraph graph1; + + // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4 + for (size_t i = 1; i < 4; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } + + // Add the Gaussian factors, 1 prior on X(1), + // 3 measurements on X(2), X(3), X(4) + graph1.push_back(switching.linearizedFactorGraph.at(0)); + for (size_t i = 4; i <= 7; i++) { + graph1.push_back(switching.linearizedFactorGraph.at(i)); + } + + // Create ordering. + Ordering ordering; + for (size_t j = 1; j <= 4; j++) { + ordering += X(j); + } + + // Now we calculate the actual factors using full elimination + HybridBayesTree::shared_ptr unprunedHybridBayesTree; + HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph; + std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) = + switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); + + size_t maxNrLeaves = 5; + incrementalHybrid.update(graph1); + + incrementalHybrid.prune(M(3), maxNrLeaves); + + /* + unpruned factor is: + Choice(m3) + 0 Choice(m2) + 0 0 Choice(m1) + 0 0 0 Leaf 0.11267528 + 0 0 1 Leaf 0.18576102 + 0 1 Choice(m1) + 0 1 0 Leaf 0.18754662 + 0 1 1 Leaf 0.30623871 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf 0.18576102 + 1 0 1 Leaf 0.30622428 + 1 1 Choice(m1) + 1 1 0 Leaf 0.30623871 + 1 1 1 Leaf 0.5 + + pruned factors is: + Choice(m3) + 0 Choice(m2) + 0 0 Leaf 0 + 0 1 Choice(m1) + 0 1 0 Leaf 0.18754662 + 0 1 1 Leaf 0.30623871 + 1 Choice(m2) + 1 0 Choice(m1) + 1 0 0 Leaf 0 + 1 0 1 Leaf 0.30622428 + 1 1 Choice(m1) + 1 1 0 Leaf 0.30623871 + 1 1 1 Leaf 0.5 + */ + + auto discreteConditional_m1 = *dynamic_pointer_cast( + incrementalHybrid[M(1)]->conditional()->inner()); + EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)})); + + // Get the number of elements which are greater than 0. + auto count = [](const double &value, int count) { + return value > 0 ? count + 1 : count; + }; + // Check that the number of leaves after pruning is 5. + EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0)); + + // Check that the hybrid nodes of the bayes net match those of the pre-pruning + // bayes net, at the same positions. 
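+  // Pruned assignments should map to nullptr leaves, while the surviving
+  // assignments must match the unpruned conditionals exactly.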
+  auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
+      unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
+  auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
+      incrementalHybrid[X(4)]->conditional()->inner());
+
+  std::vector<std::pair<DiscreteValues, double>> assignments =
+      discreteConditional_m1.enumerate();
+  // Loop over all assignments and check the pruned components
+  for (auto &&av : assignments) {
+    const DiscreteValues &assignment = av.first;
+    const double value = av.second;
+
+    if (value == 0.0) {
+      EXPECT(lastDensity(assignment) == nullptr);
+    } else {
+      CHECK(lastDensity(assignment));
+      EXPECT(assert_equal(*unprunedLastDensity(assignment),
+                          *lastDensity(assignment)));
+    }
+  }
+}
+
+/* ****************************************************************************/
+// Test approximate inference with an additional pruning step.
+TEST(HybridGaussianElimination, Incremental_approximate) {
+  Switching switching(5);
+  HybridGaussianISAM incrementalHybrid;
+  HybridGaussianFactorGraph graph1;
+
+  /***** Run Round 1 *****/
+  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+  for (size_t i = 1; i < 4; i++) {
+    graph1.push_back(switching.linearizedFactorGraph.at(i));
+  }
+
+  // Add the Gaussian factors, 1 prior on X(1),
+  // 3 measurements on X(2), X(3), X(4)
+  graph1.push_back(switching.linearizedFactorGraph.at(0));
+  for (size_t i = 5; i <= 7; i++) {
+    graph1.push_back(switching.linearizedFactorGraph.at(i));
+  }
+
+  // Run update with pruning
+  size_t maxComponents = 5;
+  incrementalHybrid.update(graph1);
+  incrementalHybrid.prune(M(3), maxComponents);
+
+  // Check if we have a bayes tree with 4 hybrid nodes,
+  // each with 2, 4, 8, and 5 (pruned) leaves respectively.
+  EXPECT_LONGS_EQUAL(4, incrementalHybrid.size());
+  EXPECT_LONGS_EQUAL(
+      2, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents());
+  EXPECT_LONGS_EQUAL(
+      4, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents());
+  EXPECT_LONGS_EQUAL(
+      5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
+  EXPECT_LONGS_EQUAL(
+      5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
+
+  /***** Run Round 2 *****/
+  HybridGaussianFactorGraph graph2;
+  graph2.push_back(switching.linearizedFactorGraph.at(4));
+  graph2.push_back(switching.linearizedFactorGraph.at(8));
+
+  // Run update with pruning a second time.
+  incrementalHybrid.update(graph2);
+  incrementalHybrid.prune(M(4), maxComponents);
+
+  // Check if we have a bayes tree with pruned hybrid nodes,
+  // with 5 (pruned) leaves.
+  CHECK_EQUAL(5, incrementalHybrid.size());
+  EXPECT_LONGS_EQUAL(
+      5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
+  EXPECT_LONGS_EQUAL(
+      5, incrementalHybrid[X(5)]->conditional()->asMixture()->nrComponents());
+}
+
+/* ************************************************************************/
+// A GTSAM-only test for running inference on a single-legged robot.
+// The leg links are represented by the chain X-Y-Z-W, where X is the base and
+// W is the foot.
+// We use BetweenFactor<Pose2> as constraints between each of the poses.
+TEST(HybridGaussianISAM, NonTrivial) {
+  /*************** Run Round 1 ***************/
+  HybridNonlinearFactorGraph fg;
+
+  // Add a prior on pose x1 at the origin.
+ // A prior factor consists of a mean and + // a noise model (covariance matrix) + Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin + auto priorNoise = noiseModel::Diagonal::Sigmas( + Vector3(0.3, 0.3, 0.1)); // 30cm std on x,y, 0.1 rad on theta + fg.emplace_nonlinear>(X(0), prior, priorNoise); + + // create a noise model for the landmark measurements + auto poseNoise = noiseModel::Isotropic::Sigma(3, 0.1); + + // We model a robot's single leg as X - Y - Z - W + // where X is the base link and W is the foot link. + + // Add connecting poses similar to PoseFactors in GTD + fg.emplace_nonlinear>(X(0), Y(0), Pose2(0, 1.0, 0), + poseNoise); + fg.emplace_nonlinear>(Y(0), Z(0), Pose2(0, 1.0, 0), + poseNoise); + fg.emplace_nonlinear>(Z(0), W(0), Pose2(0, 1.0, 0), + poseNoise); + + // Create initial estimate + Values initial; + initial.insert(X(0), Pose2(0.0, 0.0, 0.0)); + initial.insert(Y(0), Pose2(0.0, 1.0, 0.0)); + initial.insert(Z(0), Pose2(0.0, 2.0, 0.0)); + initial.insert(W(0), Pose2(0.0, 3.0, 0.0)); + + HybridGaussianFactorGraph gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + HybridGaussianISAM inc; + + // Update without pruning + // The result is a HybridBayesNet with no discrete variables + // (equivalent to a GaussianBayesNet). + // Factorization is: + // `P(X | measurements) = P(W0|Z0) P(Z0|Y0) P(Y0|X0) P(X0)` + inc.update(gfg); + + /*************** Run Round 2 ***************/ + using PlanarMotionModel = BetweenFactor; + + // Add odometry factor with discrete modes. + Pose2 odometry(1.0, 0.0, 0.0); + KeyVector contKeys = {W(0), W(1)}; + auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0); + auto still = boost::make_shared(W(0), W(1), Pose2(0, 0, 0), + noise_model), + moving = boost::make_shared(W(0), W(1), odometry, + noise_model); + std::vector components = {moving, still}; + auto mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + fg.push_back(mixtureFactor); + + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(0), X(1), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=1 + fg.emplace_nonlinear>(X(1), Y(1), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(1), Z(1), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(1), W(1), Pose2(-1, 1, 0), + poseNoise); + + initial.insert(X(1), Pose2(1.0, 0.0, 0.0)); + initial.insert(Y(1), Pose2(1.0, 1.0, 0.0)); + initial.insert(Z(1), Pose2(1.0, 2.0, 0.0)); + // The leg link did not move so we set the expected pose accordingly. + initial.insert(W(1), Pose2(0.0, 3.0, 0.0)); + + gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + // Update without pruning + // The result is a HybridBayesNet with 1 discrete variable M(1). + // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) + // P(X0 | X1, W1, M1) P(W1|Z1, X1, M1) P(Z1|Y1, X1, M1) + // P(Y1 | X1, M1)P(X1 | M1)P(M1) + // The MHS tree is a 1 level tree for time indices (1,) with 2 leaves. + inc.update(gfg); + + /*************** Run Round 3 ***************/ + // Add odometry factor with discrete modes. 
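+  // Same "still"/"moving" hypotheses as in Round 2, now gated by the new
+  // discrete mode M(2).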
+ contKeys = {W(1), W(2)}; + still = boost::make_shared(W(1), W(2), Pose2(0, 0, 0), + noise_model); + moving = + boost::make_shared(W(1), W(2), odometry, noise_model); + components = {moving, still}; + mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); + fg.push_back(mixtureFactor); + + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(1), X(2), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=1 + fg.emplace_nonlinear>(X(2), Y(2), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(2), Z(2), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(2), W(2), Pose2(-2, 1, 0), + poseNoise); + + initial.insert(X(2), Pose2(2.0, 0.0, 0.0)); + initial.insert(Y(2), Pose2(2.0, 1.0, 0.0)); + initial.insert(Z(2), Pose2(2.0, 2.0, 0.0)); + initial.insert(W(2), Pose2(0.0, 3.0, 0.0)); + + gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + // Now we prune! + // P(X | measurements) = P(W0|Z0, W1, M1) P(Z0|Y0, W1, M1) P(Y0|X0, W1, M1) + // P(X0 | X1, W1, M1) P(W1|W2, Z1, X1, M1, M2) + // P(Z1| W2, Y1, X1, M1, M2) P(Y1 | W2, X1, M1, M2) + // P(X1 | W2, X2, M1, M2) P(W2|Z2, X2, M1, M2) + // P(Z2|Y2, X2, M1, M2) P(Y2 | X2, M1, M2) + // P(X2 | M1, M2) P(M1, M2) + // The MHS at this point should be a 2 level tree on (1, 2). + // 1 has 2 choices, and 2 has 4 choices. + inc.update(gfg); + inc.prune(M(2), 2); + + /*************** Run Round 4 ***************/ + // Add odometry factor with discrete modes. + contKeys = {W(2), W(3)}; + still = boost::make_shared(W(2), W(3), Pose2(0, 0, 0), + noise_model); + moving = + boost::make_shared(W(2), W(3), odometry, noise_model); + components = {moving, still}; + mixtureFactor = boost::make_shared( + contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); + fg.push_back(mixtureFactor); + + // Add equivalent of ImuFactor + fg.emplace_nonlinear>(X(2), X(3), Pose2(1.0, 0.0, 0), + poseNoise); + // PoseFactors-like at k=3 + fg.emplace_nonlinear>(X(3), Y(3), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Y(3), Z(3), Pose2(0, 1, 0), + poseNoise); + fg.emplace_nonlinear>(Z(3), W(3), Pose2(-3, 1, 0), + poseNoise); + + initial.insert(X(3), Pose2(3.0, 0.0, 0.0)); + initial.insert(Y(3), Pose2(3.0, 1.0, 0.0)); + initial.insert(Z(3), Pose2(3.0, 2.0, 0.0)); + initial.insert(W(3), Pose2(0.0, 3.0, 0.0)); + + gfg = fg.linearize(initial); + fg = HybridNonlinearFactorGraph(); + + // Keep pruning! + inc.update(gfg); + inc.prune(M(3), 3); + + // The final discrete graph should not be empty since we have eliminated + // all continuous variables. + auto discreteTree = inc[M(3)]->conditional()->asDiscreteConditional(); + EXPECT_LONGS_EQUAL(3, discreteTree->size()); + + // Test if the optimal discrete mode assignment is (1, 1, 1). + DiscreteFactorGraph discreteGraph; + discreteGraph.push_back(discreteTree); + DiscreteValues optimal_assignment = discreteGraph.optimize(); + + DiscreteValues expected_assignment; + expected_assignment[M(1)] = 1; + expected_assignment[M(2)] = 1; + expected_assignment[M(3)] = 1; + + EXPECT(assert_equal(expected_assignment, optimal_assignment)); + + // Test if pruning worked correctly by checking that we only have 3 leaves in + // the last node. 
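+  // prune(M(3), 3) keeps at most the 3 most probable discrete assignments,
+  // so each hybrid conditional should retain at most 3 components.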
+ auto lastConditional = inc[X(3)]->conditional()->asMixture(); + EXPECT_LONGS_EQUAL(3, lastConditional->nrComponents()); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} diff --git a/gtsam/hybrid/tests/testHybridLookupDAG.cpp b/gtsam/hybrid/tests/testHybridLookupDAG.cpp new file mode 100644 index 0000000000..0ab012d104 --- /dev/null +++ b/gtsam/hybrid/tests/testHybridLookupDAG.cpp @@ -0,0 +1,272 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testHybridLookupDAG.cpp + * @date Aug, 2022 + * @author Shangjie Xue + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Include for test suite +#include + +#include + +using namespace std; +using namespace gtsam; +using noiseModel::Isotropic; +using symbol_shorthand::M; +using symbol_shorthand::X; + +TEST(HybridLookupTable, basics) { + // create a conditional gaussian node + Matrix S1(2, 2); + S1(0, 0) = 1; + S1(1, 0) = 2; + S1(0, 1) = 3; + S1(1, 1) = 4; + + Matrix S2(2, 2); + S2(0, 0) = 6; + S2(1, 0) = 0.2; + S2(0, 1) = 8; + S2(1, 1) = 0.4; + + Matrix R1(2, 2); + R1(0, 0) = 0.1; + R1(1, 0) = 0.3; + R1(0, 1) = 0.0; + R1(1, 1) = 0.34; + + Matrix R2(2, 2); + R2(0, 0) = 0.1; + R2(1, 0) = 0.3; + R2(0, 1) = 0.0; + R2(1, 1) = 0.34; + + SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); + + Vector2 d1(0.2, 0.5), d2(0.5, 0.2); + + auto conditional0 = boost::make_shared(X(1), d1, R1, + X(2), S1, model), + conditional1 = boost::make_shared(X(1), d2, R2, + X(2), S2, model); + + // Create decision tree + DiscreteKey m1(1, 2); + GaussianMixture::Conditionals conditionals( + {m1}, + vector{conditional0, conditional1}); + // GaussianMixture mixtureFactor2({X(1)}, {X(2)}, {m1}, conditionals); + + boost::shared_ptr mixtureFactor( + new GaussianMixture({X(1)}, {X(2)}, {m1}, conditionals)); + + HybridConditional hc(mixtureFactor); + + GaussianMixture::Conditionals conditional2 = + boost::static_pointer_cast(hc.inner())->conditionals(); + + DiscreteValues dv; + dv[1] = 1; + + VectorValues cv; + cv.insert(X(2), Vector2(0.0, 0.0)); + + HybridValues hv(dv, cv); + + // std::cout << conditional2(values).markdown(); + EXPECT(assert_equal(*conditional2(dv), *conditionals(dv), 1e-6)); + EXPECT(conditional2(dv) == conditionals(dv)); + HybridLookupTable hlt(hc); + + // hlt.argmaxInPlace(&hv); + + HybridLookupDAG dag; + dag.push_back(hlt); + dag.argmax(hv); + + // HybridBayesNet hbn; + // hbn.push_back(hc); + // hbn.optimize(); +} + +TEST(HybridLookupTable, hybrid_argmax) { + Matrix S1(2, 2); + S1(0, 0) = 1; + S1(1, 0) = 0; + S1(0, 1) = 0; + S1(1, 1) = 1; + + Vector2 d1(0.2, 0.5), d2(-0.5, 0.6); + + SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); + + auto conditional0 = + boost::make_shared(X(1), d1, S1, model), + conditional1 = + boost::make_shared(X(1), d2, S1, model); + + DiscreteKey m1(1, 2); + GaussianMixture::Conditionals conditionals( + {m1}, + vector{conditional0, conditional1}); + boost::shared_ptr mixtureFactor( + new 
GaussianMixture({X(1)}, {}, {m1}, conditionals)); + + HybridConditional hc(mixtureFactor); + + DiscreteValues dv; + dv[1] = 1; + VectorValues cv; + // cv.insert(X(2),Vector2(0.0, 0.0)); + HybridValues hv(dv, cv); + + HybridLookupTable hlt(hc); + + hlt.argmaxInPlace(&hv); + + EXPECT(assert_equal(hv.at(X(1)), d2)); +} + +TEST(HybridLookupTable, discrete_argmax) { + DiscreteKey X(0, 2), Y(1, 2); + + auto conditional = boost::make_shared(X | Y = "0/1 3/2"); + + HybridConditional hc(conditional); + + HybridLookupTable hlt(hc); + + DiscreteValues dv; + dv[1] = 0; + VectorValues cv; + // cv.insert(X(2),Vector2(0.0, 0.0)); + HybridValues hv(dv, cv); + + hlt.argmaxInPlace(&hv); + + EXPECT(assert_equal(hv.atDiscrete(0), 1)); + + DecisionTreeFactor f1(X, "2 3"); + auto conditional2 = boost::make_shared(1, f1); + + HybridConditional hc2(conditional2); + + HybridLookupTable hlt2(hc2); + + HybridValues hv2; + + hlt2.argmaxInPlace(&hv2); + + EXPECT(assert_equal(hv2.atDiscrete(0), 1)); +} + +TEST(HybridLookupTable, gaussian_argmax) { + Matrix S1(2, 2); + S1(0, 0) = 1; + S1(1, 0) = 0; + S1(0, 1) = 0; + S1(1, 1) = 1; + + Vector2 d1(0.2, 0.5), d2(-0.5, 0.6); + + SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); + + auto conditional = + boost::make_shared(X(1), d1, S1, X(2), -S1, model); + + HybridConditional hc(conditional); + + HybridLookupTable hlt(hc); + + DiscreteValues dv; + // dv[1]=0; + VectorValues cv; + cv.insert(X(2), d2); + HybridValues hv(dv, cv); + + hlt.argmaxInPlace(&hv); + + EXPECT(assert_equal(hv.at(X(1)), d1 + d2)); +} + +TEST(HybridLookupDAG, argmax) { + Matrix S1(2, 2); + S1(0, 0) = 1; + S1(1, 0) = 0; + S1(0, 1) = 0; + S1(1, 1) = 1; + + Vector2 d1(0.2, 0.5), d2(-0.5, 0.6); + + SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector2(1.0, 0.34)); + + auto conditional0 = + boost::make_shared(X(2), d1, S1, model), + conditional1 = + boost::make_shared(X(2), d2, S1, model); + + DiscreteKey m1(1, 2); + GaussianMixture::Conditionals conditionals( + {m1}, + vector{conditional0, conditional1}); + boost::shared_ptr mixtureFactor( + new GaussianMixture({X(2)}, {}, {m1}, conditionals)); + HybridConditional hc2(mixtureFactor); + HybridLookupTable hlt2(hc2); + + auto conditional2 = + boost::make_shared(X(1), d1, S1, X(2), -S1, model); + + HybridConditional hc1(conditional2); + HybridLookupTable hlt1(hc1); + + DecisionTreeFactor f1(m1, "2 3"); + auto discrete_conditional = boost::make_shared(1, f1); + + HybridConditional hc3(discrete_conditional); + HybridLookupTable hlt3(hc3); + + HybridLookupDAG dag; + dag.push_back(hlt1); + dag.push_back(hlt2); + dag.push_back(hlt3); + auto hv = dag.argmax(); + + EXPECT(assert_equal(hv.atDiscrete(1), 1)); + EXPECT(assert_equal(hv.at(X(2)), d2)); + EXPECT(assert_equal(hv.at(X(1)), d2 + d1)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp new file mode 100644 index 0000000000..018b017a9d --- /dev/null +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -0,0 +1,776 @@ +/* ---------------------------------------------------------------------------- + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. 
+ * See LICENSE for the license information
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file testHybridNonlinearFactorGraph.cpp
+ * @brief Unit tests for HybridNonlinearFactorGraph
+ * @author Varun Agrawal
+ * @author Fan Jiang
+ * @author Frank Dellaert
+ * @date December 2021
+ */
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+
+#include "Switching.h"
+
+// Include for test suite
+#include
+
+using namespace std;
+using namespace gtsam;
+using noiseModel::Isotropic;
+using symbol_shorthand::L;
+using symbol_shorthand::M;
+using symbol_shorthand::X;
+
+/* ****************************************************************************
+ * Test that linearizing a hybrid nonlinear factor graph yields a hybrid
+ * Gaussian factor graph to which plain Gaussian factors can still be added.
+ */
+TEST(HybridFactorGraph, GaussianFactorGraph) {
+  HybridNonlinearFactorGraph fg;
+
+  // Add a simple prior factor to the nonlinear factor graph
+  fg.emplace_nonlinear<PriorFactor<double>>(X(0), 0, Isotropic::Sigma(1, 0.1));
+
+  // Linearization point
+  Values linearizationPoint;
+  linearizationPoint.insert<double>(X(0), 0);
+
+  HybridGaussianFactorGraph ghfg = fg.linearize(linearizationPoint);
+
+  // Add a factor to the GaussianFactorGraph
+  ghfg.add(JacobianFactor(X(0), I_1x1, Vector1(5)));
+
+  EXPECT_LONGS_EQUAL(2, ghfg.size());
+}
+
+/***************************************************************************
+ * Test equality for Hybrid Nonlinear Factor Graph.
+ */
+TEST(HybridNonlinearFactorGraph, Equals) {
+  HybridNonlinearFactorGraph graph1;
+  HybridNonlinearFactorGraph graph2;
+
+  // Test empty factor graphs
+  EXPECT(assert_equal(graph1, graph2));
+
+  auto f0 = boost::make_shared<PriorFactor<Pose2>>(
+      1, Pose2(), noiseModel::Isotropic::Sigma(3, 0.001));
+  graph1.push_back(f0);
+  graph2.push_back(f0);
+
+  auto f1 = boost::make_shared<BetweenFactor<Pose2>>(
+      1, 2, Pose2(), noiseModel::Isotropic::Sigma(3, 0.1));
+  graph1.push_back(f1);
+  graph2.push_back(f1);
+
+  // Test non-empty graphs
+  EXPECT(assert_equal(graph1, graph2));
+}
+
+/***************************************************************************
+ * Test that the resize method works correctly for a HybridNonlinearFactorGraph.
+ */
+TEST(HybridNonlinearFactorGraph, Resize) {
+  HybridNonlinearFactorGraph fg;
+  auto nonlinearFactor = boost::make_shared<BetweenFactor<double>>();
+  fg.push_back(nonlinearFactor);
+
+  auto discreteFactor = boost::make_shared<DecisionTreeFactor>();
+  fg.push_back(discreteFactor);
+
+  auto dcFactor = boost::make_shared<MixtureFactor>();
+  fg.push_back(dcFactor);
+
+  EXPECT_LONGS_EQUAL(fg.size(), 3);
+
+  fg.resize(0);
+  EXPECT_LONGS_EQUAL(fg.size(), 0);
+}
+
+/***************************************************************************
+ * Test that the resize method works correctly for a
+ * HybridGaussianFactorGraph.
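+ * As with the nonlinear variant above, resizing to zero should discard all
+ * factors, whatever their type.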
+ */
+TEST(HybridGaussianFactorGraph, Resize) {
+  HybridNonlinearFactorGraph nhfg;
+  auto nonlinearFactor = boost::make_shared<BetweenFactor<double>>(
+      X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1));
+  nhfg.push_back(nonlinearFactor);
+  auto discreteFactor = boost::make_shared<DecisionTreeFactor>();
+  nhfg.push_back(discreteFactor);
+
+  KeyVector contKeys = {X(0), X(1)};
+  auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0);
+  auto still = boost::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
+       moving = boost::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
+
+  std::vector<NonlinearFactor::shared_ptr> components = {still, moving};
+  auto dcFactor = boost::make_shared<MixtureFactor>(
+      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
+  nhfg.push_back(dcFactor);
+
+  Values linearizationPoint;
+  linearizationPoint.insert<double>(X(0), 0);
+  linearizationPoint.insert<double>(X(1), 1);
+
+  // Generate `HybridGaussianFactorGraph` by linearizing
+  HybridGaussianFactorGraph gfg = nhfg.linearize(linearizationPoint);
+
+  EXPECT_LONGS_EQUAL(gfg.size(), 3);
+
+  gfg.resize(0);
+  EXPECT_LONGS_EQUAL(gfg.size(), 0);
+}
+
+/*****************************************************************************
+ * Test push_back on HFG makes the correct distinction.
+ */
+TEST(HybridFactorGraph, PushBack) {
+  HybridNonlinearFactorGraph fg;
+
+  auto nonlinearFactor = boost::make_shared<BetweenFactor<double>>();
+  fg.push_back(nonlinearFactor);
+
+  EXPECT_LONGS_EQUAL(fg.size(), 1);
+
+  fg = HybridNonlinearFactorGraph();
+
+  auto discreteFactor = boost::make_shared<DecisionTreeFactor>();
+  fg.push_back(discreteFactor);
+
+  EXPECT_LONGS_EQUAL(fg.size(), 1);
+
+  fg = HybridNonlinearFactorGraph();
+
+  auto dcFactor = boost::make_shared<MixtureFactor>();
+  fg.push_back(dcFactor);
+
+  EXPECT_LONGS_EQUAL(fg.size(), 1);
+
+  // Now do the same with HybridGaussianFactorGraph
+  HybridGaussianFactorGraph ghfg;
+
+  auto gaussianFactor = boost::make_shared<JacobianFactor>();
+  ghfg.push_back(gaussianFactor);
+
+  EXPECT_LONGS_EQUAL(ghfg.size(), 1);
+
+  ghfg = HybridGaussianFactorGraph();
+  ghfg.push_back(discreteFactor);
+
+  EXPECT_LONGS_EQUAL(ghfg.size(), 1);
+
+  ghfg = HybridGaussianFactorGraph();
+  ghfg.push_back(dcFactor);
+
+  HybridGaussianFactorGraph hgfg2;
+  hgfg2.push_back(ghfg.begin(), ghfg.end());
+
+  EXPECT_LONGS_EQUAL(ghfg.size(), 1);
+
+  HybridNonlinearFactorGraph hnfg;
+  NonlinearFactorGraph factors;
+  auto noise = noiseModel::Isotropic::Sigma(3, 1.0);
+  factors.emplace_shared<PriorFactor<Pose2>>(0, Pose2(0, 0, 0), noise);
+  factors.emplace_shared<PriorFactor<Pose2>>(1, Pose2(1, 0, 0), noise);
+  factors.emplace_shared<PriorFactor<Pose2>>(2, Pose2(2, 0, 0), noise);
+  // TODO(Varun) This does not currently work. It should work once HybridFactor
+  // becomes a base class of NonlinearFactor.
+  // hnfg.push_back(factors.begin(), factors.end());
+
+  // EXPECT_LONGS_EQUAL(3, hnfg.size());
+}
+
+/****************************************************************************
+ * Test construction of switching-like hybrid factor graph.
+ */
+TEST(HybridFactorGraph, Switching) {
+  Switching self(3);
+
+  EXPECT_LONGS_EQUAL(7, self.nonlinearFactorGraph.size());
+  EXPECT_LONGS_EQUAL(7, self.linearizedFactorGraph.size());
+}
+
+/****************************************************************************
+ * Test linearization on a switching-like hybrid factor graph.
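+ * Linearization should preserve the factor count: each nonlinear, discrete,
+ * and hybrid factor maps to exactly one linearized counterpart.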
+ */
+TEST(HybridFactorGraph, Linearization) {
+  Switching self(3);
+
+  // Linearize here:
+  HybridGaussianFactorGraph actualLinearized =
+      self.nonlinearFactorGraph.linearize(self.linearizationPoint);
+
+  EXPECT_LONGS_EQUAL(7, actualLinearized.size());
+}
+
+/****************************************************************************
+ * Test elimination tree construction
+ */
+TEST(HybridFactorGraph, EliminationTree) {
+  Switching self(3);
+
+  // Create ordering.
+  Ordering ordering;
+  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+
+  // Create elimination tree.
+  HybridEliminationTree etree(self.linearizedFactorGraph, ordering);
+  EXPECT_LONGS_EQUAL(1, etree.roots().size());
+}
+
+/****************************************************************************
+ * Test elimination function by eliminating x1 in *-x1-*-x2 graph.
+ */
+TEST(HybridGaussianElimination, Eliminate_x1) {
+  Switching self(3);
+
+  // Gather factors on x1: a simple Gaussian prior and a mixture factor.
+  HybridGaussianFactorGraph factors;
+  // Add Gaussian prior
+  factors.push_back(self.linearizedFactorGraph[0]);
+  // Add first hybrid factor
+  factors.push_back(self.linearizedFactorGraph[1]);
+
+  // TODO(Varun) remove this block since sum is no longer exposed.
+  // // Check that sum works:
+  // auto sum = factors.sum();
+  // Assignment<Key> mode;
+  // mode[M(1)] = 1;
+  // auto actual = sum(mode);               // Selects one of 2 modes.
+  // EXPECT_LONGS_EQUAL(2, actual.size());  // Prior and motion model.
+
+  // Eliminate x1
+  Ordering ordering;
+  ordering += X(1);
+
+  auto result = EliminateHybrid(factors, ordering);
+  CHECK(result.first);
+  EXPECT_LONGS_EQUAL(1, result.first->nrFrontals());
+  CHECK(result.second);
+  // Has two keys, x2 and m1
+  EXPECT_LONGS_EQUAL(2, result.second->size());
+}
+
+/****************************************************************************
+ * Test elimination function by eliminating x2 in x1-*-x2-*-x3 chain.
+ *                                              m1/      \m2
+ */
+TEST(HybridGaussianElimination, Eliminate_x2) {
+  Switching self(3);
+
+  // Gather factors on x2: two mixture factors (with x1 and x3, resp.).
+  HybridGaussianFactorGraph factors;
+  factors.push_back(self.linearizedFactorGraph[1]);  // involves m1
+  factors.push_back(self.linearizedFactorGraph[2]);  // involves m2
+
+  // TODO(Varun) remove this block since sum is no longer exposed.
+  // // Check that sum works:
+  // auto sum = factors.sum();
+  // Assignment<Key> mode;
+  // mode[M(1)] = 0;
+  // mode[M(2)] = 1;
+  // auto actual = sum(mode);               // Selects one of 4 mode combinations.
+  // EXPECT_LONGS_EQUAL(2, actual.size());  // 2 motion models.
+
+  // Eliminate x2
+  Ordering ordering;
+  ordering += X(2);
+
+  std::pair<HybridConditional::shared_ptr, HybridFactor::shared_ptr> result =
+      EliminateHybrid(factors, ordering);
+  CHECK(result.first);
+  EXPECT_LONGS_EQUAL(1, result.first->nrFrontals());
+  CHECK(result.second);
+  // Note: separator keys should include m1, m2.
+  EXPECT_LONGS_EQUAL(4, result.second->size());
+}
+
+/****************************************************************************
+ * Helper method to generate Gaussian factor graphs with a specific mode.
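+ * The helper linearizes a prior on x1 plus a single between factor whose
+ * measurement is given by `between`, i.e. the fixed-mode analogue of the
+ * hybrid motion model.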
+ */
+GaussianFactorGraph::shared_ptr batchGFG(double between,
+                                         Values linearizationPoint) {
+  NonlinearFactorGraph graph;
+  graph.addPrior<double>(X(1), 0, Isotropic::Sigma(1, 0.1));
+
+  auto between_x1_x2 = boost::make_shared<MotionModel>(
+      X(1), X(2), between, Isotropic::Sigma(1, 1.0));
+
+  graph.push_back(between_x1_x2);
+
+  return graph.linearize(linearizationPoint);
+}
+
+/****************************************************************************
+ * Test elimination function by eliminating x1 and x2 in graph.
+ */
+TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
+  Switching self(2, 1.0, 0.1);
+
+  auto factors = self.linearizedFactorGraph;
+
+  // Eliminate x1 and x2.
+  Ordering ordering;
+  ordering += X(1);
+  ordering += X(2);
+
+  HybridConditional::shared_ptr hybridConditionalMixture;
+  HybridFactor::shared_ptr factorOnModes;
+
+  std::tie(hybridConditionalMixture, factorOnModes) =
+      EliminateHybrid(factors, ordering);
+
+  auto gaussianConditionalMixture =
+      dynamic_pointer_cast<GaussianMixture>(hybridConditionalMixture->inner());
+
+  CHECK(gaussianConditionalMixture);
+  // Frontals = [x1, x2]
+  EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals());
+  // 1 parent, which is the mode
+  EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents());
+
+  // This is now a HybridDiscreteFactor
+  auto hybridDiscreteFactor =
+      dynamic_pointer_cast<HybridDiscreteFactor>(factorOnModes);
+  // Access the type-erased inner object and convert to DecisionTreeFactor
+  auto discreteFactor =
+      dynamic_pointer_cast<DecisionTreeFactor>(hybridDiscreteFactor->inner());
+  CHECK(discreteFactor);
+  EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size());
+  EXPECT(discreteFactor->root_->isLeaf() == false);
+}
+
+// /*
+// ****************************************************************************/
+// /// Test the toDecisionTreeFactor method
+// TEST(HybridFactorGraph, ToDecisionTreeFactor) {
+//   size_t K = 3;
+
+//   // Provide tight sigma values so that the errors are visibly different.
+//   double between_sigma = 5e-8, prior_sigma = 1e-7;
+
+//   Switching self(K, between_sigma, prior_sigma);
+
+//   // Clear out discrete factors since sum() cannot handle those
+//   HybridGaussianFactorGraph linearizedFactorGraph(
+//       self.linearizedFactorGraph.gaussianGraph(), DiscreteFactorGraph(),
+//       self.linearizedFactorGraph.dcGraph());
+
+//   auto decisionTreeFactor = linearizedFactorGraph.toDecisionTreeFactor();
+
+//   auto allAssignments =
+//       DiscreteValues::CartesianProduct(linearizedFactorGraph.discreteKeys());
+
+//   // Get the error of the discrete assignment m1=0, m2=1.
+//   double actual = (*decisionTreeFactor)(allAssignments[1]);
+
+//   /********************************************/
+//   // Create equivalent factor graph for m1=0, m2=1
+//   GaussianFactorGraph graph = linearizedFactorGraph.gaussianGraph();
+
+//   for (auto &p : linearizedFactorGraph.dcGraph()) {
+//     if (auto mixture =
+//             boost::dynamic_pointer_cast(p)) {
+//       graph.add((*mixture)(allAssignments[1]));
+//     }
+//   }
+
+//   VectorValues values = graph.optimize();
+//   double expected = graph.probPrime(values);
+//   /********************************************/
+//   EXPECT_DOUBLES_EQUAL(expected, actual, 1e-12);
+//   // REGRESSION:
+//   EXPECT_DOUBLES_EQUAL(0.6125, actual, 1e-4);
+// }
+
+/****************************************************************************
+ * Test partial elimination
+ */
+TEST(HybridFactorGraph, Partial_Elimination) {
+  Switching self(3);
+
+  auto linearizedFactorGraph = self.linearizedFactorGraph;
+
+  // Create ordering.
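+  // Note: only the continuous variables x1..x3 go into the ordering, so the
+  // discrete modes m1, m2 are left for the remaining factor graph.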
+  Ordering ordering;
+  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+
+  // Eliminate partially.
+  HybridBayesNet::shared_ptr hybridBayesNet;
+  HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
+  std::tie(hybridBayesNet, remainingFactorGraph) =
+      linearizedFactorGraph.eliminatePartialSequential(ordering);
+
+  CHECK(hybridBayesNet);
+  // GTSAM_PRINT(*hybridBayesNet);  // HybridBayesNet
+  EXPECT_LONGS_EQUAL(3, hybridBayesNet->size());
+  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
+  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
+  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
+  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
+  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
+  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
+
+  CHECK(remainingFactorGraph);
+  // GTSAM_PRINT(*remainingFactorGraph);  // HybridFactorGraph
+  EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size());
+  EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(1)}));
+  EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(2), M(1)}));
+  EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(1), M(2)}));
+}
+
+/****************************************************************************
+ * Test full elimination
+ */
+TEST(HybridFactorGraph, Full_Elimination) {
+  Switching self(3);
+
+  auto linearizedFactorGraph = self.linearizedFactorGraph;
+
+  // We first do a partial elimination
+  HybridBayesNet::shared_ptr hybridBayesNet_partial;
+  HybridGaussianFactorGraph::shared_ptr remainingFactorGraph_partial;
+  DiscreteBayesNet discreteBayesNet;
+
+  {
+    // Create ordering.
+    Ordering ordering;
+    for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+
+    // Eliminate partially.
+    std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) =
+        linearizedFactorGraph.eliminatePartialSequential(ordering);
+
+    DiscreteFactorGraph discrete_fg;
+    // TODO(Varun) Make this a function of HybridGaussianFactorGraph?
+    for (HybridFactor::shared_ptr& factor : (*remainingFactorGraph_partial)) {
+      auto df = dynamic_pointer_cast<HybridDiscreteFactor>(factor);
+      discrete_fg.push_back(df->inner());
+    }
+
+    // Get the MPE-preserving discrete Bayes net over m1, m2.
+    ordering.clear();
+    for (size_t k = 1; k < self.K; k++) ordering += M(k);
+    discreteBayesNet =
+        *discrete_fg.eliminateSequential(ordering, EliminateForMPE);
+  }
+
+  // Create ordering.
+  Ordering ordering;
+  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+  for (size_t k = 1; k < self.K; k++) ordering += M(k);
+
+  // Eliminate everything: continuous variables first, then discrete modes.
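+  // The discrete tail of the resulting Bayes net should match the standalone
+  // DiscreteBayesNet computed above.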
+  HybridBayesNet::shared_ptr hybridBayesNet =
+      linearizedFactorGraph.eliminateSequential(ordering);
+
+  CHECK(hybridBayesNet);
+  EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());
+  // p(x1 | x2, m1)
+  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
+  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
+  // p(x2 | x3, m1, m2)
+  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
+  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
+  // p(x3 | m1, m2)
+  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
+  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
+  // P(m1 | m2)
+  EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)});
+  EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)}));
+  EXPECT(
+      dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(3)->inner())
+          ->equals(*discreteBayesNet.at(0)));
+  // P(m2)
+  EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)});
+  EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents());
+  EXPECT(
+      dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(4)->inner())
+          ->equals(*discreteBayesNet.at(1)));
+}
+
+/****************************************************************************
+ * Test printing
+ */
+TEST(HybridFactorGraph, Printing) {
+  Switching self(3);
+
+  auto linearizedFactorGraph = self.linearizedFactorGraph;
+
+  // Create ordering.
+  Ordering ordering;
+  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+
+  // Eliminate partially.
+  HybridBayesNet::shared_ptr hybridBayesNet;
+  HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
+  std::tie(hybridBayesNet, remainingFactorGraph) =
+      linearizedFactorGraph.eliminatePartialSequential(ordering);
+
+  string expected_hybridFactorGraph = R"(
+size: 7
+factor 0: Continuous [x1]
+
+  A[x1] = [
+	10
+]
+  b = [ -10 ]
+  No noise model
+factor 1: Hybrid [x1 x2; m1]{
+ Choice(m1)
+ 0 Leaf :
+  A[x1] = [
+	-1
+]
+  A[x2] = [
+	1
+]
+  b = [ -1 ]
+  No noise model
+
+ 1 Leaf :
+  A[x1] = [
+	-1
+]
+  A[x2] = [
+	1
+]
+  b = [ -0 ]
+  No noise model
+
+}
+factor 2: Hybrid [x2 x3; m2]{
+ Choice(m2)
+ 0 Leaf :
+  A[x2] = [
+	-1
+]
+  A[x3] = [
+	1
+]
+  b = [ -1 ]
+  No noise model
+
+ 1 Leaf :
+  A[x2] = [
+	-1
+]
+  A[x3] = [
+	1
+]
+  b = [ -0 ]
+  No noise model
+
+}
+factor 3: Continuous [x2]
+
+  A[x2] = [
+	10
+]
+  b = [ -10 ]
+  No noise model
+factor 4: Continuous [x3]
+
+  A[x3] = [
+	10
+]
+  b = [ -10 ]
+  No noise model
+factor 5: Discrete [m1]
+ P( m1 ):
+ Leaf 0.5
+
+factor 6: Discrete [m2 m1]
+ P( m2 | m1 ):
+ Choice(m2)
+ 0 Choice(m1)
+ 0 0 Leaf 0.33333333
+ 0 1 Leaf 0.6
+ 1 Choice(m1)
+ 1 0 Leaf 0.66666667
+ 1 1 Leaf 0.4
+
+)";
+  EXPECT(assert_print_equal(expected_hybridFactorGraph, linearizedFactorGraph));
+
+  // Expected output for hybridBayesNet.
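+  // (Regression string: tied to the exact conditional print format.)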
+  string expected_hybridBayesNet = R"(
+size: 3
+factor 0: Hybrid P( x1 | x2 m1)
+ Discrete Keys = (m1, 2),
+ Choice(m1)
+ 0 Leaf p(x1 | x2)
+  R = [ 10.0499 ]
+  S[x2] = [ -0.0995037 ]
+  d = [ -9.85087 ]
+  No noise model
+
+ 1 Leaf p(x1 | x2)
+  R = [ 10.0499 ]
+  S[x2] = [ -0.0995037 ]
+  d = [ -9.95037 ]
+  No noise model
+
+factor 1: Hybrid P( x2 | x3 m1 m2)
+ Discrete Keys = (m1, 2), (m2, 2),
+ Choice(m2)
+ 0 Choice(m1)
+ 0 0 Leaf p(x2 | x3)
+  R = [ 10.099 ]
+  S[x3] = [ -0.0990196 ]
+  d = [ -9.99901 ]
+  No noise model
+
+ 0 1 Leaf p(x2 | x3)
+  R = [ 10.099 ]
+  S[x3] = [ -0.0990196 ]
+  d = [ -9.90098 ]
+  No noise model
+
+ 1 Choice(m1)
+ 1 0 Leaf p(x2 | x3)
+  R = [ 10.099 ]
+  S[x3] = [ -0.0990196 ]
+  d = [ -10.098 ]
+  No noise model
+
+ 1 1 Leaf p(x2 | x3)
+  R = [ 10.099 ]
+  S[x3] = [ -0.0990196 ]
+  d = [ -10 ]
+  No noise model
+
+factor 2: Hybrid P( x3 | m1 m2)
+ Discrete Keys = (m1, 2), (m2, 2),
+ Choice(m2)
+ 0 Choice(m1)
+ 0 0 Leaf p(x3)
+  R = [ 10.0494 ]
+  d = [ -10.1489 ]
+  No noise model
+
+ 0 1 Leaf p(x3)
+  R = [ 10.0494 ]
+  d = [ -10.1479 ]
+  No noise model
+
+ 1 Choice(m1)
+ 1 0 Leaf p(x3)
+  R = [ 10.0494 ]
+  d = [ -10.0504 ]
+  No noise model
+
+ 1 1 Leaf p(x3)
+  R = [ 10.0494 ]
+  d = [ -10.0494 ]
+  No noise model
+
+)";
+  EXPECT(assert_print_equal(expected_hybridBayesNet, *hybridBayesNet));
+}
+
+/****************************************************************************
+ * Simple PlanarSLAM example test with 2 poses and 2 landmarks (each pose
+ * connects to 1 landmark) to expose issue with default decision tree creation
+ * in hybrid elimination. The hybrid factor is between the poses X0 and X1.
+ * The issue arises if we eliminate a landmark variable first since it is not
+ * connected to a HybridFactor.
+ */
+TEST(HybridFactorGraph, DefaultDecisionTree) {
+  HybridNonlinearFactorGraph fg;
+
+  // Add a prior on pose X0 at the origin.
+  // A prior factor consists of a mean and a noise model (covariance matrix)
+  Pose2 prior(0.0, 0.0, 0.0);  // prior mean is at origin
+  auto priorNoise = noiseModel::Diagonal::Sigmas(
+      Vector3(0.3, 0.3, 0.1));  // 30cm std on x,y, 0.1 rad on theta
+  fg.emplace_nonlinear<PriorFactor<Pose2>>(X(0), prior, priorNoise);
+
+  using PlanarMotionModel = BetweenFactor<Pose2>;
+
+  // Add odometry factor
+  Pose2 odometry(2.0, 0.0, 0.0);
+  KeyVector contKeys = {X(0), X(1)};
+  auto noise_model = noiseModel::Isotropic::Sigma(3, 1.0);
+  auto still = boost::make_shared<PlanarMotionModel>(X(0), X(1), Pose2(0, 0, 0),
+                                                     noise_model),
+       moving = boost::make_shared<PlanarMotionModel>(X(0), X(1), odometry,
+                                                      noise_model);
+  std::vector<PlanarMotionModel::shared_ptr> motion_models = {still, moving};
+  // TODO(Varun) Make a templated constructor for MixtureFactor which does this?
+  std::vector<NonlinearFactor::shared_ptr> components;
+  for (auto&& f : motion_models) {
+    components.push_back(boost::dynamic_pointer_cast<NonlinearFactor>(f));
+  }
+  fg.emplace_hybrid<MixtureFactor>(
+      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
+
+  // Add Range-Bearing measurements from X0 to L0 and from X1 to L1.
+  // Create a noise model for the landmark measurements.
+  auto measurementNoise = noiseModel::Diagonal::Sigmas(
+      Vector2(0.1, 0.2));  // 0.1 rad std on bearing, 20cm on range
+  // Create the measurement values - indices are (pose id, landmark id).
+  Rot2 bearing11 = Rot2::fromDegrees(45), bearing22 = Rot2::fromDegrees(90);
+  double range11 = std::sqrt(4.0 + 4.0), range22 = 2.0;
+
+  // Add Bearing-Range factors
+  fg.emplace_nonlinear<BearingRangeFactor<Pose2, Point2>>(
+      X(0), L(0), bearing11, range11, measurementNoise);
+  fg.emplace_nonlinear<BearingRangeFactor<Pose2, Point2>>(
+      X(1), L(1), bearing22, range22, measurementNoise);
+
+  // Create (deliberately inaccurate) initial estimate
+  Values initialEstimate;
+  initialEstimate.insert(X(0), Pose2(0.5, 0.0, 0.2));
+  initialEstimate.insert(X(1), Pose2(2.3, 0.1, -0.2));
+  initialEstimate.insert(L(0), Point2(1.8, 2.1));
+  initialEstimate.insert(L(1), Point2(4.1, 1.8));
+
+  // We want to eliminate variables not connected to DCFactors first.
+  Ordering ordering;
+  ordering += L(0);
+  ordering += L(1);
+  ordering += X(0);
+  ordering += X(1);
+
+  HybridGaussianFactorGraph linearized = fg.linearize(initialEstimate);
+  gtsam::HybridBayesNet::shared_ptr hybridBayesNet;
+  gtsam::HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
+
+  // This should NOT fail
+  std::tie(hybridBayesNet, remainingFactorGraph) =
+      linearized.eliminatePartialSequential(ordering);
+  EXPECT_LONGS_EQUAL(4, hybridBayesNet->size());
+  EXPECT_LONGS_EQUAL(1, remainingFactorGraph->size());
+}
+
+/* ************************************************************************* */
+int main() {
+  TestResult tr;
+  return TestRegistry::runAllTests(tr);
+}
+/* ************************************************************************* */
diff --git a/gtsam/hybrid/tests/testHybridValues.cpp b/gtsam/hybrid/tests/testHybridValues.cpp
new file mode 100644
index 0000000000..6f510601d4
--- /dev/null
+++ b/gtsam/hybrid/tests/testHybridValues.cpp
@@ -0,0 +1,58 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file testHybridValues.cpp
+ * @date Jul 28, 2022
+ * @author Shangjie Xue
+ */
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+// Include for test suite
+#include
+
+using namespace std;
+using namespace gtsam;
+
+TEST(HybridValues, basics) {
+  HybridValues values;
+  values.insert(99, Vector2(2, 3));
+  values.insert(100, 3);
+  EXPECT(assert_equal(values.at(99), Vector2(2, 3)));
+  EXPECT(assert_equal(values.atDiscrete(100), int(3)));
+
+  values.print();
+
+  // Insertion order should not affect equality.
+  HybridValues values2;
+  values2.insert(100, 3);
+  values2.insert(99, Vector2(2, 3));
+  EXPECT(assert_equal(values2, values));
+
+  // Adding an extra entry should break equality.
+  values2.insert(98, Vector2(2, 3));
+  EXPECT(!assert_equal(values2, values));
+}
+
+/* ************************************************************************* */
+int main() {
+  TestResult tr;
+  return TestRegistry::runAllTests(tr);
+}
+/* ************************************************************************* */
diff --git a/gtsam/inference/FactorGraph.h b/gtsam/inference/FactorGraph.h
index 101134c838..89fd09037c 100644
--- a/gtsam/inference/FactorGraph.h
+++ b/gtsam/inference/FactorGraph.h
@@ -116,7 +116,7 @@ class FactorGraph {
   using HasDerivedValueType = typename std::enable_if<
       std::is_base_of<FactorType, typename T::value_type>::value>::type;
 
-  /// Check if T has a value_type derived from FactorType.
+  /// Check if T has a pointer type derived from FactorType.
   template <typename T>
   using HasDerivedElementType = typename std::enable_if<std::is_base_of<
       FactorType, typename T::value_type::element_type>::value>::type;
diff --git a/gtsam/linear/GaussianISAM.h b/gtsam/linear/GaussianISAM.h
index b77b262408..98bf5df884 100644
--- a/gtsam/linear/GaussianISAM.h
+++ b/gtsam/linear/GaussianISAM.h
@@ -10,7 +10,7 @@
  * -------------------------------------------------------------------------- */
 
 /**
- * @file SymbolicISAM.h
+ * @file GaussianISAM.h
  * @date July 29, 2013
  * @author Frank Dellaert
  * @author Richard Roberts
diff --git a/python/gtsam/tests/test_HybridFactorGraph.py b/python/gtsam/tests/test_HybridFactorGraph.py
index 781cfd9240..576efa82fd 100644
--- a/python/gtsam/tests/test_HybridFactorGraph.py
+++ b/python/gtsam/tests/test_HybridFactorGraph.py
@@ -55,6 +55,34 @@ def test_create(self):
         discrete_conditional = hbn.at(hbn.size() - 1).inner()
         self.assertIsInstance(discrete_conditional, gtsam.DiscreteConditional)
 
+    def test_optimize(self):
+        """Test optimization of a hybrid factor graph."""
+        noiseModel = gtsam.noiseModel.Unit.Create(3)
+        dk = gtsam.DiscreteKeys()
+        dk.push_back((C(0), 2))
+
+        jf1 = gtsam.JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)),
+                                   noiseModel)
+        jf2 = gtsam.JacobianFactor(X(0), np.eye(3), np.ones((3, 1)),
+                                   noiseModel)
+
+        gmf = gtsam.GaussianMixtureFactor.FromFactors([X(0)], dk, [jf1, jf2])
+
+        hfg = gtsam.HybridGaussianFactorGraph()
+        hfg.add(jf1)
+        hfg.add(jf2)
+        hfg.push_back(gmf)
+
+        dtf = gtsam.DecisionTreeFactor([(C(0), 2)], "0 1")
+        hfg.add(dtf)
+
+        # Constrain the discrete key C(0) to be eliminated last.
+        hbn = hfg.eliminateSequential(
+            gtsam.Ordering.ColamdConstrainedLastHybridGaussianFactorGraph(
+                hfg, [C(0)]))
+
+        # print("hbn = ", hbn)
+        hv = hbn.optimize()
+        self.assertEqual(hv.atDiscrete(C(0)), 1)
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/python/gtsam/tests/test_HybridValues.py b/python/gtsam/tests/test_HybridValues.py
new file mode 100644
index 0000000000..63e7c8e7dd
--- /dev/null
+++ b/python/gtsam/tests/test_HybridValues.py
@@ -0,0 +1,41 @@
+"""
+GTSAM Copyright 2010-2019, Georgia Tech Research Corporation,
+Atlanta, Georgia 30332-0415
+All Rights Reserved
+
+See LICENSE for the license information
+
+Unit tests for Hybrid Values.
+Author: Shangjie Xue
+"""
+# pylint: disable=invalid-name, no-name-in-module, no-member
+
+from __future__ import print_function
+
+import unittest
+
+import gtsam
+import numpy as np
+from gtsam.symbol_shorthand import C, X
+from gtsam.utils.test_case import GtsamTestCase
+
+
+class TestHybridValues(GtsamTestCase):
+    """Unit tests for HybridValues."""
+
+    def test_basic(self):
+        """Test construction and basic methods of hybrid values."""
+
+        hv1 = gtsam.HybridValues()
+        hv1.insert(X(0), np.ones((3, 1)))
+        hv1.insert(C(0), 2)
+
+        hv2 = gtsam.HybridValues()
+        hv2.insert(C(0), 2)
+        hv2.insert(X(0), np.ones((3, 1)))
+
+        self.assertEqual(hv1.atDiscrete(C(0)), 2)
+        self.assertEqual(hv1.at(X(0))[0], np.ones((3, 1))[0])
+
+
+if __name__ == "__main__":
+    unittest.main()