From d387d2cf3f3476c755843afb0da155acecf0260c Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Fri, 14 Oct 2022 16:44:06 -0400
Subject: [PATCH 01/10] update Switching example to accept measurements in
 constructor

---
 gtsam/hybrid/tests/Switching.h | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h
index 3ae8f0bb1c..a1bad79bc0 100644
--- a/gtsam/hybrid/tests/Switching.h
+++ b/gtsam/hybrid/tests/Switching.h
@@ -131,17 +131,25 @@ struct Switching {
    * @param between_sigma The stddev between poses.
    * @param prior_sigma The stddev on priors (also used for measurements).
    */
-  Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1)
+  Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1,
+            std::vector<double> measurements = {})
       : K(K) {
     // Create DiscreteKeys for binary K modes, modes[0] will not be used.
     for (size_t k = 0; k <= K; k++) {
       modes.emplace_back(M(k), 2);
     }

+    // If measurements are not provided, we just have the robot moving forward.
+    if (measurements.size() == 0) {
+      for (size_t k = 1; k <= K; k++) {
+        measurements.push_back(k - 1);
+      }
+    }
+
     // Create hybrid factor graph.
     // Add a prior on X(1).
     auto prior = boost::make_shared<PriorFactor<double>>(
-        X(1), 0, noiseModel::Isotropic::Sigma(1, prior_sigma));
+        X(1), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma));
     nonlinearFactorGraph.push_nonlinear(prior);

     // Add "motion models".
@@ -160,7 +168,7 @@ struct Switching {
     auto measurement_noise = noiseModel::Isotropic::Sigma(1, prior_sigma);
     for (size_t k = 2; k <= K; k++) {
       nonlinearFactorGraph.emplace_nonlinear<PriorFactor<double>>(
-          X(k), 1.0 * (k - 1), measurement_noise);
+          X(k), measurements.at(k - 1), measurement_noise);
     }

     // Add "mode chain"
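The extended constructor lets a test drive the fixture with an arbitrary
measurement sequence instead of the default forward motion. A minimal usage
sketch, assuming only the test fixture header above (the measurement values
are purely illustrative):

    #include "Switching.h"
    using namespace gtsam;

    // K = 3 time steps; the hypothetical robot stops after the second step.
    std::vector<double> measurements = {0.0, 1.0, 1.0};
    Switching switching(3, 1.0, 0.1, measurements);
    switching.nonlinearFactorGraph.print("hybrid factor graph: ");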
From 40d38651ccc3d93f59ed4406d3ff9cb304caa600 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Fri, 14 Oct 2022 16:44:38 -0400
Subject: [PATCH 02/10] rename discreteFactor to decisionTree

---
 gtsam/hybrid/HybridBayesNet.cpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp
index cc27600f09..8bce45c518 100644
--- a/gtsam/hybrid/HybridBayesNet.cpp
+++ b/gtsam/hybrid/HybridBayesNet.cpp
@@ -45,7 +45,7 @@ DecisionTreeFactor::shared_ptr HybridBayesNet::discreteConditionals() const {
 HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const {
   // Get the decision tree of only the discrete keys
   auto discreteConditionals = this->discreteConditionals();
-  const DecisionTreeFactor::shared_ptr discreteFactor =
+  const DecisionTreeFactor::shared_ptr decisionTree =
       boost::make_shared<DecisionTreeFactor>(
           discreteConditionals->prune(maxNrLeaves));

@@ -59,7 +59,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const {
   HybridBayesNet prunedBayesNetFragment;

   // Go through all the conditionals in the
-  // Bayes Net and prune them as per discreteFactor.
+  // Bayes Net and prune them as per decisionTree.
   for (size_t i = 0; i < this->size(); i++) {
     HybridConditional::shared_ptr conditional = this->at(i);

@@ -69,7 +69,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const {
       // Make a copy of the gaussian mixture and prune it!
       auto prunedGaussianMixture =
           boost::make_shared<GaussianMixture>(*gaussianMixture);
-      prunedGaussianMixture->prune(*discreteFactor);
+      prunedGaussianMixture->prune(*decisionTree);

       // Type-erase and add to the pruned Bayes Net fragment.
       prunedBayesNetFragment.push_back(

From 82f328b80800cdd75f09aa514353d5c58b12759e Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 20 Oct 2022 15:43:29 -0400
Subject: [PATCH 03/10] expose DiscreteKeysAsSet as a function

---
 gtsam/hybrid/GaussianMixture.cpp | 1 -
 gtsam/hybrid/GaussianMixture.h   | 3 +++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp
index 5172a97983..fa0cc189b6 100644
--- a/gtsam/hybrid/GaussianMixture.cpp
+++ b/gtsam/hybrid/GaussianMixture.cpp
@@ -129,7 +129,6 @@ void GaussianMixture::print(const std::string &s,
 }

 /* ************************************************************************* */
-/// Return the DiscreteKey vector as a set.
 std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &dkeys) {
   std::set<DiscreteKey> s;
   s.insert(dkeys.begin(), dkeys.end());
diff --git a/gtsam/hybrid/GaussianMixture.h b/gtsam/hybrid/GaussianMixture.h
index 9792a85323..7c1cde4cf3 100644
--- a/gtsam/hybrid/GaussianMixture.h
+++ b/gtsam/hybrid/GaussianMixture.h
@@ -162,6 +162,9 @@ class GTSAM_EXPORT GaussianMixture
   Sum add(const Sum &sum) const;
 };

+/// Return the DiscreteKey vector as a set.
+std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &dkeys);
+
 // traits
 template <>
 struct traits<GaussianMixture> : public Testable<GaussianMixture> {};
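Declaring DiscreteKeysAsSet in the header lets callers outside
GaussianMixture.cpp reuse it for set operations on discrete keys, which is
exactly what the pruning helper in the next patch needs. A small hedged
sketch of the kind of comparison it enables (the helper name is hypothetical):

    #include <gtsam/hybrid/GaussianMixture.h>
    using namespace gtsam;

    // Hypothetical helper: do two hybrid objects involve the same modes?
    bool sameModes(const DiscreteKeys &a, const DiscreteKeys &b) {
      return DiscreteKeysAsSet(a) == DiscreteKeysAsSet(b);
    }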
From 949958dc6ec3226c830023a8ec026abf41add40b Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 20 Oct 2022 16:35:14 -0400
Subject: [PATCH 04/10] new updateDiscreteConditionals method for after we
 prune

---
 gtsam/hybrid/HybridBayesNet.cpp           | 89 ++++++++++++++++++++++-
 gtsam/hybrid/HybridBayesNet.h             | 11 ++-
 gtsam/hybrid/tests/testHybridBayesNet.cpp | 49 +++++++++++++
 3 files changed, 146 insertions(+), 3 deletions(-)

diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp
index 8bce45c518..a641363846 100644
--- a/gtsam/hybrid/HybridBayesNet.cpp
+++ b/gtsam/hybrid/HybridBayesNet.cpp
@@ -42,13 +42,100 @@ DecisionTreeFactor::shared_ptr HybridBayesNet::discreteConditionals() const {
 }

 /* ************************************************************************* */
-HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const {
+/**
+ * @brief Helper function to get the pruner functional.
+ *
+ * @param decisionTree The probability decision tree of only discrete keys.
+ * @return std::function<double(const Assignment<Key> &, double)>
+ */
+std::function<double(const Assignment<Key> &, double)> prunerFunc(
+    const DecisionTreeFactor &decisionTree,
+    const HybridConditional &conditional) {
+  // Get the discrete keys as sets for the decision tree
+  // and the Gaussian mixture.
+  auto decisionTreeKeySet = DiscreteKeysAsSet(decisionTree.discreteKeys());
+  auto conditionalKeySet = DiscreteKeysAsSet(conditional.discreteKeys());
+
+  auto pruner = [decisionTree, decisionTreeKeySet, conditionalKeySet](
+                    const Assignment<Key> &choices,
+                    double probability) -> double {
+    // Typecast so we can use this to get the probability value.
+    DiscreteValues values(choices);
+    // Case where the Gaussian mixture has the same
+    // discrete keys as the decision tree.
+    if (conditionalKeySet == decisionTreeKeySet) {
+      if (decisionTree(values) == 0) {
+        return 0.0;
+      } else {
+        return probability;
+      }
+    } else {
+      std::vector<DiscreteKey> set_diff;
+      std::set_difference(decisionTreeKeySet.begin(), decisionTreeKeySet.end(),
+                          conditionalKeySet.begin(), conditionalKeySet.end(),
+                          std::back_inserter(set_diff));
+
+      const std::vector<DiscreteValues> assignments =
+          DiscreteValues::CartesianProduct(set_diff);
+      for (const DiscreteValues &assignment : assignments) {
+        DiscreteValues augmented_values(values);
+        augmented_values.insert(assignment.begin(), assignment.end());
+
+        // If any one of the sub-branches is non-zero,
+        // we need this probability.
+        if (decisionTree(augmented_values) > 0.0) {
+          return probability;
+        }
+      }
+      // If we are here, it means that all the sub-branches are 0,
+      // so we prune.
+      return 0.0;
+    }
+  };
+  return pruner;
+}
+
+/* ************************************************************************* */
+void HybridBayesNet::updateDiscreteConditionals(
+    const DecisionTreeFactor::shared_ptr &prunedDecisionTree) {
+  KeyVector prunedTreeKeys = prunedDecisionTree->keys();
+
+  for (size_t i = 0; i < this->size(); i++) {
+    HybridConditional::shared_ptr conditional = this->at(i);
+    if (conditional->isDiscrete()) {
+      auto discrete = conditional->asDiscreteConditional();
+      KeyVector frontals(discrete->frontals().begin(),
+                         discrete->frontals().end());
+
+      // Apply prunerFunc to the underlying AlgebraicDecisionTree.
+      auto discreteTree =
+          boost::dynamic_pointer_cast<DecisionTreeFactor::ADT>(discrete);
+      DecisionTreeFactor::ADT prunedDiscreteTree =
+          discreteTree->apply(prunerFunc(*prunedDecisionTree, *conditional));
+
+      // Create the new (hybrid) conditional.
+      auto prunedDiscrete = boost::make_shared<DiscreteConditional>(
+          frontals.size(), conditional->discreteKeys(), prunedDiscreteTree);
+      conditional = boost::make_shared<HybridConditional>(prunedDiscrete);
+
+      // Add it back to the Bayes net.
+      this->at(i) = conditional;
+    }
+  }
+}
+
+/* ************************************************************************* */
+HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
   // Get the decision tree of only the discrete keys
   auto discreteConditionals = this->discreteConditionals();
   const DecisionTreeFactor::shared_ptr decisionTree =
       boost::make_shared<DecisionTreeFactor>(
           discreteConditionals->prune(maxNrLeaves));

+  this->updateDiscreteConditionals(decisionTree);
+
   /* To Prune, we visitWith every leaf in the GaussianMixture.
    * For each leaf, using the assignment we can check the discrete decision tree
    * for 0.0 probability, then just set the leaf to a nullptr.
diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h
index b8234d70ab..87e6c5db67 100644
--- a/gtsam/hybrid/HybridBayesNet.h
+++ b/gtsam/hybrid/HybridBayesNet.h
@@ -111,7 +111,6 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
    */
   VectorValues optimize(const DiscreteValues &assignment) const;

- protected:
   /**
    * @brief Get all the discrete conditionals as a decision tree factor.
    *
@@ -121,11 +120,19 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {

  public:
   /// Prune the Hybrid Bayes Net such that we have at most maxNrLeaves leaves.
-  HybridBayesNet prune(size_t maxNrLeaves) const;
+  HybridBayesNet prune(size_t maxNrLeaves);

   /// @}

 private:
+  /**
+   * @brief Update the discrete conditionals with the pruned versions.
+   *
+   * @param prunedDecisionTree The pruned decision tree of discrete keys.
+   */
+  void updateDiscreteConditionals(
+      const DecisionTreeFactor::shared_ptr &prunedDecisionTree);
+
   /** Serialization function */
   friend class boost::serialization::access;
   template <class ARCHIVE>
diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp
index 5885fdcdcc..fc353f9c1f 100644
--- a/gtsam/hybrid/tests/testHybridBayesNet.cpp
+++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp
@@ -201,6 +201,55 @@ TEST(HybridBayesNet, Prune) {
   EXPECT(assert_equal(delta.continuous(), pruned_delta.continuous()));
 }

+/* ****************************************************************************/
+// Test Bayes net updateDiscreteConditionals
+TEST(HybridBayesNet, UpdateDiscreteConditionals) {
+  Switching s(4);
+
+  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
+  HybridBayesNet::shared_ptr hybridBayesNet =
+      s.linearizedFactorGraph.eliminateSequential(hybridOrdering);
+
+  size_t maxNrLeaves = 3;
+  auto discreteConditionals = hybridBayesNet->discreteConditionals();
+  const DecisionTreeFactor::shared_ptr prunedDecisionTree =
+      boost::make_shared<DecisionTreeFactor>(
+          discreteConditionals->prune(maxNrLeaves));
+
+  EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/,
+                     prunedDecisionTree->nrLeaves());
+
+  auto original_discrete_conditionals =
+      *(hybridBayesNet->at(4)->asDiscreteConditional());
+
+  // Prune!
+  hybridBayesNet->prune(maxNrLeaves);
+
+  // Functor to verify values against the original_discrete_conditionals
+  auto checker = [&](const Assignment<Key> &assignment,
+                     double probability) -> double {
+    // Typecast so we can use this to get the probability value.
+    DiscreteValues choices(assignment);
+    if (prunedDecisionTree->operator()(choices) == 0) {
+      EXPECT_DOUBLES_EQUAL(0.0, probability, 1e-9);
+    } else {
+      EXPECT_DOUBLES_EQUAL(original_discrete_conditionals(choices),
+                           probability, 1e-9);
+    }
+    return 0.0;
+  };
+
+  // Get the pruned discrete conditionals as an AlgebraicDecisionTree
+  auto pruned_discrete_conditionals =
+      hybridBayesNet->at(4)->asDiscreteConditional();
+  auto discrete_conditional_tree =
+      boost::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
+          pruned_discrete_conditionals);
+
+  // The checker functor verifies the values for us.
+  discrete_conditional_tree->apply(checker);
+}
+
 /* ****************************************************************************/
 // Test HybridBayesNet serialization.
 TEST(HybridBayesNet, Serialization) {
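With updateDiscreteConditionals wired into prune, pruning a hybrid Bayes net
now rewrites its discrete conditionals in place, which is also why prune is
no longer const. A rough usage sketch mirroring the unit test above (fixture
and leaf count taken from that test):

    #include <gtsam/hybrid/HybridBayesNet.h>
    #include "Switching.h"
    using namespace gtsam;

    Switching s(4);
    Ordering ordering = s.linearizedFactorGraph.getHybridOrdering();
    HybridBayesNet::shared_ptr posterior =
        s.linearizedFactorGraph.eliminateSequential(ordering);
    // Keep at most 3 leaves; zeroed-out choices propagate to the
    // Gaussian mixtures and to the discrete conditionals alike.
    HybridBayesNet pruned = posterior->prune(3);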
From cc78a14a466e35e3e14dd32b02b605d5f9265e6f Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 20 Oct 2022 16:47:45 -0400
Subject: [PATCH 05/10] HybridSmoother based on BayesNet

---
 gtsam/hybrid/HybridSmoother.cpp | 114 ++++++++++++++++++++++++++++++++
 gtsam/hybrid/HybridSmoother.h   |  73 ++++++++++++++++++++
 2 files changed, 187 insertions(+)
 create mode 100644 gtsam/hybrid/HybridSmoother.cpp
 create mode 100644 gtsam/hybrid/HybridSmoother.h

diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
new file mode 100644
index 0000000000..585ca53098
--- /dev/null
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -0,0 +1,114 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file HybridSmoother.cpp
+ * @brief An incremental smoother for hybrid factor graphs
+ * @author Varun Agrawal
+ * @date October 2022
+ */
+
+#include <gtsam/hybrid/HybridSmoother.h>
+
+#include <algorithm>
+#include <unordered_set>
+
+namespace gtsam {
+
+/* ************************************************************************* */
+void HybridSmoother::update(HybridGaussianFactorGraph graph,
+                            const Ordering &ordering,
+                            boost::optional<size_t> maxNrLeaves) {
+  // Add the necessary conditionals from the previous timestep(s).
+  std::tie(graph, hybridBayesNet_) =
+      addConditionals(graph, hybridBayesNet_, ordering);
+
+  // Eliminate.
+  auto bayesNetFragment = graph.eliminateSequential(ordering);
+
+  /// Prune
+  if (maxNrLeaves) {
+    // `prune` sets the leaves with 0 in decisionTree to nullptr in
+    // all the conditionals with the same keys in bayesNetFragment.
+    HybridBayesNet prunedBayesNetFragment =
+        bayesNetFragment->prune(*maxNrLeaves);
+    // Set the Bayes net fragment to the pruned version.
+    bayesNetFragment =
+        boost::make_shared<HybridBayesNet>(prunedBayesNetFragment);
+  }
+
+  // Add the partial Bayes net to the posterior Bayes net.
+  hybridBayesNet_.push_back(*bayesNetFragment);
+
+  tictoc_print_();
+}
+
+/* ************************************************************************* */
+std::pair<HybridGaussianFactorGraph, HybridBayesNet>
+HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
+                                const HybridBayesNet &originalHybridBayesNet,
+                                const Ordering &ordering) const {
+  HybridGaussianFactorGraph graph(originalGraph);
+  HybridBayesNet hybridBayesNet(originalHybridBayesNet);
+
+  // If we are not at the first iteration, it means we have conditionals to add.
+  if (!hybridBayesNet.empty()) {
+    // We add all relevant conditional mixtures on the last continuous variable
+    // in the previous `hybridBayesNet` to the graph.
+
+    // Conditionals to remove from the Bayes net,
+    // since the conditionals will be updated.
+    std::vector<HybridConditional::shared_ptr> conditionals_to_erase;
+
+    // New conditionals to add to the graph
+    gtsam::HybridBayesNet newConditionals;
+
+    // NOTE(Varun) Using a for-range loop doesn't work since some of the
+    // conditionals are invalid pointers.
+    for (size_t i = 0; i < hybridBayesNet.size(); i++) {
+      auto conditional = hybridBayesNet.at(i);
+
+      for (auto &key : conditional->frontals()) {
+        if (std::find(ordering.begin(), ordering.end(), key) !=
+            ordering.end()) {
+          newConditionals.push_back(conditional);
+          conditionals_to_erase.push_back(conditional);
+
+          break;
+        }
+      }
+    }
+    // Remove conditionals at the end so we don't affect the order in the
+    // original Bayes net.
+    for (auto &&conditional : conditionals_to_erase) {
+      auto it = find(hybridBayesNet.begin(), hybridBayesNet.end(), conditional);
+      hybridBayesNet.erase(it);
+    }
+
+    graph.push_back(newConditionals);
+    // newConditionals.print("\n\n\nNew Conditionals to add back");
+  }
+  return {graph, hybridBayesNet};
+}
+
+/* ************************************************************************* */
+GaussianMixture::shared_ptr HybridSmoother::gaussianMixture(
+    size_t index) const {
+  return boost::dynamic_pointer_cast<GaussianMixture>(
+      hybridBayesNet_.at(index));
+}
+
+/* ************************************************************************* */
+const HybridBayesNet &HybridSmoother::hybridBayesNet() const {
+  return hybridBayesNet_;
+}
+
+}  // namespace gtsam
diff --git a/gtsam/hybrid/HybridSmoother.h b/gtsam/hybrid/HybridSmoother.h
new file mode 100644
index 0000000000..7e90f9425d
--- /dev/null
+++ b/gtsam/hybrid/HybridSmoother.h
@@ -0,0 +1,73 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file HybridSmoother.h
+ * @brief An incremental smoother for hybrid factor graphs
+ * @author Varun Agrawal
+ * @date October 2022
+ */
+
+#include <gtsam/discrete/DiscreteFactorGraph.h>
+#include <gtsam/hybrid/HybridBayesNet.h>
+#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
+
+namespace gtsam {
+
+class HybridSmoother {
+ private:
+  HybridBayesNet hybridBayesNet_;
+  HybridGaussianFactorGraph remainingFactorGraph_;
+
+ public:
+  /**
+   * Given new factors, perform an incremental update.
+   * The relevant densities in the `hybridBayesNet` will be added to the input
+   * graph (fragment), and then eliminated according to the `ordering`
+   * presented. The remaining factor graph contains Gaussian mixture factors
+   * that are not connected to the variables in the ordering, or a single
+   * discrete factor on all discrete keys, plus all discrete factors in the
+   * original graph.
+   *
+   * \note If maxNrLeaves is given, we look at the discrete factor resulting
+   * from this elimination, and prune it and the Gaussian components
+   * corresponding to the pruned choices.
+   *
+   * @param graph The new factors, should be linear only.
+   * @param ordering The ordering for elimination; only continuous variables
+   * are allowed.
+   * @param maxNrLeaves The maximum number of leaves in the new discrete
+   * factor, if applicable.
+   */
+  void update(HybridGaussianFactorGraph graph, const Ordering& ordering,
+              boost::optional<size_t> maxNrLeaves = boost::none);
+
+  /**
+   * @brief Add conditionals from the previous timestep as part of
+   * liquefaction.
+   *
+   * @param graph The new factor graph for the current time step.
+   * @param hybridBayesNet The hybrid Bayes net containing all conditionals so
+   * far.
+   * @param ordering The elimination ordering.
+   * @return std::pair<HybridGaussianFactorGraph, HybridBayesNet>
+   */
+  std::pair<HybridGaussianFactorGraph, HybridBayesNet> addConditionals(
+      const HybridGaussianFactorGraph& graph,
+      const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const;
+
+  /// Get the Gaussian Mixture from the Bayes Net posterior at `index`.
+  GaussianMixture::shared_ptr gaussianMixture(size_t index) const;
+
+  /// Return the Bayes Net posterior.
+  const HybridBayesNet& hybridBayesNet() const;
+};
+
+}  // namespace gtsam

From 453870e43e736a5ab9d978ed11e5122d44538472 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Thu, 20 Oct 2022 16:49:22 -0400
Subject: [PATCH 06/10] unit test for end-to-end hybrid estimation

---
 gtsam/hybrid/tests/testHybridEstimation.cpp | 123 ++++++++++++++++++++
 1 file changed, 123 insertions(+)
 create mode 100644 gtsam/hybrid/tests/testHybridEstimation.cpp

diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
new file mode 100644
index 0000000000..4ef245e223
--- /dev/null
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -0,0 +1,123 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file testHybridEstimation.cpp
+ * @brief Unit tests for end-to-end Hybrid Estimation
+ * @author Varun Agrawal
+ */
+
+#include <gtsam/base/Matrix.h>
+#include <gtsam/discrete/DiscreteBayesNet.h>
+#include <gtsam/discrete/DiscreteDistribution.h>
+#include <gtsam/discrete/DiscreteFactorGraph.h>
+#include <gtsam/hybrid/HybridBayesNet.h>
+#include <gtsam/hybrid/HybridBayesTree.h>
+#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
+#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
+#include <gtsam/hybrid/HybridNonlinearISAM.h>
+#include <gtsam/hybrid/HybridSmoother.h>
+#include <gtsam/inference/Symbol.h>
+#include <gtsam/linear/GaussianFactorGraph.h>
+#include <gtsam/nonlinear/PriorFactor.h>
+#include <gtsam/slam/BetweenFactor.h>
+
+// Include for test suite
+#include <CppUnitLite/TestHarness.h>
+
+#include "Switching.h"
+
+using namespace std;
+using namespace gtsam;
+
+using symbol_shorthand::X;
+
+Ordering getOrdering(HybridGaussianFactorGraph& factors,
+                     const HybridGaussianFactorGraph& newFactors) {
+  factors += newFactors;
+  // Get all the discrete keys from the factors.
+  KeySet allDiscrete = factors.discreteKeys();
+
+  // Create a KeyVector with continuous keys followed by discrete keys.
+  KeyVector newKeysDiscreteLast;
+  const KeySet newFactorKeys = newFactors.keys();
+  // Insert continuous keys first.
+  for (auto& k : newFactorKeys) {
+    if (!allDiscrete.exists(k)) {
+      newKeysDiscreteLast.push_back(k);
+    }
+  }
+
+  // Insert discrete keys at the end.
+  std::copy(allDiscrete.begin(), allDiscrete.end(),
+            std::back_inserter(newKeysDiscreteLast));
+
+  const VariableIndex index(factors);
+
+  // Get an ordering where the new keys are eliminated last.
+  Ordering ordering = Ordering::ColamdConstrainedLast(
+      index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
+      true);
+  return ordering;
+}
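getOrdering constrains COLAMD so that the new keys, and all discrete keys,
are eliminated last. The intended per-timestep call pattern, sketched under
the assumption of the Switching fixture used in the test below (the step
helper is hypothetical):

    // One smoothing step: linearize the new slice, order it, update.
    void step(HybridSmoother& smoother,
              const HybridNonlinearFactorGraph& newFactors,
              const Values& linearizationPoint) {
      HybridGaussianFactorGraph linearized =
          *newFactors.linearize(linearizationPoint);
      HybridGaussianFactorGraph graph;
      graph = smoother.hybridBayesNet();  // prior conditionals act as factors
      Ordering ordering = getOrdering(graph, linearized);
      smoother.update(linearized, ordering, 3);  // prune to at most 3 leaves
    }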
+/****************************************************************************/
+// Test approximate inference with an additional pruning step.
+TEST(HybridNonlinearISAM, Incremental) {
+  size_t K = 10;
+  std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6};
+  // Ground truth discrete seq
+  std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0};
+  Switching switching(K, 1.0, 0.1, measurements);
+  // HybridNonlinearISAM smoother;
+  HybridSmoother smoother;
+  HybridNonlinearFactorGraph graph;
+  Values initial;
+
+  // switching.nonlinearFactorGraph.print();
+  // switching.linearizationPoint.print();
+  // Add the X(1) prior
+  graph.push_back(switching.nonlinearFactorGraph.at(0));
+  initial.insert(X(1), switching.linearizationPoint.at(X(1)));
+
+  HybridGaussianFactorGraph linearized;
+  HybridGaussianFactorGraph bayesNet;
+
+  for (size_t k = 1; k < K; k++) {
+    // Motion Model
+    graph.push_back(switching.nonlinearFactorGraph.at(k));
+    // Measurement
+    graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
+
+    initial.insert(X(k + 1), switching.linearizationPoint.at(X(k + 1)));
+
+    // std::cout << "\n============= " << k << std::endl;
+    // graph.print();
+
+    bayesNet = smoother.hybridBayesNet();
+    linearized = *graph.linearize(initial);
+    Ordering ordering = getOrdering(bayesNet, linearized);
+
+    ordering.print();
+    smoother.update(linearized, ordering, 3);
+    // if (k == 2) exit(0);
+    // smoother.hybridBayesNet().print();
+    graph.resize(0);
+    // initial.clear();
+  }
+}
+
+/* ************************************************************************* */
+int main() {
+  TestResult tr;
+  return TestRegistry::runAllTests(tr);
+}
+/* ************************************************************************* */

From 58a15c3385bb7fa3b9b14f78a059d9e78c0a182b Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sat, 22 Oct 2022 18:29:05 -0400
Subject: [PATCH 07/10] remove extraneous tictoc print

---
 gtsam/hybrid/HybridSmoother.cpp | 2 --
 1 file changed, 2 deletions(-)

diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
index 585ca53098..07a7a4e77a 100644
--- a/gtsam/hybrid/HybridSmoother.cpp
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -47,8 +47,6 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph,

   // Add the partial Bayes net to the posterior Bayes net.
   hybridBayesNet_.push_back(*bayesNetFragment);
-
-  tictoc_print_();
 }

 /* ************************************************************************* */

From 74d6f0548d72c1b632a89ab29b66f82b78e54951 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sat, 22 Oct 2022 18:29:28 -0400
Subject: [PATCH 08/10] add assertions for HybridEstimation example

---
 gtsam/hybrid/tests/testHybridEstimation.cpp | 25 +++++++++++++--------
 1 file changed, 16 insertions(+), 9 deletions(-)

diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index 4ef245e223..de978360e4 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -82,8 +82,6 @@ TEST(HybridNonlinearISAM, Incremental) {
   HybridNonlinearFactorGraph graph;
   Values initial;

-  // switching.nonlinearFactorGraph.print();
-  // switching.linearizationPoint.print();
   // Add the X(1) prior
   graph.push_back(switching.nonlinearFactorGraph.at(0));
   initial.insert(X(1), switching.linearizationPoint.at(X(1)));
@@ -92,6 +90,7 @@ TEST(HybridNonlinearISAM, Incremental) {
   HybridGaussianFactorGraph bayesNet;

   for (size_t k = 1; k < K; k++) {
+    std::cout << ">>>>>>>>>>>>>>>>>>> k=" << k << std::endl;
     // Motion Model
     graph.push_back(switching.nonlinearFactorGraph.at(k));
     // Measurement
@@ -99,20 +98,28 @@ TEST(HybridNonlinearISAM, Incremental) {

     initial.insert(X(k + 1), switching.linearizationPoint.at(X(k + 1)));

-    // std::cout << "\n============= " << k << std::endl;
-    // graph.print();
-
     bayesNet = smoother.hybridBayesNet();
     linearized = *graph.linearize(initial);
     Ordering ordering = getOrdering(bayesNet, linearized);

-    ordering.print();
     smoother.update(linearized, ordering, 3);
-    // if (k == 2) exit(0);
-    // smoother.hybridBayesNet().print();
     graph.resize(0);
-    // initial.clear();
   }
+  HybridValues delta = smoother.hybridBayesNet().optimize();
+
+  Values result = initial.retract(delta.continuous());
+
+  DiscreteValues expected_discrete;
+  for (size_t k = 0; k < K - 1; k++) {
+    expected_discrete[M(k + 1)] = discrete_seq[k];
+  }
+  EXPECT(assert_equal(expected_discrete, delta.discrete()));
+
+  Values expected_continuous;
+  for (size_t k = 0; k < K; k++) {
+    expected_continuous.insert(X(k + 1), measurements[k]);
+  }
+  EXPECT(assert_equal(expected_continuous, result));
 }

 /* ************************************************************************* */

From 0b793997aca34ee701046744d9c14fe2c72e5c63 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sat, 22 Oct 2022 18:29:49 -0400
Subject: [PATCH 09/10] simplify conversion of root discrete conditional

---
 gtsam/hybrid/HybridBayesTree.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp
index 266b295dd5..8fdedab44f 100644
--- a/gtsam/hybrid/HybridBayesTree.cpp
+++ b/gtsam/hybrid/HybridBayesTree.cpp
@@ -146,8 +146,8 @@ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const {

 /* ************************************************************************* */
 void HybridBayesTree::prune(const size_t maxNrLeaves) {
-  auto decisionTree = boost::dynamic_pointer_cast<DiscreteConditional>(
-      this->roots_.at(0)->conditional()->inner());
+  auto decisionTree =
+      this->roots_.at(0)->conditional()->asDiscreteConditional();

   DecisionTreeFactor prunedDecisionTree = decisionTree->prune(maxNrLeaves);
   decisionTree->root_ = prunedDecisionTree.root_;
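With the root conditional now fetched through asDiscreteConditional(),
pruning a Bayes tree reads the same as pruning a Bayes net. A hedged sketch
of pruning a multifrontal elimination result (the fixture and leaf count are
illustrative, reusing the Switching test fixture):

    #include <gtsam/hybrid/HybridBayesTree.h>
    #include "Switching.h"
    using namespace gtsam;

    Switching s(4);
    Ordering ordering = s.linearizedFactorGraph.getHybridOrdering();
    HybridBayesTree::shared_ptr tree =
        s.linearizedFactorGraph.eliminateMultifrontal(ordering);
    tree->prune(3);  // prunes the root discrete conditional in place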
From ebb29ef33d3551149f5fbe233caccf87639b20fe Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sat, 22 Oct 2022 19:52:44 -0400
Subject: [PATCH 10/10] Make Switching test fixture 0-indexed to match iMHS
 paper

---
 gtsam/hybrid/tests/Switching.h                |  42 ++--
 gtsam/hybrid/tests/testHybridBayesNet.cpp     |  32 +--
 gtsam/hybrid/tests/testHybridBayesTree.cpp    |   8 +-
 gtsam/hybrid/tests/testHybridEstimation.cpp   |  10 +-
 .../tests/testHybridGaussianFactorGraph.cpp   |  16 +-
 gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 152 ++++++-------
 .../tests/testHybridNonlinearFactorGraph.cpp  | 210 +++++++++---------
 .../hybrid/tests/testHybridNonlinearISAM.cpp  | 174 +++++++--------
 8 files changed, 327 insertions(+), 317 deletions(-)

diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h
index a1bad79bc0..f9e1916d07 100644
--- a/gtsam/hybrid/tests/Switching.h
+++ b/gtsam/hybrid/tests/Switching.h
@@ -134,26 +134,26 @@ struct Switching {
   Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1,
             std::vector<double> measurements = {})
       : K(K) {
-    // Create DiscreteKeys for binary K modes, modes[0] will not be used.
-    for (size_t k = 0; k <= K; k++) {
+    // Create DiscreteKeys for binary K modes.
+    for (size_t k = 0; k < K; k++) {
       modes.emplace_back(M(k), 2);
     }

     // If measurements are not provided, we just have the robot moving forward.
     if (measurements.size() == 0) {
-      for (size_t k = 1; k <= K; k++) {
-        measurements.push_back(k - 1);
+      for (size_t k = 0; k < K; k++) {
+        measurements.push_back(k);
       }
     }

     // Create hybrid factor graph.
     // Add a prior on X(0).
     auto prior = boost::make_shared<PriorFactor<double>>(
-        X(1), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma));
+        X(0), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma));
     nonlinearFactorGraph.push_nonlinear(prior);

     // Add "motion models".
-    for (size_t k = 1; k < K; k++) {
+    for (size_t k = 0; k < K - 1; k++) {
       KeyVector keys = {X(k), X(k + 1)};
       auto motion_models = motionModels(k, between_sigma);
       std::vector<NonlinearFactor::shared_ptr> components;
@@ -166,17 +166,17 @@ struct Switching {

     // Add measurement factors
     auto measurement_noise = noiseModel::Isotropic::Sigma(1, prior_sigma);
-    for (size_t k = 2; k <= K; k++) {
+    for (size_t k = 1; k < K; k++) {
       nonlinearFactorGraph.emplace_nonlinear<PriorFactor<double>>(
-          X(k), measurements.at(k - 1), measurement_noise);
+          X(k), measurements.at(k), measurement_noise);
     }

     // Add "mode chain"
     addModeChain(&nonlinearFactorGraph);

     // Create the linearization point.
-    for (size_t k = 1; k <= K; k++) {
-      linearizationPoint.insert(X(k), static_cast<double>(k));
+    for (size_t k = 0; k < K; k++) {
+      linearizationPoint.insert(X(k), static_cast<double>(k + 1));
     }

     // The ground truth is robot moving forward
@@ -195,11 +195,16 @@ struct Switching {
     return {still, moving};
   }

-  // Add "mode chain" to HybridNonlinearFactorGraph
+  /**
+   * @brief Add "mode chain" to HybridNonlinearFactorGraph from M(0) to M(K-2).
+   * E.g. if K=4, we want M0, M1 and M2.
+   *
+   * @param fg The nonlinear factor graph to which the mode chain is added.
+   */
   void addModeChain(HybridNonlinearFactorGraph *fg) {
-    auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
+    auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
     fg->push_discrete(prior);
-    for (size_t k = 1; k < K - 1; k++) {
+    for (size_t k = 0; k < K - 2; k++) {
       auto parents = {modes[k]};
       auto conditional = boost::make_shared<DiscreteConditional>(
           modes[k + 1], parents, "1/2 3/2");
@@ -207,11 +212,16 @@ struct Switching {
     }
   }

-  // Add "mode chain" to HybridGaussianFactorGraph
+  /**
+   * @brief Add "mode chain" to HybridGaussianFactorGraph from M(0) to M(K-2).
+   * E.g. if K=4, we want M0, M1 and M2.
+   *
+   * @param fg The Gaussian factor graph to which the mode chain is added.
+   */
   void addModeChain(HybridGaussianFactorGraph *fg) {
-    auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
+    auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
     fg->push_discrete(prior);
-    for (size_t k = 1; k < K - 1; k++) {
+    for (size_t k = 0; k < K - 2; k++) {
       auto parents = {modes[k]};
       auto conditional = boost::make_shared<DiscreteConditional>(
           modes[k + 1], parents, "1/2 3/2");
diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp
index fc353f9c1f..e1fe724695 100644
--- a/gtsam/hybrid/tests/testHybridBayesNet.cpp
+++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp
@@ -82,9 +82,9 @@ TEST(HybridBayesNet, Choose) {
       s.linearizedFactorGraph.eliminatePartialSequential(ordering);

   DiscreteValues assignment;
+  assignment[M(0)] = 1;
   assignment[M(1)] = 1;
-  assignment[M(2)] = 1;
-  assignment[M(3)] = 0;
+  assignment[M(2)] = 0;

   GaussianBayesNet gbn = hybridBayesNet->choose(assignment);

@@ -120,20 +120,20 @@ TEST(HybridBayesNet, OptimizeAssignment) {
       s.linearizedFactorGraph.eliminatePartialSequential(ordering);

   DiscreteValues assignment;
+  assignment[M(0)] = 1;
   assignment[M(1)] = 1;
   assignment[M(2)] = 1;
-  assignment[M(3)] = 1;

   VectorValues delta = hybridBayesNet->optimize(assignment);

   // The linearization point has the same value as the key index,
-  // e.g. X(1) = 1, X(2) = 2,
+  // e.g. X(0) = 1, X(1) = 2,
   // but the factors specify X(k) = k-1, so delta should be -1.
   VectorValues expected_delta;
+  expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
   expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
   expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
   expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
-  expected_delta.insert(make_pair(X(4), -Vector1::Ones()));

   EXPECT(assert_equal(expected_delta, delta));
 }
@@ -150,16 +150,16 @@ TEST(HybridBayesNet, Optimize) {

   HybridValues delta = hybridBayesNet->optimize();

   DiscreteValues expectedAssignment;
-  expectedAssignment[M(1)] = 1;
-  expectedAssignment[M(2)] = 0;
-  expectedAssignment[M(3)] = 1;
+  expectedAssignment[M(0)] = 1;
+  expectedAssignment[M(1)] = 0;
+  expectedAssignment[M(2)] = 1;
   EXPECT(assert_equal(expectedAssignment, delta.discrete()));

   VectorValues expectedValues;
-  expectedValues.insert(X(1), -0.999904 * Vector1::Ones());
-  expectedValues.insert(X(2), -0.99029 * Vector1::Ones());
-  expectedValues.insert(X(3), -1.00971 * Vector1::Ones());
-  expectedValues.insert(X(4), -1.0001 * Vector1::Ones());
+  expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
+  expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
+  expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
+  expectedValues.insert(X(3), -1.0001 * Vector1::Ones());

   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
 }
@@ -175,10 +175,10 @@ TEST(HybridBayesNet, OptimizeMultifrontal) {

   HybridValues delta = hybridBayesTree->optimize();

   VectorValues expectedValues;
-  expectedValues.insert(X(1), -0.999904 * Vector1::Ones());
-  expectedValues.insert(X(2), -0.99029 * Vector1::Ones());
-  expectedValues.insert(X(3), -1.00971 * Vector1::Ones());
-  expectedValues.insert(X(4), -1.0001 * Vector1::Ones());
+  expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
+  expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
+  expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
+  expectedValues.insert(X(3), -1.0001 * Vector1::Ones());

   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
 }
diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp
index 0908b8cb59..876c550cb0 100644
---
a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -60,9 +60,9 @@ TEST(HybridBayesTree, OptimizeAssignment) { isam.update(graph1); DiscreteValues assignment; + assignment[M(0)] = 1; assignment[M(1)] = 1; assignment[M(2)] = 1; - assignment[M(3)] = 1; VectorValues delta = isam.optimize(assignment); @@ -70,16 +70,16 @@ TEST(HybridBayesTree, OptimizeAssignment) { // e.g. X(1) = 1, X(2) = 2, // but the factors specify X(k) = k-1, so delta should be -1. VectorValues expected_delta; + expected_delta.insert(make_pair(X(0), -Vector1::Ones())); expected_delta.insert(make_pair(X(1), -Vector1::Ones())); expected_delta.insert(make_pair(X(2), -Vector1::Ones())); expected_delta.insert(make_pair(X(3), -Vector1::Ones())); - expected_delta.insert(make_pair(X(4), -Vector1::Ones())); EXPECT(assert_equal(expected_delta, delta)); // Create ordering. Ordering ordering; - for (size_t k = 1; k <= s.K; k++) ordering += X(k); + for (size_t k = 0; k < s.K; k++) ordering += X(k); HybridBayesNet::shared_ptr hybridBayesNet; HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; @@ -123,7 +123,7 @@ TEST(HybridBayesTree, Optimize) { // Create ordering. Ordering ordering; - for (size_t k = 1; k <= s.K; k++) ordering += X(k); + for (size_t k = 0; k < s.K; k++) ordering += X(k); HybridBayesNet::shared_ptr hybridBayesNet; HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index de978360e4..6be7566ae0 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -82,9 +82,9 @@ TEST(HybridNonlinearISAM, Incremental) { HybridNonlinearFactorGraph graph; Values initial; - // Add the X(1) prior + // Add the X(0) prior graph.push_back(switching.nonlinearFactorGraph.at(0)); - initial.insert(X(1), switching.linearizationPoint.at(X(1))); + initial.insert(X(0), switching.linearizationPoint.at(X(0))); HybridGaussianFactorGraph linearized; HybridGaussianFactorGraph bayesNet; @@ -96,7 +96,7 @@ TEST(HybridNonlinearISAM, Incremental) { // Measurement graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1)); - initial.insert(X(k + 1), switching.linearizationPoint.at(X(k + 1))); + initial.insert(X(k), switching.linearizationPoint.at(X(k))); bayesNet = smoother.hybridBayesNet(); linearized = *graph.linearize(initial); @@ -111,13 +111,13 @@ TEST(HybridNonlinearISAM, Incremental) { DiscreteValues expected_discrete; for (size_t k = 0; k < K - 1; k++) { - expected_discrete[M(k + 1)] = discrete_seq[k]; + expected_discrete[M(k)] = discrete_seq[k]; } EXPECT(assert_equal(expected_discrete, delta.discrete())); Values expected_continuous; for (size_t k = 0; k < K; k++) { - expected_continuous.insert(X(k + 1), measurements[k]); + expected_continuous.insert(X(k), measurements[k]); } EXPECT(assert_equal(expected_continuous, result)); } diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index d199d76113..ed6b97ab04 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -531,34 +531,34 @@ TEST(HybridGaussianFactorGraph, Conditionals) { hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X1) Ordering ordering; - ordering.push_back(X(1)); + ordering.push_back(X(0)); HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering); hfg.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, 
X2 | M1)
   hfg.push_back(*bayes_net);
   hfg.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
   hfg.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)

+  ordering.push_back(X(1));
   ordering.push_back(X(2));
-  ordering.push_back(X(3));
+  ordering.push_back(M(0));
   ordering.push_back(M(1));
-  ordering.push_back(M(2));

   bayes_net = hfg.eliminateSequential(ordering);

   HybridValues result = bayes_net->optimize();

   Values expected_continuous;
-  expected_continuous.insert(X(1), 0);
-  expected_continuous.insert(X(2), 1);
-  expected_continuous.insert(X(3), 2);
-  expected_continuous.insert(X(4), 4);
+  expected_continuous.insert(X(0), 0);
+  expected_continuous.insert(X(1), 1);
+  expected_continuous.insert(X(2), 2);
+  expected_continuous.insert(X(3), 4);
   Values result_continuous =
       switching.linearizationPoint.retract(result.continuous());
   EXPECT(assert_equal(expected_continuous, result_continuous));

   DiscreteValues expected_discrete;
+  expected_discrete[M(0)] = 1;
   expected_discrete[M(1)] = 1;
-  expected_discrete[M(2)] = 1;
   EXPECT(assert_equal(expected_discrete, result.discrete()));
 }
diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp
index a5e3903d94..18ce7f10ec 100644
--- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp
+++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp
@@ -54,40 +54,40 @@ TEST(HybridGaussianElimination, IncrementalElimination) {
   // Create initial factor graph
   // *        *        *
   // |        |        |
-  // X1 -*- X2 -*- X3
-  //      \*-M1-*/
-  graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
-  graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
-  graph1.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
-  graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
+  // X0 -*- X1 -*- X2
+  //      \*-M0-*/
+  graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X0)
+  graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X0, X1 | M0)
+  graph1.push_back(switching.linearizedFactorGraph.at(2));  // P(X1, X2 | M1)
+  graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M0)

   // Run update step
   isam.update(graph1);

-  // Check that after update we have 3 hybrid Bayes net nodes:
-  // P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2)
+  // Check that after update we have 3 hybrid Bayes net nodes:
+  // P(X0 | X1, M0) and P(X1, X2 | M0, M1), P(M0, M1)
   EXPECT_LONGS_EQUAL(3, isam.size());
   EXPECT(isam[X(0)]->conditional()->frontals() == KeyVector{X(0)});
   EXPECT(isam[X(0)]->conditional()->parents() == KeyVector({X(1), M(0)}));
   EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
   EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({M(0), M(1)}));

   /********************************************************/
   // New factor graph for incremental update.
HybridGaussianFactorGraph graph2; - graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2) - graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3) - graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2) + graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X1) + graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X2) + graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M0, M1) isam.update(graph2); // Check that after the second update we have // 1 additional hybrid Bayes net node: - // P(X2, X3 | M1, M2) + // P(X1, X2 | M0, M1) EXPECT_LONGS_EQUAL(3, isam.size()); - EXPECT(isam[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)})); - EXPECT(isam[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)})); + EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(1), X(2)})); + EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(0), M(1)})); } /* ****************************************************************************/ @@ -100,104 +100,104 @@ TEST(HybridGaussianElimination, IncrementalInference) { // Create initial factor graph // * * * // | | | - // X1 -*- X2 -*- X3 + // X0 -*- X1 -*- X2 // | | - // *-M1 - * - M2 - graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1) - graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1) - graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2) - graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1) + // *-M0 - * - M1 + graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X0) + graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0) + graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X1) + graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M0) // Run update step isam.update(graph1); - auto discreteConditional_m1 = - isam[M(1)]->conditional()->asDiscreteConditional(); - EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)})); + auto discreteConditional_m0 = + isam[M(0)]->conditional()->asDiscreteConditional(); + EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)})); /********************************************************/ // New factor graph for incremental update. HybridGaussianFactorGraph graph2; - graph2.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2) - graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3) - graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2) + graph2.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1) + graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X2) + graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M0, M1) isam.update(graph2); /********************************************************/ // Run batch elimination so we can compare results. 
 Ordering ordering;
+  ordering += X(0);
   ordering += X(1);
   ordering += X(2);
-  ordering += X(3);

-  // Now we calculate the actual factors using full elimination
+  // Now we calculate the expected factors using full elimination
   HybridBayesTree::shared_ptr expectedHybridBayesTree;
   HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
   std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
       switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);

+  // The densities on X(0) should be the same
+  auto x0_conditional = dynamic_pointer_cast<GaussianMixture>(
+      isam[X(0)]->conditional()->inner());
+  auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
+      (*expectedHybridBayesTree)[X(0)]->conditional()->inner());
+  EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
+
   // The densities on X(1) should be the same
   auto x1_conditional = dynamic_pointer_cast<GaussianMixture>(
       isam[X(1)]->conditional()->inner());
-  auto actual_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
-      (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
-  EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));
+  auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
+      (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
+  EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));

   // The densities on X(2) should be the same
   auto x2_conditional = dynamic_pointer_cast<GaussianMixture>(
       isam[X(2)]->conditional()->inner());
-  auto actual_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
-      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
-  EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));
-
-  // The densities on X(3) should be the same
-  auto x3_conditional = dynamic_pointer_cast<GaussianMixture>(
-      isam[X(3)]->conditional()->inner());
-  auto actual_x3_conditional = dynamic_pointer_cast<GaussianMixture>(
+  auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
       (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
-  EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));
+  EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));

   // We only perform manual continuous elimination for 0,0.
   // The other discrete probabilities on M(2) are calculated the same way
   Ordering discrete_ordering;
+  discrete_ordering += M(0);
   discrete_ordering += M(1);
-  discrete_ordering += M(2);
   HybridBayesTree::shared_ptr discreteBayesTree =
       expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);

   DiscreteValues m00;
-  m00[M(1)] = 0, m00[M(2)] = 0;
+  m00[M(0)] = 0, m00[M(1)] = 0;
   DiscreteConditional decisionTree =
-      *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
+      *(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
   double m00_prob = decisionTree(m00);

-  auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional();
+  auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional();

   // Test if the probability values are as expected with regression tests.
 DiscreteValues assignment;
   EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
+  assignment[M(0)] = 0;
   assignment[M(1)] = 0;
-  assignment[M(2)] = 0;
   EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
-  assignment[M(1)] = 1;
-  assignment[M(2)] = 0;
-  EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(0)] = 1;
   assignment[M(1)] = 0;
-  assignment[M(2)] = 1;
+  EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(0)] = 0;
+  assignment[M(1)] = 1;
   EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(0)] = 1;
   assignment[M(1)] = 1;
-  assignment[M(2)] = 1;
   EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));

   // Check if the clique conditional generated from incremental elimination
   // matches that of batch elimination.
   auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
   auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
-      (*expectedChordal)[M(2)]->conditional()->inner());
+      (*expectedChordal)[M(1)]->conditional()->inner());
   auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
-      isam[M(2)]->conditional()->inner());
+      isam[M(1)]->conditional()->inner());
   EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
 }

@@ -208,13 +208,13 @@ TEST(HybridGaussianElimination, Approx_inference) {
   HybridGaussianISAM incrementalHybrid;
   HybridGaussianFactorGraph graph1;

-  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+  // Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
   for (size_t i = 1; i < 4; i++) {
     graph1.push_back(switching.linearizedFactorGraph.at(i));
   }

-  // Add the Gaussian factors, 1 prior on X(1),
-  // 3 measurements on X(2), X(3), X(4)
+  // Add the Gaussian factors, 1 prior on X(0),
+  // 3 measurements on X(1), X(2), X(3)
   graph1.push_back(switching.linearizedFactorGraph.at(0));
   for (size_t i = 4; i <= 7; i++) {
     graph1.push_back(switching.linearizedFactorGraph.at(i));
   }

   // Create ordering.
   Ordering ordering;
-  for (size_t j = 1; j <= 4; j++) {
+  for (size_t j = 0; j < 4; j++) {
     ordering += X(j);
   }

@@ -271,26 +271,26 @@ TEST(HybridGaussianElimination, Approx_inference) {
   1 1 1 Leaf  0.5
  */

-  auto discreteConditional_m1 = *dynamic_pointer_cast<DiscreteConditional>(
-      incrementalHybrid[M(1)]->conditional()->inner());
-  EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)}));
+  auto discreteConditional_m0 = *dynamic_pointer_cast<DiscreteConditional>(
+      incrementalHybrid[M(0)]->conditional()->inner());
+  EXPECT(discreteConditional_m0.keys() == KeyVector({M(0), M(1), M(2)}));

   // Get the number of elements which are greater than 0.
   auto count = [](const double &value, int count) {
     return value > 0 ? count + 1 : count;
   };
   // Check that the number of leaves after pruning is 5.
-  EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0));
+  EXPECT_LONGS_EQUAL(5, discreteConditional_m0.fold(count, 0));

   // Check that the hybrid nodes of the bayes net match those of the pre-pruning
   // bayes net, at the same positions.
 auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
-      unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
+      unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
   auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
-      incrementalHybrid[X(4)]->conditional()->inner());
+      incrementalHybrid[X(3)]->conditional()->inner());

   std::vector<std::pair<DiscreteValues, double>> assignments =
-      discreteConditional_m1.enumerate();
+      discreteConditional_m0.enumerate();
   // Loop over all assignments and check the pruned components
   for (auto &&av : assignments) {
     const DiscreteValues &assignment = av.first;
@@ -314,13 +314,13 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
   HybridGaussianFactorGraph graph1;

   /***** Run Round 1 *****/
-  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+  // Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
   for (size_t i = 1; i < 4; i++) {
     graph1.push_back(switching.linearizedFactorGraph.at(i));
   }

-  // Add the Gaussian factors, 1 prior on X(1),
-  // 3 measurements on X(2), X(3), X(4)
+  // Add the Gaussian factors, 1 prior on X(0),
+  // 3 measurements on X(1), X(2), X(3)
   graph1.push_back(switching.linearizedFactorGraph.at(0));
   for (size_t i = 5; i <= 7; i++) {
     graph1.push_back(switching.linearizedFactorGraph.at(i));
@@ -335,13 +335,13 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
   // each with 2, 4, 8, and 5 (pruned) leaves respetively.
   EXPECT_LONGS_EQUAL(4, incrementalHybrid.size());
   EXPECT_LONGS_EQUAL(
-      2, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents());
+      2, incrementalHybrid[X(0)]->conditional()->asMixture()->nrComponents());
   EXPECT_LONGS_EQUAL(
-      3, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents());
+      3, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents());
   EXPECT_LONGS_EQUAL(
-      5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
+      5, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents());
   EXPECT_LONGS_EQUAL(
-      5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
+      5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());

   /***** Run Round 2 *****/
   HybridGaussianFactorGraph graph2;
@@ -356,9 +356,9 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
   // with 5 (pruned) leaves.
   CHECK_EQUAL(5, incrementalHybrid.size());
   EXPECT_LONGS_EQUAL(
-      5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
+      5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
   EXPECT_LONGS_EQUAL(
-      5, incrementalHybrid[X(5)]->conditional()->asMixture()->nrComponents());
+      5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
 }

 /* ************************************************************************/
@@ -370,7 +370,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
   /*************** Run Round 1 ***************/
   HybridNonlinearFactorGraph fg;

-  // Add a prior on pose x1 at the origin.
+  // Add a prior on pose x0 at the origin.
   // A prior factor consists of a mean and
   // a noise model (covariance matrix)
   Pose2 prior(0.0, 0.0, 0.0);  // prior mean is at origin
diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
index 9e93eaba33..f6889f132c 100644
--- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
+++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
@@ -263,7 +263,7 @@ TEST(HybridFactorGraph, EliminationTree) {

   // Create ordering.
 Ordering ordering;
-  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+  for (size_t k = 0; k < self.K; k++) ordering += X(k);

   // Create elimination tree.
   HybridEliminationTree etree(self.linearizedFactorGraph, ordering);
 }

/****************************************************************************
- *Test elimination function by eliminating x1 in *-x1-*-x2 graph.
+ *Test elimination function by eliminating x0 in *-x0-*-x1 graph.
 */
-TEST(GaussianElimination, Eliminate_x1) {
+TEST(GaussianElimination, Eliminate_x0) {
   Switching self(3);

   // Gather factors on x1, has a simple Gaussian and a mixture factor.
   HybridGaussianFactorGraph factors;

   // Add first hybrid factor
   factors.push_back(self.linearizedFactorGraph[1]);

-  // Eliminate x1
+  // Eliminate x0
   Ordering ordering;
-  ordering += X(1);
+  ordering += X(0);

   auto result = EliminateHybrid(factors, ordering);
   CHECK(result.first);
 }

/****************************************************************************
- * Test elimination function by eliminating x2 in x1-*-x2-*-x3 chain.
- *                                                m1/      \m2
+ * Test elimination function by eliminating x1 in x0-*-x1-*-x2 chain.
+ *                                                m0/      \m1
 */
-TEST(HybridsGaussianElimination, Eliminate_x2) {
+TEST(HybridsGaussianElimination, Eliminate_x1) {
   Switching self(3);

-  // Gather factors on x2, will be two mixture factors (with x1 and x3, resp.).
+  // Gather factors on x1, will be two mixture factors (with x0 and x2, resp.).
   HybridGaussianFactorGraph factors;
-  factors.push_back(self.linearizedFactorGraph[1]);  // involves m1
-  factors.push_back(self.linearizedFactorGraph[2]);  // involves m2
+  factors.push_back(self.linearizedFactorGraph[1]);  // involves m0
+  factors.push_back(self.linearizedFactorGraph[2]);  // involves m1

-  // Eliminate x2
+  // Eliminate x1
   Ordering ordering;
-  ordering += X(2);
+  ordering += X(1);

   std::pair<HybridConditional::shared_ptr, HybridFactor::shared_ptr> result =
       EliminateHybrid(factors, ordering);

 GaussianFactorGraph::shared_ptr batchGFG(double between,
                                          Values linearizationPoint) {
   NonlinearFactorGraph graph;
-  graph.addPrior(X(1), 0, Isotropic::Sigma(1, 0.1));
+  graph.addPrior(X(0), 0, Isotropic::Sigma(1, 0.1));

-  auto between_x1_x2 = boost::make_shared<BetweenFactor<double>>(
-      X(1), X(2), between, Isotropic::Sigma(1, 1.0));
+  auto between_x0_x1 = boost::make_shared<BetweenFactor<double>>(
+      X(0), X(1), between, Isotropic::Sigma(1, 1.0));

-  graph.push_back(between_x1_x2);
+  graph.push_back(between_x0_x1);

   return graph.linearize(linearizationPoint);
 }

/****************************************************************************
- * Test elimination function by eliminating x1 and x2 in graph.
+ * Test elimination function by eliminating x0 and x1 in graph.
 */
TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
   Switching self(2, 1.0, 0.1);

   auto factors = self.linearizedFactorGraph;

-  // Eliminate x1
+  // Eliminate x0
   Ordering ordering;
+  ordering += X(0);
   ordering += X(1);
-  ordering += X(2);

   HybridConditional::shared_ptr hybridConditionalMixture;
   HybridFactor::shared_ptr factorOnModes;

   std::tie(hybridConditionalMixture, factorOnModes) =
       EliminateHybrid(factors, ordering);

   auto gaussianConditionalMixture = dynamic_pointer_cast<GaussianMixture>(
       hybridConditionalMixture->inner());
   CHECK(gaussianConditionalMixture);

-  // Frontals = [x1, x2]
+  // Frontals = [x0, x1]
   EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals());
   // 1 parent, which is the mode
   EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents());

@@ -387,7 +387,7 @@ TEST(HybridFactorGraph, Partial_Elimination) {

   // Create ordering.
   Ordering ordering;
-  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+  for (size_t k = 0; k < self.K; k++) ordering += X(k);

   // Eliminate partially.
   HybridBayesNet::shared_ptr hybridBayesNet;
   HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
   std::tie(hybridBayesNet, remainingFactorGraph) =
       self.linearizedFactorGraph.eliminatePartialSequential(ordering);

   CHECK(hybridBayesNet);
   EXPECT_LONGS_EQUAL(3, hybridBayesNet->size());
-  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
-  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
-  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
-  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
-  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
-  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
+  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(0)});
+  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(1), M(0)}));
+  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(1)});
+  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(2), M(0), M(1)}));
+  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(2)});
+  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(0), M(1)}));

   CHECK(remainingFactorGraph);
   EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size());
-  EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(1)}));
-  EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(2), M(1)}));
-  EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(1), M(2)}));
+  EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(0)}));
+  EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(1), M(0)}));
+  EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(0), M(1)}));
 }

/****************************************************************************
 * Test full elimination.
 */
TEST(HybridFactorGraph, Full_Elimination) {
@@ -427,7 +427,7 @@ TEST(HybridFactorGraph, Full_Elimination) {
   {
     // Create ordering.
     Ordering ordering;
-    for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+    for (size_t k = 0; k < self.K; k++) ordering += X(k);

     // Eliminate partially.
     std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) =
@@ -440,15 +440,15 @@ TEST(HybridFactorGraph, Full_Elimination) {
       discrete_fg.push_back(df->inner());
     }
     ordering.clear();
-    for (size_t k = 1; k < self.K; k++) ordering += M(k);
+    for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
     discreteBayesNet =
         *discrete_fg.eliminateSequential(ordering, EliminateForMPE);
   }

   // Create ordering.
   Ordering ordering;
-  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
-  for (size_t k = 1; k < self.K; k++) ordering += M(k);
+  for (size_t k = 0; k < self.K; k++) ordering += X(k);
+  for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);

   // Eliminate partially.
   HybridBayesNet::shared_ptr hybridBayesNet =
@@ -456,23 +456,23 @@ TEST(HybridFactorGraph, Full_Elimination) {
 
   CHECK(hybridBayesNet);
   EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());
-  // p(x1 | x2, m1)
-  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
-  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
-  // p(x2 | x3, m1, m2)
-  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
-  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
-  // p(x3 | m1, m2)
-  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
-  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
-  // P(m1 | m2)
-  EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)});
-  EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)}));
+  // p(x0 | x1, m0)
+  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(0)});
+  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(1), M(0)}));
+  // p(x1 | x2, m0, m1)
+  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(1)});
+  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(2), M(0), M(1)}));
+  // p(x2 | m0, m1)
+  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(2)});
+  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(0), M(1)}));
+  // P(m0 | m1)
+  EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(0)});
+  EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(1)}));
   EXPECT(
       dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(3)->inner())
           ->equals(*discreteBayesNet.at(0)));
-  // P(m2)
-  EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)});
+  // P(m1)
+  EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(1)});
   EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents());
   EXPECT(
       dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(4)->inner())
          ->equals(*discreteBayesNet.at(1)));
@@ -489,7 +489,7 @@ TEST(HybridFactorGraph, Printing) {
 
   // Create ordering.
   Ordering ordering;
-  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+  for (size_t k = 0; k < self.K; k++) ordering += X(k);
 
   // Eliminate partially.
   HybridBayesNet::shared_ptr hybridBayesNet;
@@ -499,84 +499,84 @@ TEST(HybridFactorGraph, Printing) {
 
   string expected_hybridFactorGraph = R"(
 size: 7
-factor 0: Continuous [x1]
+factor 0: Continuous [x0]
 
-  A[x1] = [
+  A[x0] = [
 	10
 ]
   b = [ -10 ]
   No noise model
-factor 1: Hybrid [x1 x2; m1]{
- Choice(m1) 
+factor 1: Hybrid [x0 x1; m0]{
+ Choice(m0) 
 0 Leaf :
-  A[x1] = [
+  A[x0] = [
 	-1
 ]
-  A[x2] = [
+  A[x1] = [
 	1
 ]
   b = [ -1 ]
   No noise model
 
 1 Leaf :
-  A[x1] = [
+  A[x0] = [
 	-1
 ]
-  A[x2] = [
+  A[x1] = [
 	1
 ]
   b = [ -0 ]
   No noise model
 
}
-factor 2: Hybrid [x2 x3; m2]{
- Choice(m2) 
+factor 2: Hybrid [x1 x2; m1]{
+ Choice(m1) 
 0 Leaf :
-  A[x2] = [
+  A[x1] = [
 	-1
 ]
-  A[x3] = [
+  A[x2] = [
 	1
 ]
   b = [ -1 ]
   No noise model
 
 1 Leaf :
-  A[x2] = [
+  A[x1] = [
 	-1
 ]
-  A[x3] = [
+  A[x2] = [
 	1
 ]
   b = [ -0 ]
   No noise model
 
}
-factor 3: Continuous [x2]
+factor 3: Continuous [x1]
 
-  A[x2] = [
+  A[x1] = [
 	10
 ]
   b = [ -10 ]
   No noise model
-factor 4: Continuous [x3]
+factor 4: Continuous [x2]
 
-  A[x3] = [
+  A[x2] = [
 	10
 ]
   b = [ -10 ]
   No noise model
-factor 5: Discrete [m1]
-  P( m1 ):
+factor 5: Discrete [m0]
+  P( m0 ):
   Leaf  0.5
 
-factor 6: Discrete [m2 m1]
-  P( m2 | m1 ):
-  Choice(m2) 
-  0 Choice(m1) 
+factor 6: Discrete [m1 m0]
+  P( m1 | m0 ):
+  Choice(m1) 
+  0 Choice(m0) 
   0 0 Leaf 0.33333333
   0 1 Leaf  0.6
-  1 Choice(m1) 
+  1 Choice(m0) 
   1 0 Leaf 0.66666667
   1 1 Leaf  0.4
 
@@ -586,71 +586,71 @@ factor 6: Discrete [m2 m1]
 
   // Expected output for hybridBayesNet.
string expected_hybridBayesNet = R"( size: 3 -factor 0: Hybrid P( x1 | x2 m1) - Discrete Keys = (m1, 2), - Choice(m1) - 0 Leaf p(x1 | x2) +factor 0: Hybrid P( x0 | x1 m0) + Discrete Keys = (m0, 2), + Choice(m0) + 0 Leaf p(x0 | x1) R = [ 10.0499 ] - S[x2] = [ -0.0995037 ] + S[x1] = [ -0.0995037 ] d = [ -9.85087 ] No noise model - 1 Leaf p(x1 | x2) + 1 Leaf p(x0 | x1) R = [ 10.0499 ] - S[x2] = [ -0.0995037 ] + S[x1] = [ -0.0995037 ] d = [ -9.95037 ] No noise model -factor 1: Hybrid P( x2 | x3 m1 m2) - Discrete Keys = (m1, 2), (m2, 2), - Choice(m2) - 0 Choice(m1) - 0 0 Leaf p(x2 | x3) +factor 1: Hybrid P( x1 | x2 m0 m1) + Discrete Keys = (m0, 2), (m1, 2), + Choice(m1) + 0 Choice(m0) + 0 0 Leaf p(x1 | x2) R = [ 10.099 ] - S[x3] = [ -0.0990196 ] + S[x2] = [ -0.0990196 ] d = [ -9.99901 ] No noise model - 0 1 Leaf p(x2 | x3) + 0 1 Leaf p(x1 | x2) R = [ 10.099 ] - S[x3] = [ -0.0990196 ] + S[x2] = [ -0.0990196 ] d = [ -9.90098 ] No noise model - 1 Choice(m1) - 1 0 Leaf p(x2 | x3) + 1 Choice(m0) + 1 0 Leaf p(x1 | x2) R = [ 10.099 ] - S[x3] = [ -0.0990196 ] + S[x2] = [ -0.0990196 ] d = [ -10.098 ] No noise model - 1 1 Leaf p(x2 | x3) + 1 1 Leaf p(x1 | x2) R = [ 10.099 ] - S[x3] = [ -0.0990196 ] + S[x2] = [ -0.0990196 ] d = [ -10 ] No noise model -factor 2: Hybrid P( x3 | m1 m2) - Discrete Keys = (m1, 2), (m2, 2), - Choice(m2) - 0 Choice(m1) - 0 0 Leaf p(x3) +factor 2: Hybrid P( x2 | m0 m1) + Discrete Keys = (m0, 2), (m1, 2), + Choice(m1) + 0 Choice(m0) + 0 0 Leaf p(x2) R = [ 10.0494 ] d = [ -10.1489 ] No noise model - 0 1 Leaf p(x3) + 0 1 Leaf p(x2) R = [ 10.0494 ] d = [ -10.1479 ] No noise model - 1 Choice(m1) - 1 0 Leaf p(x3) + 1 Choice(m0) + 1 0 Leaf p(x2) R = [ 10.0494 ] d = [ -10.0504 ] No noise model - 1 1 Leaf p(x3) + 1 1 Leaf p(x2) R = [ 10.0494 ] d = [ -10.0494 ] No noise model @@ -669,7 +669,7 @@ factor 2: Hybrid P( x3 | m1 m2) TEST(HybridFactorGraph, DefaultDecisionTree) { HybridNonlinearFactorGraph fg; - // Add a prior on pose x1 at the origin. + // Add a prior on pose x0 at the origin. 
// A prior factor consists of a mean and a noise model (covariance matrix) Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin auto priorNoise = noiseModel::Diagonal::Sigmas( diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index fbb114ef32..3bdb5ed1e0 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -55,47 +55,47 @@ TEST(HybridNonlinearISAM, IncrementalElimination) { // Create initial factor graph // * * * // | | | - // X1 -*- X2 -*- X3 - // \*-M1-*/ - graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X1) - graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X1, X2 | M1) - graph1.push_back(switching.nonlinearFactorGraph.at(2)); // P(X2, X3 | M2) - graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M1) + // X0 -*- X1 -*- X2 + // \*-M0-*/ + graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X0) + graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X0, X1 | M0) + graph1.push_back(switching.nonlinearFactorGraph.at(2)); // P(X1, X2 | M1) + graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M0) - initial.insert(X(1), 1); - initial.insert(X(2), 2); - initial.insert(X(3), 3); + initial.insert(X(0), 1); + initial.insert(X(1), 2); + initial.insert(X(2), 3); // Run update step isam.update(graph1, initial); // Check that after update we have 3 hybrid Bayes net nodes: - // P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2) + // P(X0 | X1, M0) and P(X1, X2 | M0, M1), P(M0, M1) HybridGaussianISAM bayesTree = isam.bayesTree(); EXPECT_LONGS_EQUAL(3, bayesTree.size()); - EXPECT(bayesTree[X(1)]->conditional()->frontals() == KeyVector{X(1)}); - EXPECT(bayesTree[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)})); - EXPECT(bayesTree[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)})); - EXPECT(bayesTree[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)})); + EXPECT(bayesTree[X(0)]->conditional()->frontals() == KeyVector{X(0)}); + EXPECT(bayesTree[X(0)]->conditional()->parents() == KeyVector({X(1), M(0)})); + EXPECT(bayesTree[X(1)]->conditional()->frontals() == KeyVector({X(1), X(2)})); + EXPECT(bayesTree[X(1)]->conditional()->parents() == KeyVector({M(0), M(1)})); /********************************************************/ // New factor graph for incremental update. 
HybridNonlinearFactorGraph graph2; initial = Values(); - graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X2) - graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X3) - graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M1, M2) + graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X1) + graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X2) + graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M0, M1) isam.update(graph2, initial); bayesTree = isam.bayesTree(); // Check that after the second update we have // 1 additional hybrid Bayes net node: - // P(X2, X3 | M1, M2) + // P(X1, X2 | M0, M1) EXPECT_LONGS_EQUAL(3, bayesTree.size()); - EXPECT(bayesTree[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)})); - EXPECT(bayesTree[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)})); + EXPECT(bayesTree[X(2)]->conditional()->frontals() == KeyVector({X(1), X(2)})); + EXPECT(bayesTree[X(2)]->conditional()->parents() == KeyVector({M(0), M(1)})); } /* ****************************************************************************/ @@ -109,35 +109,35 @@ TEST(HybridNonlinearISAM, IncrementalInference) { // Create initial factor graph // * * * // | | | - // X1 -*- X2 -*- X3 + // X0 -*- X1 -*- X2 // | | - // *-M1 - * - M2 - graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X1) - graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X1, X2 | M1) - graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X2) - graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M1) + // *-M0 - * - M1 + graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X0) + graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X0, X1 | M0) + graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X1) + graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M0) - initial.insert(X(1), 1); - initial.insert(X(2), 2); + initial.insert(X(0), 1); + initial.insert(X(1), 2); // Run update step isam.update(graph1, initial); HybridGaussianISAM bayesTree = isam.bayesTree(); - auto discreteConditional_m1 = - bayesTree[M(1)]->conditional()->asDiscreteConditional(); - EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)})); + auto discreteConditional_m0 = + bayesTree[M(0)]->conditional()->asDiscreteConditional(); + EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)})); /********************************************************/ // New factor graph for incremental update. HybridNonlinearFactorGraph graph2; initial = Values(); - initial.insert(X(3), 3); + initial.insert(X(2), 3); - graph2.push_back(switching.nonlinearFactorGraph.at(2)); // P(X2, X3 | M2) - graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X3) - graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M1, M2) + graph2.push_back(switching.nonlinearFactorGraph.at(2)); // P(X1, X2 | M1) + graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X2) + graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M0, M1) isam.update(graph2, initial); bayesTree = isam.bayesTree(); @@ -145,9 +145,9 @@ TEST(HybridNonlinearISAM, IncrementalInference) { /********************************************************/ // Run batch elimination so we can compare results. 
   Ordering ordering;
+  ordering += X(0);
   ordering += X(1);
   ordering += X(2);
-  ordering += X(3);
 
   // Now we calculate the actual factors using full elimination
   HybridBayesTree::shared_ptr expectedHybridBayesTree;
@@ -155,67 +155,67 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
       switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
 
+  // The densities on X(0) should be the same
+  auto x0_conditional = dynamic_pointer_cast<GaussianMixture>(
+      bayesTree[X(0)]->conditional()->inner());
+  auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
+      (*expectedHybridBayesTree)[X(0)]->conditional()->inner());
+  EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
+
   // The densities on X(1) should be the same
   auto x1_conditional = dynamic_pointer_cast<GaussianMixture>(
       bayesTree[X(1)]->conditional()->inner());
-  auto actual_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
+  auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
       (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
-  EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));
+  EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));
 
   // The densities on X(2) should be the same
   auto x2_conditional = dynamic_pointer_cast<GaussianMixture>(
       bayesTree[X(2)]->conditional()->inner());
-  auto actual_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
-      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
-  EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));
-
-  // The densities on X(3) should be the same
-  auto x3_conditional = dynamic_pointer_cast<GaussianMixture>(
-      bayesTree[X(3)]->conditional()->inner());
-  auto actual_x3_conditional = dynamic_pointer_cast<GaussianMixture>(
+  auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
-  EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));
+  EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
 
   // We only perform manual continuous elimination for 0,0.
-  // The other discrete probabilities on M(2) are calculated the same way
+  // The other discrete probabilities on M(1) are calculated the same way
   Ordering discrete_ordering;
+  discrete_ordering += M(0);
   discrete_ordering += M(1);
-  discrete_ordering += M(2);
   HybridBayesTree::shared_ptr discreteBayesTree =
       expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);
 
   DiscreteValues m00;
-  m00[M(1)] = 0, m00[M(2)] = 0;
+  m00[M(0)] = 0, m00[M(1)] = 0;
   DiscreteConditional decisionTree =
-      *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
+      *(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
   double m00_prob = decisionTree(m00);
 
   auto discreteConditional =
-      bayesTree[M(2)]->conditional()->asDiscreteConditional();
+      bayesTree[M(1)]->conditional()->asDiscreteConditional();
 
   // Test if the probability values are as expected with regression tests.
   DiscreteValues assignment;
   EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
+  assignment[M(0)] = 0;
   assignment[M(1)] = 0;
-  assignment[M(2)] = 0;
   EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
-  assignment[M(1)] = 1;
-  assignment[M(2)] = 0;
-  EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(0)] = 1;
   assignment[M(1)] = 0;
-  assignment[M(2)] = 1;
+  EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(0)] = 0;
+  assignment[M(1)] = 1;
   EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(0)] = 1;
   assignment[M(1)] = 1;
-  assignment[M(2)] = 1;
   EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));
 
   // Check if the clique conditional generated from incremental elimination
   // matches that of batch elimination.
   auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
   auto expectedConditional = dynamic_pointer_cast<DiscreteConditional>(
-      (*expectedChordal)[M(2)]->conditional()->inner());
+      (*expectedChordal)[M(1)]->conditional()->inner());
   auto actualConditional = dynamic_pointer_cast<DiscreteConditional>(
-      bayesTree[M(2)]->conditional()->inner());
+      bayesTree[M(1)]->conditional()->inner());
   EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
 }
 
@@ -227,22 +227,22 @@ TEST(HybridNonlinearISAM, Approx_inference) {
   HybridNonlinearFactorGraph graph1;
   Values initial;
 
-  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+  // Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
   for (size_t i = 1; i < 4; i++) {
     graph1.push_back(switching.nonlinearFactorGraph.at(i));
   }
 
-  // Add the Gaussian factors, 1 prior on X(1),
-  // 3 measurements on X(2), X(3), X(4)
+  // Add the Gaussian factors, 1 prior on X(0),
+  // 3 measurements on X(1), X(2), X(3)
   graph1.push_back(switching.nonlinearFactorGraph.at(0));
   for (size_t i = 4; i <= 7; i++) {
     graph1.push_back(switching.nonlinearFactorGraph.at(i));
-    initial.insert(X(i - 3), i - 3);
+    initial.insert(X(i - 4), i - 3);
   }
 
   // Create ordering.
   Ordering ordering;
-  for (size_t j = 1; j <= 4; j++) {
+  for (size_t j = 0; j < 4; j++) {
     ordering += X(j);
   }
 
@@ -292,26 +292,26 @@ TEST(HybridNonlinearISAM, Approx_inference) {
  1 1 1 Leaf 0.5
 */
-  auto discreteConditional_m1 = *dynamic_pointer_cast<DiscreteConditional>(
-      bayesTree[M(1)]->conditional()->inner());
-  EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)}));
+  auto discreteConditional_m0 = *dynamic_pointer_cast<DiscreteConditional>(
+      bayesTree[M(0)]->conditional()->inner());
+  EXPECT(discreteConditional_m0.keys() == KeyVector({M(0), M(1), M(2)}));
 
   // Get the number of elements which are greater than 0.
   auto count = [](const double &value, int count) {
     return value > 0 ? count + 1 : count;
   };
   // Check that the number of leaves after pruning is 5.
-  EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0));
+  EXPECT_LONGS_EQUAL(5, discreteConditional_m0.fold(count, 0));
 
   // Check that the hybrid nodes of the bayes net match those of the pre-pruning
   // bayes net, at the same positions.
   auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
-      unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
+      unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
   auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
-      bayesTree[X(4)]->conditional()->inner());
+      bayesTree[X(3)]->conditional()->inner());
 
   std::vector<std::pair<DiscreteValues, double>> assignments =
-      discreteConditional_m1.enumerate();
+      discreteConditional_m0.enumerate();
   // Loop over all assignments and check the pruned components
   for (auto &&av : assignments) {
     const DiscreteValues &assignment = av.first;
@@ -336,18 +336,18 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
   Values initial;
 
   /***** Run Round 1 *****/
-  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+  // Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
   for (size_t i = 1; i < 4; i++) {
     graph1.push_back(switching.nonlinearFactorGraph.at(i));
   }
 
-  // Add the Gaussian factors, 1 prior on X(1),
-  // 3 measurements on X(2), X(3), X(4)
+  // Add the Gaussian factors, 1 prior on X(0),
+  // 3 measurements on X(1), X(2), X(3)
   graph1.push_back(switching.nonlinearFactorGraph.at(0));
-  initial.insert(X(1), 1);
+  initial.insert(X(0), 1);
   for (size_t i = 5; i <= 7; i++) {
     graph1.push_back(switching.nonlinearFactorGraph.at(i));
-    initial.insert(X(i - 3), i - 3);
+    initial.insert(X(i - 4), i - 3);
   }
 
   // Run update with pruning
@@ -361,20 +361,20 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
-  // each with 2, 4, 8, and 5 (pruned) leaves respetively.
+  // each with 2, 4, 8, and 5 (pruned) leaves respectively.
   EXPECT_LONGS_EQUAL(4, bayesTree.size());
   EXPECT_LONGS_EQUAL(
-      2, bayesTree[X(1)]->conditional()->asMixture()->nrComponents());
+      2, bayesTree[X(0)]->conditional()->asMixture()->nrComponents());
   EXPECT_LONGS_EQUAL(
-      3, bayesTree[X(2)]->conditional()->asMixture()->nrComponents());
+      3, bayesTree[X(1)]->conditional()->asMixture()->nrComponents());
   EXPECT_LONGS_EQUAL(
-      5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents());
+      5, bayesTree[X(2)]->conditional()->asMixture()->nrComponents());
   EXPECT_LONGS_EQUAL(
-      5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
+      5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents());
 
   /***** Run Round 2 *****/
   HybridGaussianFactorGraph graph2;
-  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // x4-x5
-  graph2.push_back(switching.nonlinearFactorGraph.at(8));  // x5 measurement
+  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // x3-x4
+  graph2.push_back(switching.nonlinearFactorGraph.at(8));  // x4 measurement
   initial = Values();
-  initial.insert(X(5), 5);
+  initial.insert(X(4), 5);
 
   // Run update with pruning a second time.
   incrementalHybrid.update(graph2, initial);
@@ -386,9 +386,9 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
   // with 5 (pruned) leaves.
   CHECK_EQUAL(5, bayesTree.size());
   EXPECT_LONGS_EQUAL(
-      5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
+      5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents());
   EXPECT_LONGS_EQUAL(
-      5, bayesTree[X(5)]->conditional()->asMixture()->nrComponents());
+      5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
 }
 
 /* ************************************************************************/
@@ -401,7 +401,7 @@ TEST(HybridNonlinearISAM, NonTrivial) {
   HybridNonlinearFactorGraph fg;
   HybridNonlinearISAM inc;
 
-  // Add a prior on pose x1 at the origin.
+  // Add a prior on pose x0 at the origin.
   // A prior factor consists of a mean and
   // a noise model (covariance matrix)
   Pose2 prior(0.0, 0.0, 0.0);  // prior mean is at origin