diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp
index 4665a3136c..f898178c2b 100644
--- a/gtsam/hybrid/HybridBayesNet.cpp
+++ b/gtsam/hybrid/HybridBayesNet.cpp
@@ -16,8 +16,8 @@
  */
 
 #include <gtsam/hybrid/HybridBayesNet.h>
-#include <gtsam/hybrid/HybridValues.h>
 #include <gtsam/hybrid/HybridLookupDAG.h>
+#include <gtsam/hybrid/HybridValues.h>
 
 namespace gtsam {
 
@@ -112,13 +112,12 @@ HybridBayesNet HybridBayesNet::prune(
 
 /* ************************************************************************* */
 GaussianMixture::shared_ptr HybridBayesNet::atGaussian(size_t i) const {
-  return boost::dynamic_pointer_cast<GaussianMixture>(factors_.at(i)->inner());
+  return factors_.at(i)->asMixture();
 }
 
 /* ************************************************************************* */
 DiscreteConditional::shared_ptr HybridBayesNet::atDiscrete(size_t i) const {
-  return boost::dynamic_pointer_cast<DiscreteConditional>(
-      factors_.at(i)->inner());
+  return factors_.at(i)->asDiscreteConditional();
 }
 
 /* ************************************************************************* */
@@ -126,8 +125,14 @@ GaussianBayesNet HybridBayesNet::choose(
     const DiscreteValues &assignment) const {
   GaussianBayesNet gbn;
   for (size_t idx = 0; idx < size(); idx++) {
-    GaussianMixture gm = *this->atGaussian(idx);
-    gbn.push_back(gm(assignment));
+    try {
+      GaussianMixture gm = *this->atGaussian(idx);
+      gbn.push_back(gm(assignment));
+
+    } catch (std::exception &exc) {
+      // If the factor at `idx` is discrete-only, just continue.
+      continue;
+    }
   }
   return gbn;
 }
 
@@ -138,4 +143,10 @@ HybridValues HybridBayesNet::optimize() const {
   return dag.argmax();
 }
 
+/* *******************************************************************************/
+VectorValues HybridBayesNet::optimize(const DiscreteValues &assignment) const {
+  GaussianBayesNet gbn = this->choose(assignment);
+  return gbn.optimize();
+}
+
 }  // namespace gtsam
diff --git a/gtsam/hybrid/HybridBayesNet.h b/gtsam/hybrid/HybridBayesNet.h
index 0d2dc36424..a16a4f42c5 100644
--- a/gtsam/hybrid/HybridBayesNet.h
+++ b/gtsam/hybrid/HybridBayesNet.h
@@ -72,6 +72,15 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
   /// TODO(Shangjie) do we need to create a HybridGaussianBayesNet class, and
   /// put this method there?
   HybridValues optimize() const;
+
+  /**
+   * @brief Given a discrete assignment, return the optimized estimate for the
+   * selected Gaussian Bayes net.
+   *
+   * @param assignment An assignment of discrete values.
+   * @return VectorValues
+   */
+  VectorValues optimize(const DiscreteValues &assignment) const;
 };
 
 }  // namespace gtsam
diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp
index d65270f91d..0a9b9e7f22 100644
--- a/gtsam/hybrid/HybridBayesTree.cpp
+++ b/gtsam/hybrid/HybridBayesTree.cpp
@@ -35,4 +35,48 @@ bool HybridBayesTree::equals(const This& other, double tol) const {
   return Base::equals(other, tol);
 }
 
+/* ************************************************************************* */
+VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const {
+  GaussianBayesNet gbn;
+
+  KeyVector added_keys;
+
+  // Iterate over all the nodes in the BayesTree
+  for (auto&& node : nodes()) {
+    // Check if the conditional being added is already in the Bayes net.
+    if (std::find(added_keys.begin(), added_keys.end(), node.first) ==
+        added_keys.end()) {
+      // Access the clique and get the underlying hybrid conditional
+      HybridBayesTreeClique::shared_ptr clique = node.second;
+      HybridConditional::shared_ptr conditional = clique->conditional();
+
+      KeyVector frontals(conditional->frontals().begin(),
+                         conditional->frontals().end());
+
+      // Record the keys being added
+      added_keys.insert(added_keys.end(), frontals.begin(), frontals.end());
+
+      // If the conditional is hybrid (and not discrete-only), we get the
+      // GaussianConditional corresponding to the assignment and add it to the
+      // Gaussian Bayes net.
+      if (conditional->isHybrid()) {
+        auto gm = conditional->asMixture();
+        GaussianConditional::shared_ptr gaussian_conditional =
+            (*gm)(assignment);
+
+        gbn.push_back(gaussian_conditional);
+      }
+    }
+  }
+  // If TBB is enabled, the Bayes net order gets reversed,
+  // so we pre-reverse it
+#ifdef GTSAM_USE_TBB
+  auto reversed = boost::adaptors::reverse(gbn);
+  gbn = GaussianBayesNet(reversed.begin(), reversed.end());
+#endif
+
+  // Solve the selected Gaussian Bayes net and return the result.
+  return gbn.optimize();
+}
+
 }  // namespace gtsam
diff --git a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h
index 02a4a11e5e..b7950a4838 100644
--- a/gtsam/hybrid/HybridBayesTree.h
+++ b/gtsam/hybrid/HybridBayesTree.h
@@ -70,6 +70,15 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree<HybridBayesTreeClique> {
   /** Check equality */
   bool equals(const This& other, double tol = 1e-9) const;
 
+  /**
+   * @brief Recursively optimize the BayesTree to produce a vector solution.
+   *
+   * @param assignment The discrete values assignment to select the Gaussian
+   * mixtures.
+   * @return VectorValues
+   */
+  VectorValues optimize(const DiscreteValues& assignment) const;
+
   /// @}
 };
 
diff --git a/gtsam/hybrid/HybridConditional.h b/gtsam/hybrid/HybridConditional.h
index 3ba5da393b..91c9f84950 100644
--- a/gtsam/hybrid/HybridConditional.h
+++ b/gtsam/hybrid/HybridConditional.h
@@ -69,7 +69,7 @@
       BaseConditional;  ///< Typedef to our conditional base class
 
  protected:
-  // Type-erased pointer to the inner type
+  /// Type-erased pointer to the inner type
   boost::shared_ptr<Factor> inner_;
 
  public:
@@ -127,8 +127,7 @@
    * @param gaussianMixture Gaussian Mixture Conditional used to create the
    * HybridConditional.
    */
-  HybridConditional(
-      boost::shared_ptr<GaussianMixture> gaussianMixture);
+  HybridConditional(boost::shared_ptr<GaussianMixture> gaussianMixture);
 
   /**
    * @brief Return HybridConditional as a GaussianMixture
@@ -168,10 +167,10 @@
   /// Get the type-erased pointer to the inner type
   boost::shared_ptr<Factor> inner() { return inner_; }
 
-};  // DiscreteConditional
+};  // HybridConditional
 
 // traits
 template <>
-struct traits<HybridConditional> : public Testable<DiscreteConditional> {};
+struct traits<HybridConditional> : public Testable<HybridConditional> {};
 
 }  // namespace gtsam
diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index c024c1255f..94e33890dd 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -135,9 +135,9 @@ continuousElimination(const HybridGaussianFactorGraph &factors,
   for (auto &fp : factors) {
     if (auto ptr = boost::dynamic_pointer_cast<HybridGaussianFactor>(fp)) {
       gfg.push_back(ptr->inner());
-    } else if (auto p =
-                   boost::static_pointer_cast<HybridConditional>(fp)->inner()) {
-      gfg.push_back(boost::static_pointer_cast<GaussianConditional>(p));
+    } else if (auto ptr = boost::static_pointer_cast<HybridConditional>(fp)) {
+      gfg.push_back(
+          boost::static_pointer_cast<GaussianConditional>(ptr->inner()));
     } else {
       // It is an orphan wrapped conditional
     }
@@ -401,4 +401,20 @@ void HybridGaussianFactorGraph::add(DecisionTreeFactor::shared_ptr factor) {
   FactorGraph::add(boost::make_shared<HybridDiscreteFactor>(factor));
 }
 
+/* ************************************************************************ */
+const Ordering HybridGaussianFactorGraph::getHybridOrdering(
+    OptionalOrderingType orderingType) const {
+  KeySet discrete_keys;
+  for (auto &factor : factors_) {
+    for (const DiscreteKey &k : factor->discreteKeys()) {
+      discrete_keys.insert(k.first);
+    }
+  }
+
+  const VariableIndex index(factors_);
+  Ordering ordering = Ordering::ColamdConstrainedLast(
+      index, KeyVector(discrete_keys.begin(), discrete_keys.end()), true);
+  return ordering;
+}
+
 }  // namespace gtsam
diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h
index 9367103305..56f9b7e07d 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.h
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.h
@@ -160,6 +160,15 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
       Base::push_back(sharedFactor);
     }
   }
+
+  /**
+   * @brief Get a COLAMD ordering that eliminates all discrete keys last.
+   *
+   * @param orderingType The ordering type (currently unused).
+   * @return const Ordering
+   */
+  const Ordering getHybridOrdering(
+      OptionalOrderingType orderingType = boost::none) const;
 };
 
 }  // namespace gtsam
diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h
index 52ef0439eb..6f41d06ea9 100644
--- a/gtsam/hybrid/tests/Switching.h
+++ b/gtsam/hybrid/tests/Switching.h
@@ -145,7 +145,7 @@ struct Switching {
     // Add "motion models".
     for (size_t k = 1; k < K; k++) {
       KeyVector keys = {X(k), X(k + 1)};
-      auto motion_models = motionModels(k);
+      auto motion_models = motionModels(k, between_sigma);
       std::vector<NonlinearFactor::shared_ptr> components;
       for (auto &&f : motion_models) {
         components.push_back(boost::dynamic_pointer_cast<NonlinearFactor>(f));
@@ -155,7 +155,7 @@ struct Switching {
     }
 
     // Add measurement factors
-    auto measurement_noise = noiseModel::Isotropic::Sigma(1, 0.1);
+    auto measurement_noise = noiseModel::Isotropic::Sigma(1, prior_sigma);
     for (size_t k = 2; k <= K; k++) {
       nonlinearFactorGraph.emplace_nonlinear<PriorFactor<double>>(
           X(k), 1.0 * (k - 1), measurement_noise);
diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp
index f3db839557..4602e8bac1 100644
--- a/gtsam/hybrid/tests/testHybridBayesNet.cpp
+++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp
@@ -19,6 +19,7 @@
  */
 
 #include <gtsam/hybrid/HybridBayesNet.h>
+#include
 
 #include "Switching.h"
 
@@ -85,6 +86,76 @@ TEST(HybridBayesNet, Choose) {
                       *gbn.at(3)));
 }
 
+/* ****************************************************************************/
+// Test Bayes net optimize with a given discrete assignment.
+TEST(HybridBayesNet, OptimizeAssignment) {
+  Switching s(4);
+
+  Ordering ordering;
+  for (auto&& kvp : s.linearizationPoint) {
+    ordering += kvp.key;
+  }
+
+  HybridBayesNet::shared_ptr hybridBayesNet;
+  HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
+  std::tie(hybridBayesNet, remainingFactorGraph) =
+      s.linearizedFactorGraph.eliminatePartialSequential(ordering);
+
+  DiscreteValues assignment;
+  assignment[M(1)] = 1;
+  assignment[M(2)] = 1;
+  assignment[M(3)] = 1;
+
+  VectorValues delta = hybridBayesNet->optimize(assignment);
+
+  // The linearization point has the same value as the key index,
+  // e.g. X(1) = 1, X(2) = 2,
+  // but the factors specify X(k) = k-1, so delta should be -1.
+  VectorValues expected_delta;
+  expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
+  expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
+  expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
+  expected_delta.insert(make_pair(X(4), -Vector1::Ones()));
+
+  EXPECT(assert_equal(expected_delta, delta));
+}
+
+/* ****************************************************************************/
+// Test Bayes net optimize.
+TEST(HybridBayesNet, Optimize) {
+  Switching s(4);
+
+  Ordering ordering;
+  for (auto&& kvp : s.linearizationPoint) {
+    ordering += kvp.key;
+  }
+
+  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
+  HybridBayesNet::shared_ptr hybridBayesNet =
+      s.linearizedFactorGraph.eliminateSequential(hybridOrdering);
+
+  HybridValues delta = hybridBayesNet->optimize();
+
+  delta.print();
+  VectorValues correct;
+  correct.insert(X(1), 0 * Vector1::Ones());
+  correct.insert(X(2), 1 * Vector1::Ones());
+  correct.insert(X(3), 2 * Vector1::Ones());
+  correct.insert(X(4), 3 * Vector1::Ones());
+
+  DiscreteValues assignment111;
+  assignment111[M(1)] = 1;
+  assignment111[M(2)] = 1;
+  assignment111[M(3)] = 1;
+  std::cout << hybridBayesNet->choose(assignment111).error(correct) << std::endl;
+
+  DiscreteValues assignment101;
+  assignment101[M(1)] = 1;
+  assignment101[M(2)] = 0;
+  assignment101[M(3)] = 1;
+  std::cout << hybridBayesNet->choose(assignment101).error(correct) << std::endl;
+}
+
 /* ************************************************************************* */
 int main() {
   TestResult tr;
diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp
new file mode 100644
index 0000000000..a693af1b2d
--- /dev/null
+++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp
@@ -0,0 +1,93 @@
+/* ----------------------------------------------------------------------------
+
+ * GTSAM Copyright 2010, Georgia Tech Research Corporation,
+ * Atlanta, Georgia 30332-0415
+ * All Rights Reserved
+ * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
+
+ * See LICENSE for the license information
+
+ * -------------------------------------------------------------------------- */
+
+/**
+ * @file testHybridBayesTree.cpp
+ * @brief Unit tests for HybridBayesTree
+ * @author Varun Agrawal
+ * @date August 2022
+ */
+
+#include <gtsam/hybrid/HybridBayesTree.h>
+#include <gtsam/hybrid/HybridGaussianISAM.h>
+
+#include "Switching.h"
+
+// Include for test suite
+#include <CppUnitLite/TestHarness.h>
+
+using namespace std;
+using namespace gtsam;
+using noiseModel::Isotropic;
+using symbol_shorthand::M;
+using symbol_shorthand::X;
+
+/* ****************************************************************************/
+// Test for optimizing a HybridBayesTree.
+TEST(HybridBayesTree, Optimize) {
+  Switching s(4);
+
+  HybridGaussianISAM isam;
+  HybridGaussianFactorGraph graph1;
+
+  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+  for (size_t i = 1; i < 4; i++) {
+    graph1.push_back(s.linearizedFactorGraph.at(i));
+  }
+
+  // Add the Gaussian factors, 1 prior on X(1),
+  // 3 measurements on X(2), X(3), X(4)
+  graph1.push_back(s.linearizedFactorGraph.at(0));
+  for (size_t i = 4; i <= 7; i++) {
+    graph1.push_back(s.linearizedFactorGraph.at(i));
+  }
+
+  isam.update(graph1);
+
+  DiscreteValues assignment;
+  assignment[M(1)] = 1;
+  assignment[M(2)] = 1;
+  assignment[M(3)] = 1;
+
+  VectorValues delta = isam.optimize(assignment);
+
+  // The linearization point has the same value as the key index,
+  // e.g. X(1) = 1, X(2) = 2,
+  // but the factors specify X(k) = k-1, so delta should be -1.
+  VectorValues expected_delta;
+  expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
+  expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
+  expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
+  expected_delta.insert(make_pair(X(4), -Vector1::Ones()));
+
+  EXPECT(assert_equal(expected_delta, delta));
+
+  // Create ordering.
+  Ordering ordering;
+  for (size_t k = 1; k <= s.K; k++) ordering += X(k);
+
+  HybridBayesNet::shared_ptr hybridBayesNet;
+  HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
+  std::tie(hybridBayesNet, remainingFactorGraph) =
+      s.linearizedFactorGraph.eliminatePartialSequential(ordering);
+
+  GaussianBayesNet gbn = hybridBayesNet->choose(assignment);
+  VectorValues expected = gbn.optimize();
+
+  EXPECT(assert_equal(expected, delta));
+}
+
+/* ************************************************************************* */
+int main() {
+  TestResult tr;
+  return TestRegistry::runAllTests(tr);
+}
+/* ************************************************************************* */
diff --git a/gtsam/nonlinear/NonlinearISAM.cpp b/gtsam/nonlinear/NonlinearISAM.cpp
index 512069a8ab..840712eb65 100644
--- a/gtsam/nonlinear/NonlinearISAM.cpp
+++ b/gtsam/nonlinear/NonlinearISAM.cpp
@@ -10,7 +10,7 @@
  * -------------------------------------------------------------------------- */
 
 /**
- * @file NonlinearISAM-inl.h
+ * @file NonlinearISAM.cpp
  * @date Jan 19, 2010
  * @author Viorela Ila and Richard Roberts
 */
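
Usage note (not part of the patch): the following is a minimal sketch of how the API introduced above could be exercised, based only on the tests in this diff. It assumes the Switching fixture from gtsam/hybrid/tests/Switching.h and the getHybridOrdering / optimize(assignment) methods added here; treat it as illustrative rather than shipped GTSAM example code.

#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>

#include "Switching.h"  // test fixture from gtsam/hybrid/tests

using namespace gtsam;
using symbol_shorthand::M;

int main() {
  // Small switching system: continuous states X(1..4), binary modes M(1..3).
  Switching s(4);

  // Ordering from this patch: continuous keys first, discrete keys last.
  const Ordering ordering = s.linearizedFactorGraph.getHybridOrdering();

  // Eliminate the linearized hybrid factor graph into a hybrid Bayes net.
  HybridBayesNet::shared_ptr bayesNet =
      s.linearizedFactorGraph.eliminateSequential(ordering);

  // Fix the discrete modes and solve the selected Gaussian Bayes net.
  DiscreteValues assignment;
  assignment[M(1)] = 1;
  assignment[M(2)] = 1;
  assignment[M(3)] = 1;
  VectorValues delta = bayesNet->optimize(assignment);
  delta.print("delta");

  return 0;
}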