diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 325c32f95f..ac1956d5f6 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -160,8 +160,8 @@ void GaussianMixture::print(const std::string &s, for (auto &dk : discreteKeys()) { std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), "; } - std::cout << "\n"; - std::cout << " logNormalizationConstant: " << logConstant_ << "\n" + std::cout << std::endl + << " logNormalizationConstant: " << logConstant_ << std::endl << std::endl; conditionals_.print( "", [&](Key k) { return formatter(k); }, diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index ae8fa03781..da2645b5a6 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -40,17 +40,17 @@ bool HybridBayesTree::equals(const This& other, double tol) const { /* ************************************************************************* */ HybridValues HybridBayesTree::optimize() const { - DiscreteBayesNet dbn; + DiscreteFactorGraph discrete_fg; DiscreteValues mpe; auto root = roots_.at(0); // Access the clique and get the underlying hybrid conditional HybridConditional::shared_ptr root_conditional = root->conditional(); - // The root should be discrete only, we compute the MPE + // The root should be discrete only, we compute the MPE if (root_conditional->isDiscrete()) { - dbn.push_back(root_conditional->asDiscrete()); - mpe = DiscreteFactorGraph(dbn).optimize(); + discrete_fg.push_back(root_conditional->asDiscrete()); + mpe = discrete_fg.optimize(); } else { throw std::runtime_error( "HybridBayesTree root is not discrete-only. 
Please check elimination " @@ -136,8 +136,7 @@ struct HybridAssignmentData { } }; -/* ************************************************************************* - */ +/* ************************************************************************* */ GaussianBayesTree HybridBayesTree::choose( const DiscreteValues& assignment) const { GaussianBayesTree gbt; @@ -157,8 +156,12 @@ GaussianBayesTree HybridBayesTree::choose( return gbt; } -/* ************************************************************************* - */ +/* ************************************************************************* */ +double HybridBayesTree::error(const HybridValues& values) const { + return HybridGaussianFactorGraph(*this).error(values); +} + +/* ************************************************************************* */ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const { GaussianBayesTree gbt = this->choose(assignment); // If empty GaussianBayesTree, means a clique is pruned hence invalid diff --git a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h index f91e16cbf1..ab2d8a73d2 100644 --- a/gtsam/hybrid/HybridBayesTree.h +++ b/gtsam/hybrid/HybridBayesTree.h @@ -84,6 +84,9 @@ class GTSAM_EXPORT HybridBayesTree : public BayesTree { */ GaussianBayesTree choose(const DiscreteValues& assignment) const; + /** Error for all conditionals. 
*/ + double error(const HybridValues& values) const; + /** * @brief Optimize the hybrid Bayes tree by computing the MPE for the current * set of discrete variables and using it to compute the best continuous diff --git a/gtsam/hybrid/HybridFactorGraph.h b/gtsam/hybrid/HybridFactorGraph.h index 79f2a7af15..921cc14bf1 100644 --- a/gtsam/hybrid/HybridFactorGraph.h +++ b/gtsam/hybrid/HybridFactorGraph.h @@ -59,6 +59,10 @@ class GTSAM_EXPORT HybridFactorGraph : public FactorGraph { template HybridFactorGraph(const FactorGraph& graph) : Base(graph) {} + /** Construct from container of factors (shared_ptr or plain objects) */ + template + explicit HybridFactorGraph(const CONTAINER& factors) : Base(factors) {} + /// @} /// @name Extra methods to inspect discrete/continuous keys. /// @{ diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index a7a6eee5aa..efcd322aa0 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -353,6 +353,8 @@ static std::shared_ptr createGaussianMixtureFactor( if (factor) { auto hf = std::dynamic_pointer_cast(factor); if (!hf) throw std::runtime_error("Expected HessianFactor!"); + // Add 2.0 term since the constant term will be premultiplied by 0.5 + // as per the Hessian definition hf->constantTerm() += 2.0 * conditional->logNormalizationConstant(); } return factor; @@ -586,4 +588,24 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::probPrime( return prob_tree; } +/* ************************************************************************ */ +GaussianFactorGraph HybridGaussianFactorGraph::operator()( + const DiscreteValues &assignment) const { + GaussianFactorGraph gfg; + for (auto &&f : *this) { + if (auto gf = std::dynamic_pointer_cast(f)) { + gfg.push_back(gf); + } else if (auto gc = std::dynamic_pointer_cast(f)) { + gfg.push_back(gc); + } else if (auto gmf = std::dynamic_pointer_cast(f)) { + gfg.push_back((*gmf)(assignment)); + } else if 
(auto gm = dynamic_pointer_cast(f)) { + gfg.push_back((*gm)(assignment)); + } else { + continue; + } + } + return gfg; +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 2ca6e4c956..8a0a6f0a34 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -126,6 +126,11 @@ class GTSAM_EXPORT HybridGaussianFactorGraph /// @brief Default constructor. HybridGaussianFactorGraph() = default; + /** Construct from container of factors (shared_ptr or plain objects) */ + template + explicit HybridGaussianFactorGraph(const CONTAINER& factors) + : Base(factors) {} + /** * Implicit copy/downcast constructor to override explicit template container * constructor. In BayesTree this is used for: @@ -213,6 +218,10 @@ class GTSAM_EXPORT HybridGaussianFactorGraph GaussianFactorGraphTree assembleGraphTree() const; /// @} + + /// Get the GaussianFactorGraph at a given discrete assignment. + GaussianFactorGraph operator()(const DiscreteValues& assignment) const; + }; } // namespace gtsam diff --git a/gtsam/hybrid/hybrid.i b/gtsam/hybrid/hybrid.i index c1d57715ec..85c34e5755 100644 --- a/gtsam/hybrid/hybrid.i +++ b/gtsam/hybrid/hybrid.i @@ -92,7 +92,10 @@ class GaussianMixture : gtsam::HybridFactor { const std::vector& conditionalsList); - gtsam::GaussianMixtureFactor* likelihood(const gtsam::VectorValues &frontals) const; + gtsam::GaussianMixtureFactor* likelihood( + const gtsam::VectorValues& frontals) const; + double logProbability(const gtsam::HybridValues& values) const; + double evaluate(const gtsam::HybridValues& values) const; void print(string s = "GaussianMixture\n", const gtsam::KeyFormatter& keyFormatter = diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 68b3b8215d..559f59c8bc 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ 
b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -490,6 +490,58 @@ TEST(HybridGaussianFactorGraph, SwitchingTwoVar) { } } +/* ****************************************************************************/ +// Select a particular continuous factor graph given a discrete assignment +TEST(HybridGaussianFactorGraph, DiscreteSelection) { + Switching s(3); + + HybridGaussianFactorGraph graph = s.linearizedFactorGraph; + + DiscreteValues dv00{{M(0), 0}, {M(1), 0}}; + GaussianFactorGraph continuous_00 = graph(dv00); + GaussianFactorGraph expected_00; + expected_00.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10))); + expected_00.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-1))); + expected_00.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-1))); + expected_00.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10))); + expected_00.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10))); + + EXPECT(assert_equal(expected_00, continuous_00)); + + DiscreteValues dv01{{M(0), 0}, {M(1), 1}}; + GaussianFactorGraph continuous_01 = graph(dv01); + GaussianFactorGraph expected_01; + expected_01.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10))); + expected_01.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-1))); + expected_01.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-0))); + expected_01.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10))); + expected_01.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10))); + + EXPECT(assert_equal(expected_01, continuous_01)); + + DiscreteValues dv10{{M(0), 1}, {M(1), 0}}; + GaussianFactorGraph continuous_10 = graph(dv10); + GaussianFactorGraph expected_10; + expected_10.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10))); + expected_10.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-0))); + expected_10.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-1))); + expected_10.push_back(JacobianFactor(X(1), I_1x1 * 10, 
Vector1(-10))); + expected_10.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10))); + + EXPECT(assert_equal(expected_10, continuous_10)); + + DiscreteValues dv11{{M(0), 1}, {M(1), 1}}; + GaussianFactorGraph continuous_11 = graph(dv11); + GaussianFactorGraph expected_11; + expected_11.push_back(JacobianFactor(X(0), I_1x1 * 10, Vector1(-10))); + expected_11.push_back(JacobianFactor(X(0), -I_1x1, X(1), I_1x1, Vector1(-0))); + expected_11.push_back(JacobianFactor(X(1), -I_1x1, X(2), I_1x1, Vector1(-0))); + expected_11.push_back(JacobianFactor(X(1), I_1x1 * 10, Vector1(-10))); + expected_11.push_back(JacobianFactor(X(2), I_1x1 * 10, Vector1(-10))); + + EXPECT(assert_equal(expected_11, continuous_11)); +} + /* ************************************************************************* */ TEST(HybridGaussianFactorGraph, optimize) { HybridGaussianFactorGraph hfg; diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index f986eed02f..585bbdb331 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -121,7 +121,7 @@ namespace gtsam { const auto mean = solve({}); // solve for mean. 
mean.print(" mean", formatter); } - cout << " logNormalizationConstant: " << logNormalizationConstant() << std::endl; + cout << " logNormalizationConstant: " << logNormalizationConstant() << endl; if (model_) model_->print(" Noise model: "); else diff --git a/gtsam/linear/linear.i b/gtsam/linear/linear.i index ed19eaa3b5..9c448de50b 100644 --- a/gtsam/linear/linear.i +++ b/gtsam/linear/linear.i @@ -511,7 +511,7 @@ virtual class GaussianConditional : gtsam::JacobianFactor { GaussianConditional(size_t key, gtsam::Vector d, gtsam::Matrix R, size_t name1, gtsam::Matrix S, size_t name2, gtsam::Matrix T, const gtsam::noiseModel::Diagonal* sigmas); - GaussianConditional(const vector> terms, + GaussianConditional(const std::vector> terms, size_t nrFrontals, gtsam::Vector d, const gtsam::noiseModel::Diagonal* sigmas); @@ -773,4 +773,4 @@ class KalmanFilter { gtsam::Vector z, gtsam::Matrix Q); }; -} \ No newline at end of file +} diff --git a/gtsam/slam/README.md b/gtsam/slam/README.md index ae5edfdac0..018126a663 100644 --- a/gtsam/slam/README.md +++ b/gtsam/slam/README.md @@ -63,6 +63,6 @@ A RegularJacobianFactor that uses some badly documented reduction on the Jacobia A RegularJacobianFactor that eliminates a point using sequential elimination. -### JacobianFactorQR +### JacobianFactorSVD -A RegularJacobianFactor that uses the "Nullspace Trick" by Mourikis et al. See the documentation in the file, which *is* well documented. \ No newline at end of file +A RegularJacobianFactor that uses the "Nullspace Trick" by Mourikis et al. See the documentation in the file, which *is* well documented.