Hybrid Elimination #1339

Merged (12 commits, Dec 10, 2022)
move multifrontal optimize test to testHybridBayesTree and update docstrings
varunagrawal committed Dec 3, 2022
commit 3eaf4cc910fa0cdf34023c4ebd0bf4b37354499b

gtsam/hybrid/HybridGaussianFactorGraph.cpp (3 changes: 1 addition & 2 deletions)
@@ -257,7 +257,6 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
// If there are no more continuous parents, then we should create here a
// DiscreteFactor, with the error for each discrete choice.
if (keysOfSeparator.empty()) {
- // TODO(Varun) Use the math from the iMHS_Math-1-indexed document
VectorValues empty_values;
auto factorProb = [&](const GaussianFactor::shared_ptr &factor) {
if (!factor) {

@@ -574,7 +573,7 @@ HybridGaussianFactorGraph::eliminateHybridSequential(
bayesNet->at(bayesNet->size() - 1);
DiscreteKeys discrete_keys = last_conditional->discreteKeys();

- // If not discrete variables, return the eliminated bayes net.
+ // If no discrete variables, return the eliminated bayes net.
if (discrete_keys.size() == 0) {
return bayesNet;
}
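
For context on the first hunk above: with no continuous separator left, each Gaussian factor's error is converted into an unnormalized probability for its discrete choice, which is then collected into a DiscreteFactor. A minimal sketch of that idea, assuming the usual exp(-error) conversion (illustrative only, not the commit's exact code):

    // Illustrative sketch: map each per-choice Gaussian factor to an
    // unnormalized probability, to be collected into a DiscreteFactor.
    // Uses empty_values from the surrounding code; requires <cmath>.
    auto factorProb = [&](const GaussianFactor::shared_ptr &factor) -> double {
      if (!factor) return 0.0;  // empty/pruned choice contributes nothing
      return std::exp(-factor->error(empty_values));  // P(choice) ∝ exp(-error)
    };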

gtsam/hybrid/tests/testHybridBayesNet.cpp (19 changes: 0 additions & 19 deletions)
@@ -164,25 +164,6 @@ TEST(HybridBayesNet, Optimize) {
EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
}

- /* ****************************************************************************/
- // Test bayes net multifrontal optimize
- TEST(HybridBayesNet, OptimizeMultifrontal) {
-   Switching s(4);
-
-   Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
-   HybridBayesTree::shared_ptr hybridBayesTree =
-       s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering);
-   HybridValues delta = hybridBayesTree->optimize();
-
-   VectorValues expectedValues;
-   expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
-   expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
-   expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
-   expectedValues.insert(X(3), -1.0001 * Vector1::Ones());
-
-   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
- }

/* ****************************************************************************/
// Test bayes net error
TEST(HybridBayesNet, Error) {

gtsam/hybrid/tests/testHybridBayesTree.cpp (19 changes: 19 additions & 0 deletions)
@@ -32,6 +32,25 @@ using noiseModel::Isotropic;
using symbol_shorthand::M;
using symbol_shorthand::X;

+ /* ****************************************************************************/
+ // Test multifrontal optimize
+ TEST(HybridBayesTree, OptimizeMultifrontal) {
+   Switching s(4);
+
+   Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
+   HybridBayesTree::shared_ptr hybridBayesTree =
+       s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering);
+   HybridValues delta = hybridBayesTree->optimize();
+
+   VectorValues expectedValues;
+   expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
+   expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
+   expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
+   expectedValues.insert(X(3), -1.0001 * Vector1::Ones());
+
+   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
+ }

/* ****************************************************************************/
// Test for optimizing a HybridBayesTree with a given assignment.
TEST(HybridBayesTree, OptimizeAssignment) {
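As a follow-up to the OptimizeMultifrontal test added above, one could also cross-check the Bayes tree solution against sequential elimination. A hedged sketch of such an extra assertion inside the same test body (not part of this commit; it assumes the inherited eliminateSequential path and HybridBayesNet::optimize() returning a HybridValues, as used elsewhere in these tests):

    // Hypothetical cross-check: sequential elimination of the same graph
    // should yield the same continuous solution as the Bayes tree above.
    HybridBayesNet::shared_ptr hybridBayesNet =
        s.linearizedFactorGraph.eliminateSequential(hybridOrdering);
    EXPECT(assert_equal(delta.continuous(),
                        hybridBayesNet->optimize().continuous(), 1e-5));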

gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp (4 changes: 2 additions & 2 deletions)
@@ -386,11 +386,11 @@ TEST(HybridFactorGraph, Partial_Elimination) {

auto linearizedFactorGraph = self.linearizedFactorGraph;

- // Create ordering.
+ // Create ordering of only continuous variables.
Ordering ordering;
for (size_t k = 0; k < self.K; k++) ordering += X(k);

- // Eliminate partially.
+ // Eliminate partially i.e. only continuous part.
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
std::tie(hybridBayesNet, remainingFactorGraph) =
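The updated comments make explicit that only the continuous variables X(0)...X(K-1) are eliminated in this test. A sketch of that pattern in general, assuming the eliminatePartialSequential entry point inherited from EliminateableFactorGraph (illustrative only; the test's actual call is not reproduced here):

    // Illustrative sketch: eliminate only the continuous variables, leaving
    // a remaining factor graph over the discrete mode variables.
    HybridBayesNet::shared_ptr bayesNet;
    HybridGaussianFactorGraph::shared_ptr remaining;
    std::tie(bayesNet, remaining) =
        linearizedFactorGraph.eliminatePartialSequential(ordering);
    // bayesNet: conditionals on X(0)...X(K-1), possibly mixtures over modes;
    // remaining: the factors left on the discrete variables.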