Hybrid Elimination #1339

Merged: 12 commits, Dec 10, 2022
update hybrid elimination and corresponding tests
varunagrawal committed Dec 10, 2022
commit 62bc9f20a3b36fbcb1840b9d600d68239f549063
12 changes: 9 additions & 3 deletions gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -172,8 +172,13 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
}
}

auto result = EliminateForMPE(dfg, frontalKeys);

return {boost::make_shared<HybridConditional>(result.first),
boost::make_shared<HybridDiscreteFactor>(result.second)};
}
@@ -262,7 +267,9 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
if (!factor) {
return 0.0; // If nullptr, return 0.0 probability
} else {
- return 1.0;
double error =
0.5 * std::abs(factor->augmentedInformation().determinant());
return std::exp(-error);
}
};
DecisionTree<Key, double> fdt(separatorFactors, factorProb);
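For context, a minimal standalone sketch (not part of this change) of the quantity the new factorProb lambda assigns to each leaf of the decision tree; the factor, key, and numbers below are made up for illustration:

#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
#include <cmath>
#include <iostream>

using namespace gtsam;
using symbol_shorthand::X;

int main() {
  // A made-up three-measurement factor on a scalar variable x0:
  // A = [1 1 1]^T, b = [1 2 3]^T, unit noise.
  Matrix A(3, 1);
  A << 1, 1, 1;
  JacobianFactor factor(X(0), A, Vector3(1.0, 2.0, 3.0));

  // The same quantity the updated lambda computes: half the absolute
  // determinant of the augmented information matrix [A b]^T [A b].
  double error = 0.5 * std::abs(factor.augmentedInformation().determinant());

  // Unnormalized weight for the corresponding discrete assignment.
  std::cout << std::exp(-error) << std::endl;  // exp(-3) ≈ 0.0498
  return 0;
}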
@@ -550,5 +557,4 @@ HybridGaussianFactorGraph::separateContinuousDiscreteOrdering(
return std::make_pair(continuous_ordering, discrete_ordering);
}


} // namespace gtsam
2 changes: 1 addition & 1 deletion gtsam/hybrid/HybridSmoother.cpp
@@ -32,7 +32,7 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph,
addConditionals(graph, hybridBayesNet_, ordering);

// Eliminate.
- auto bayesNetFragment = graph.eliminateSequential();
auto bayesNetFragment = graph.eliminateSequential(ordering);

/// Prune
if (maxNrLeaves) {
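The one-line change above makes the smoother eliminate with the ordering it just computed instead of letting elimination pick its own. A minimal sketch of the two call forms, shown on a plain GaussianFactorGraph with made-up keys and factors (the hybrid graph offers the same eliminateSequential overloads, as this diff itself shows):

#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/NoiseModel.h>

using namespace gtsam;
using symbol_shorthand::X;

int main() {
  GaussianFactorGraph gfg;
  auto model = noiseModel::Isotropic::Sigma(1, 1.0);
  Matrix I1 = Matrix::Identity(1, 1);
  Vector b_prior(1), b_between(1);
  b_prior << 0.0;
  b_between << 1.0;
  gfg.add(X(0), I1, b_prior, model);               // prior on x0
  gfg.add(X(0), -I1, X(1), I1, b_between, model);  // odometry x0 -> x1

  // Without an argument, eliminateSequential picks its own ordering
  // (by default COLAMD).
  auto bn1 = gfg.eliminateSequential();

  // With an explicit ordering, elimination follows exactly that order,
  // which is what HybridSmoother::update now relies on.
  Ordering ordering;
  ordering.push_back(X(0));
  ordering.push_back(X(1));
  auto bn2 = gfg.eliminateSequential(ordering);
  return 0;
}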
49 changes: 11 additions & 38 deletions gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -15,6 +15,7 @@
* @author Varun Agrawal
*/

#include <gtsam/discrete/DiscreteBayesNet.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
@@ -280,8 +281,10 @@ TEST(HybridEstimation, Probability) {

VectorValues values = bayes_net->optimize();

- expected_errors.push_back(linear_graph->error(values));
- expected_prob_primes.push_back(linear_graph->probPrime(values));
double error = linear_graph->error(values);
expected_errors.push_back(error);
double prob_prime = linear_graph->probPrime(values);
expected_prob_primes.push_back(prob_prime);
}

// Switching example of robot moving in 1D with given measurements and equal
@@ -291,51 +294,21 @@ TEST(HybridEstimation, Probability) {
auto graph = switching.linearizedFactorGraph;
Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph());

- AlgebraicDecisionTree<Key> expected_probPrimeTree = probPrimeTree(graph);

- // Eliminate continuous
- Ordering continuous_ordering(graph.continuousKeys());
- HybridBayesNet::shared_ptr bayesNet;
- HybridGaussianFactorGraph::shared_ptr discreteGraph;
- std::tie(bayesNet, discreteGraph) =
- graph.eliminatePartialSequential(continuous_ordering);

- // Get the last continuous conditional which will have all the discrete keys
- auto last_conditional = bayesNet->at(bayesNet->size() - 1);
- DiscreteKeys discrete_keys = last_conditional->discreteKeys();

- const std::vector<DiscreteValues> assignments =
- DiscreteValues::CartesianProduct(discrete_keys);

- // Reverse discrete keys order for correct tree construction
- std::reverse(discrete_keys.begin(), discrete_keys.end());

- // Create a decision tree of all the different VectorValues
- DecisionTree<Key, VectorValues::shared_ptr> delta_tree =
- graph.continuousDelta(discrete_keys, bayesNet, assignments);

- AlgebraicDecisionTree<Key> probPrimeTree =
- graph.continuousProbPrimes(discrete_keys, bayesNet);

- EXPECT(assert_equal(expected_probPrimeTree, probPrimeTree));
HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering);
auto discreteConditional = bayesNet->atDiscrete(bayesNet->size() - 3);

// Test if the probPrimeTree matches the probability of
// the individual factor graphs
for (size_t i = 0; i < pow(2, K - 1); i++) {
- Assignment<Key> discrete_assignment;
DiscreteValues discrete_assignment;
for (size_t v = 0; v < discrete_seq_map[i].size(); v++) {
discrete_assignment[M(v)] = discrete_seq_map[i][v];
}
- EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i),
- probPrimeTree(discrete_assignment), 1e-8);
double discrete_transition_prob = 0.25;
EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i) * discrete_transition_prob,
(*discreteConditional)(discrete_assignment), 1e-8);
}

- discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));

- Ordering discrete(graph.discreteKeys());
- auto discreteBayesNet = discreteGraph->eliminateSequential();
- bayesNet->add(*discreteBayesNet);

HybridValues hybrid_values = bayesNet->optimize();

// This is the correct sequence as designed
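A note on the loop above: each index i encodes one binary mode sequence, and the test scales the matching continuous probPrime by the discrete transition probability of 0.25 (discrete_transition_prob in the loop) before comparing it to the eliminated discrete conditional. A small standalone sketch of building the DiscreteValues assignment from an index; the bit layout, keys, and K below are stand-ins, not a copy of the fixture's discrete_seq_map:

#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/inference/Symbol.h>
#include <iostream>

using namespace gtsam;
using symbol_shorthand::M;

int main() {
  const size_t K = 3;  // stand-in for the fixture's number of time steps

  // Index i = 2 encodes the mode sequence (m0, m1) = (0, 1), taking m0 as
  // the least-significant bit.
  size_t i = 2;
  DiscreteValues discrete_assignment;
  for (size_t v = 0; v < K - 1; v++) {
    discrete_assignment[M(v)] = (i >> v) & 1;
  }

  for (size_t v = 0; v < K - 1; v++) {
    std::cout << "m" << v << " = " << discrete_assignment[M(v)] << std::endl;
  }
  return 0;
}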
2 changes: 1 addition & 1 deletion gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
@@ -277,7 +277,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full);

// 9 cliques in the bayes tree and 0 remaining variables to eliminate.
- EXPECT_LONGS_EQUAL(7, hbt->size());
EXPECT_LONGS_EQUAL(9, hbt->size());
EXPECT_LONGS_EQUAL(0, remaining->size());

/*
2 changes: 1 addition & 1 deletion gtsam/hybrid/tests/testHybridGaussianISAM.cpp
@@ -178,7 +178,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {

// Test the probability values with regression tests.
DiscreteValues assignment;
- EXPECT(assert_equal(0.166667, m00_prob, 1e-5));
EXPECT(assert_equal(0.0619233, m00_prob, 1e-5));
assignment[M(0)] = 0;
assignment[M(1)] = 0;
EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5));
11 changes: 1 addition & 10 deletions gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
@@ -372,8 +372,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
dynamic_pointer_cast<DecisionTreeFactor>(hybridDiscreteFactor->inner());
CHECK(discreteFactor);
EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size());
- // All leaves should be probability 1 since this is not P*(X|M,Z)
- EXPECT(discreteFactor->root_->isLeaf());
EXPECT(discreteFactor->root_->isLeaf() == false);

// TODO(Varun) Test emplace_discrete
}
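For context on the flipped assertion: with the determinant-based leaf probabilities introduced earlier in this diff, the discrete factor produced by hybrid elimination no longer has identical all-1.0 leaves, so its decision tree can no longer collapse to a single leaf at the root. A small standalone sketch of a DecisionTreeFactor whose leaves differ; the keys and values are made up:

#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/inference/Symbol.h>
#include <iostream>

using namespace gtsam;
using symbol_shorthand::M;

int main() {
  // Two binary modes with four distinct (made-up) leaf values; a factor like
  // this cannot collapse to a single leaf node at the root of its tree.
  DiscreteKey m0(M(0), 2), m1(M(1), 2);
  DecisionTreeFactor factor(m0 & m1, "0.2 0.6 0.3 0.9");

  // Evaluate one leaf with the same operator() the surrounding tests use.
  DiscreteValues mode;
  mode[M(0)] = 0;
  mode[M(1)] = 1;
  std::cout << factor(mode) << std::endl;
  return 0;
}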
@@ -441,14 +440,6 @@ TEST(HybridFactorGraph, Full_Elimination) {
discrete_fg.push_back(df->inner());
}

- // Get the probabilit P*(X | M, Z)
- DiscreteKeys discrete_keys =
- remainingFactorGraph_partial->at(2)->discreteKeys();
- AlgebraicDecisionTree<Key> probPrimeTree =
- linearizedFactorGraph.continuousProbPrimes(discrete_keys,
- hybridBayesNet_partial);
- discrete_fg.add(DecisionTreeFactor(discrete_keys, probPrimeTree));

ordering.clear();
for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
discreteBayesNet =
2 changes: 1 addition & 1 deletion gtsam/hybrid/tests/testHybridNonlinearISAM.cpp
@@ -197,7 +197,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) {

// Test the probability values with regression tests.
DiscreteValues assignment;
- EXPECT(assert_equal(0.166667, m00_prob, 1e-5));
EXPECT(assert_equal(0.0619233, m00_prob, 1e-5));
assignment[M(0)] = 0;
assignment[M(1)] = 0;
EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5));