Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Hybrid/simplify #1388

Merged
merged 29 commits into from
Jan 17, 2023
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
34a9aef
normalizationConstants returns all constants as a DecisionTreeFactor
dellaert Jan 12, 2023
1dcc6dd
All tests still work with zero constant!
dellaert Jan 12, 2023
03ad393
Removed FactorAndConstant, no longer needed
dellaert Jan 13, 2023
906330f
Add discrete contribution to logProbability
dellaert Jan 13, 2023
681c75c
Expose toFactorGraph to wrapper
dellaert Jan 13, 2023
dfef2c2
Simplify elimination
dellaert Jan 13, 2023
070cdb7
insert_or_assign
dellaert Jan 14, 2023
96e3eb7
Some test refactoring
dellaert Jan 14, 2023
c22b2ca
Improved docs
dellaert Jan 15, 2023
5b0408c
Check for error>0 and proper normalization constant
dellaert Jan 16, 2023
191e614
Fix print
dellaert Jan 16, 2023
57e59d1
Compute log-normalization constant as the max of the individual normalization constants
dellaert Jan 16, 2023
7a41180
Refactored tests and removed incorrect (R not upper-triangular) test.
dellaert Jan 16, 2023
207c9b7
Implemented the "hidden constant" scheme.
dellaert Jan 17, 2023
3a446d7
Explicitly implement logNormalizationConstant
dellaert Jan 17, 2023
202a5a3
Fixed toFactorGraph and added test to verify
dellaert Jan 17, 2023
a5951d8
Fixed test to work with "hidden constant" scheme
dellaert Jan 17, 2023
8357fc7
Fix python tests (and expose HybridBayesNet.error)
dellaert Jan 17, 2023
e31884c
Eradicated GraphAndConstant
dellaert Jan 17, 2023
9af7236
Added DEBUG_MARGINALS flag
dellaert Jan 17, 2023
519b2bb
Added comment
dellaert Jan 17, 2023
32d69a3
Trap if conditional==null.
dellaert Jan 17, 2023
f4859f0
Fix logProbability tests
dellaert Jan 17, 2023
4283925
Ratio test succeeds on fg, but not on posterior yet.
dellaert Jan 17, 2023
b494a61
Removed obsolete normalizationConstants method
dellaert Jan 17, 2023
892759e
Add math related to hybrid classes
dellaert Jan 17, 2023
c3ca31f
Added partial elimination test
dellaert Jan 17, 2023
e444962
Added correction with the normalization constant in the second elimination
dellaert Jan 17, 2023
f714c4a
Merge branch 'develop' into hybrid/simplify
dellaert Jan 17, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Expose toFactorGraph to wrapper
  • Loading branch information
dellaert committed Jan 16, 2023
commit 681c75cea42bdeb176ea1483b0ad8f0ca3c6fef1
3 changes: 3 additions & 0 deletions gtsam/hybrid/hybrid.i
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,9 @@ class HybridBayesNet {
double logProbability(const gtsam::HybridValues& values) const;
double evaluate(const gtsam::HybridValues& values) const;

gtsam::HybridGaussianFactorGraph toFactorGraph(
const gtsam::VectorValues& measurements) const;

gtsam::HybridValues optimize() const;
gtsam::HybridValues sample(const gtsam::HybridValues &given) const;
gtsam::HybridValues sample() const;
Expand Down
56 changes: 19 additions & 37 deletions python/gtsam/tests/test_HybridFactorGraph.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,23 +152,6 @@ def measurements(sample: HybridValues, indices) -> gtsam.VectorValues:
measurements.insert(Z(i), sample.at(Z(i)))
return measurements

@classmethod
def factor_graph_from_bayes_net(cls, bayesNet: HybridBayesNet,
                                sample: HybridValues):
    """Build a hybrid factor graph equivalent to the given Bayes net.

    Each measurement conditional in the Bayes net is converted into a
    likelihood factor evaluated at the sampled measurement, and the two
    trailing priors (Gaussian and discrete) are appended unchanged, so
    the resulting graph `P(x)P(n) ϕ(x, n; z0) ϕ(x, n; z1) ...`
    represents the same joint probability as the Bayes net.
    """
    # The Bayes net layout is: one conditional per measurement,
    # followed by a Gaussian prior and a discrete prior.
    num_measurements = bayesNet.size() - 2
    factors = HybridGaussianFactorGraph()
    for index in range(num_measurements):
        mixture = bayesNet.at(index).asMixture()
        likelihood = mixture.likelihood(cls.measurements(sample, [index]))
        factors.push_back(likelihood)
    factors.push_back(bayesNet.at(num_measurements).asGaussian())
    factors.push_back(bayesNet.at(num_measurements + 1).asDiscrete())
    return factors

@classmethod
def estimate_marginals(cls,
target,
Expand All @@ -182,16 +165,14 @@ def estimate_marginals(cls,
for s in range(N):
proposed = proposal_density.sample() # sample from proposal
target_proposed = target(proposed) # evaluate target
# print(target_proposed, proposal_density.evaluate(proposed))
weight = target_proposed / proposal_density.evaluate(proposed)
# print weight:
# print(f"weight: {weight}")
marginals[proposed.atDiscrete(M(0))] += weight

# print marginals:
marginals /= marginals.sum()
return marginals

@unittest.skip
def test_tiny(self):
"""Test a tiny two variable hybrid model."""
# P(x0)P(mode)P(z0|x0,mode)
Expand All @@ -202,12 +183,13 @@ def test_tiny(self):
values = HybridValues()
values.insert(X(0), [5.0])
values.insert(M(0), 0) # low-noise, standard deviation 0.5
z0: float = 5.0
values.insert(Z(0), [z0])
measurements = gtsam.VectorValues()
measurements.insert(Z(0), [5.0])
values.insert(measurements)

def unnormalized_posterior(x):
"""Posterior is proportional to joint, centered at 5.0 as well."""
x.insert(Z(0), [z0])
x.insert(measurements)
return bayesNet.evaluate(x)

# Create proposal density on (x0, mode), making sure it has same mean:
Expand All @@ -220,31 +202,31 @@ def unnormalized_posterior(x):
# Estimate marginals using importance sampling.
marginals = self.estimate_marginals(target=unnormalized_posterior,
proposal_density=proposal_density)
# print(f"True mode: {values.atDiscrete(M(0))}")
# print(f"P(mode=0; Z) = {marginals[0]}")
# print(f"P(mode=1; Z) = {marginals[1]}")
print(f"True mode: {values.atDiscrete(M(0))}")
print(f"P(mode=0; Z) = {marginals[0]}")
print(f"P(mode=1; Z) = {marginals[1]}")

# Check that the estimate is close to the true value.
self.assertAlmostEqual(marginals[0], 0.74, delta=0.01)
self.assertAlmostEqual(marginals[1], 0.26, delta=0.01)

fg = self.factor_graph_from_bayes_net(bayesNet, values)
self.assertEqual(fg.size(), 3)
fg = bayesNet.toFactorGraph(measurements)
self.assertEqual(fg.size(), 4)

# Test elimination.
posterior = fg.eliminateSequential()

def true_posterior(x):
"""Posterior from elimination."""
x.insert(Z(0), [z0])
x.insert(measurements)
return posterior.evaluate(x)

# Estimate marginals using importance sampling.
marginals = self.estimate_marginals(target=true_posterior,
proposal_density=proposal_density)
# print(f"True mode: {values.atDiscrete(M(0))}")
# print(f"P(mode=0; z0) = {marginals[0]}")
# print(f"P(mode=1; z0) = {marginals[1]}")
print(f"True mode: {values.atDiscrete(M(0))}")
print(f"P(mode=0; z0) = {marginals[0]}")
print(f"P(mode=1; z0) = {marginals[1]}")

# Check that the estimate is close to the true value.
self.assertAlmostEqual(marginals[0], 0.74, delta=0.01)
Expand Down Expand Up @@ -292,17 +274,17 @@ def unnormalized_posterior(x):
# Estimate marginals using importance sampling.
marginals = self.estimate_marginals(target=unnormalized_posterior,
proposal_density=proposal_density)
# print(f"True mode: {values.atDiscrete(M(0))}")
# print(f"P(mode=0; Z) = {marginals[0]}")
# print(f"P(mode=1; Z) = {marginals[1]}")
print(f"True mode: {values.atDiscrete(M(0))}")
print(f"P(mode=0; Z) = {marginals[0]}")
print(f"P(mode=1; Z) = {marginals[1]}")

# Check that the estimate is close to the true value.
self.assertAlmostEqual(marginals[0], 0.23, delta=0.01)
self.assertAlmostEqual(marginals[1], 0.77, delta=0.01)

# Convert to factor graph using measurements.
fg = self.factor_graph_from_bayes_net(bayesNet, values)
self.assertEqual(fg.size(), 4)
fg = bayesNet.toFactorGraph(measurements)
self.assertEqual(fg.size(), 6)

# Calculate ratio between Bayes net probability and the factor graph:
expected_ratio = self.calculate_ratio(bayesNet, fg, values)
Expand Down