Commit
Update naming convention to GraphicalLassoX, GraphLassoX still available but deprecated.
jasonlaska committed Sep 9, 2018
1 parent 9900737 commit 8f0a6ab
Showing 13 changed files with 232 additions and 182 deletions.
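
The deprecation shim itself sits in one of the changed files not shown below. As a rough sketch of the pattern the commit message describes (old names still importable but warning on use), assuming a simple subclass alias rather than the actual skggm implementation:

# Hypothetical sketch of the backward-compatibility alias implied by the
# commit message; the real skggm code may implement this differently.
import warnings

from inverse_covariance import QuicGraphicalLasso  # new canonical name


class QuicGraphLasso(QuicGraphicalLasso):
    """Deprecated alias kept for backward compatibility (hypothetical)."""

    def __init__(self, *args, **kwargs):
        warnings.warn(
            "QuicGraphLasso has been renamed to QuicGraphicalLasso and "
            "will be removed in a future release.",
            DeprecationWarning,
        )
        super(QuicGraphLasso, self).__init__(*args, **kwargs)
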
2 changes: 1 addition & 1 deletion examples/convergence_comparison.py
@@ -2,7 +2,7 @@
Convergence Failure of Glasso
=============================
Demonstration of cases where GraphLasso fails to converge and quic succeeds.
Demonstration of cases where graph_lasso fails to converge and quic succeeds.
"The graphical lasso: New Insights and alternatives", by Mazumder & Hastie 2012.
https://web.stanford.edu/~hastie/Papers/glassoinsights.pdf
98 changes: 49 additions & 49 deletions examples/estimator_suite.py
@@ -22,10 +22,10 @@
sys.path.append("..")
sys.path.append("../inverse_covariance")
from inverse_covariance import (
QuicGraphLasso,
QuicGraphLassoCV,
QuicGraphLassoEBIC,
AdaptiveGraphLasso,
QuicGraphicalLasso,
QuicGraphicalLassoCV,
QuicGraphicalLassoEBIC,
AdaptiveGraphicalLasso,
ModelAverage,
)

@@ -95,19 +95,19 @@ def show_results(covs, precs):


def quic_graph_lasso(X, num_folds, metric):
"""Run QuicGraphLasso with mode='default' and use standard scikit
"""Run QuicGraphicalLasso with mode='default' and use standard scikit
GridSearchCV to find the best lambda.
Primarily demonstrates compatibility with existing scikit tooling.
"""
print("QuicGraphLasso + GridSearchCV with:")
print("QuicGraphicalLasso + GridSearchCV with:")
print(" metric: {}".format(metric))
search_grid = {
"lam": np.logspace(np.log10(0.01), np.log10(1.0), num=100, endpoint=True),
"init_method": ["cov"],
"score_metric": [metric],
}
model = GridSearchCV(QuicGraphLasso(), search_grid, cv=num_folds, refit=True)
model = GridSearchCV(QuicGraphicalLasso(), search_grid, cv=num_folds, refit=True)
model.fit(X)
bmodel = model.best_estimator_
print(" len(cv_lams): {}".format(len(search_grid["lam"])))
@@ -118,14 +118,14 @@ def quic_graph_lasso(X, num_folds, metric):


def quic_graph_lasso_cv(X, metric):
"""Run QuicGraphLassoCV on data with metric of choice.
"""Run QuicGraphicalLassoCV on data with metric of choice.
Compare results with GridSearchCV + quic_graph_lasso. The number of
lambdas tested should be much lower with similar final lam_ selected.
"""
print("QuicGraphLassoCV with:")
print("QuicGraphicalLassoCV with:")
print(" metric: {}".format(metric))
model = QuicGraphLassoCV(
model = QuicGraphicalLassoCV(
cv=2, # cant deal w more folds at small size
n_refinements=6,
n_jobs=1,
@@ -140,18 +140,18 @@ def quic_graph_lasso_cv(X, metric):


def adaptive_graph_lasso(X, model_selector, method):
"""Run QuicGraphLassoCV or QuicGraphLassoEBIC as a two step adaptive fit
"""Run QuicGraphicalLassoCV or QuicGraphicalLassoEBIC as a two step adaptive fit
with method of choice (currently: 'binary', 'inverse', 'inverse_squared').
Compare the support and values to the model-selection estimator.
"""
metric = "log_likelihood"
print("Adaptive {} with:".format(model_selector))
print(" adaptive-method: {}".format(method))
if model_selector == "QuicGraphLassoCV":
if model_selector == "QuicGraphicalLassoCV":
print(" metric: {}".format(metric))
model = AdaptiveGraphLasso(
estimator=QuicGraphLassoCV(
model = AdaptiveGraphicalLasso(
estimator=QuicGraphicalLassoCV(
cv=2, # cant deal w more folds at small size
n_refinements=6,
init_method="cov",
@@ -160,25 +160,25 @@ def adaptive_graph_lasso(X, model_selector, method):
method=method,
)

elif model_selector == "QuicGraphLassoEBIC":
model = AdaptiveGraphLasso(estimator=QuicGraphLassoEBIC(), method=method)
elif model_selector == "QuicGraphicalLassoEBIC":
model = AdaptiveGraphicalLasso(estimator=QuicGraphicalLassoEBIC(), method=method)
model.fit(X)
lam_norm_ = np.linalg.norm(model.estimator_.lam_)
print(" ||lam_||_2: {}".format(lam_norm_))
return model.estimator_.covariance_, model.estimator_.precision_, lam_norm_


def quic_graph_lasso_ebic_manual(X, gamma=0):
"""Run QuicGraphLasso with mode='path' and gamma; use EBIC criteria for model
"""Run QuicGraphicalLasso with mode='path' and gamma; use EBIC criteria for model
selection.
The EBIC criteria is built into InverseCovarianceEstimator base class
so we demonstrate those utilities here.
"""
print("QuicGraphLasso (manual EBIC) with:")
print("QuicGraphicalLasso (manual EBIC) with:")
print(" mode: path")
print(" gamma: {}".format(gamma))
model = QuicGraphLasso(
model = QuicGraphicalLasso(
lam=1.0,
mode="path",
init_method="cov",
@@ -197,15 +197,15 @@ def quic_graph_lasso_ebic_manual(X, gamma=0):


def quic_graph_lasso_ebic(X, gamma=0):
"""Run QuicGraphLassoEBIC with gamma.
"""Run QuicGraphicalLassoEBIC with gamma.
QuicGraphLassoEBIC is a convenience class. Results should be identical to
QuicGraphicalLassoEBIC is a convenience class. Results should be identical to
those obtained via quic_graph_lasso_ebic_manual.
"""
print("QuicGraphLassoEBIC with:")
print("QuicGraphicalLassoEBIC with:")
print(" mode: path")
print(" gamma: {}".format(gamma))
model = QuicGraphLassoEBIC(lam=1.0, init_method="cov", gamma=gamma)
model = QuicGraphicalLassoEBIC(lam=1.0, init_method="cov", gamma=gamma)
model.fit(X)
print(" len(path lams): {}".format(len(model.path_)))
print(" lam_scale_: {}".format(model.lam_scale_))
@@ -214,7 +214,7 @@ def quic_graph_lasso_ebic(X, gamma=0):


def model_average(X, penalization):
"""Run ModelAverage in default mode (QuicGraphLassoCV) to obtain proportion
"""Run ModelAverage in default mode (QuicGraphicalLassoCV) to obtain proportion
matrix.
NOTE: This returns precision_ proportions, not cov, prec estimates, so we
@@ -223,15 +223,15 @@ def model_average(X, penalization):
"""
n_trials = 100
print("ModelAverage with:")
print(" estimator: QuicGraphLasso (default)")
print(" estimator: QuicGraphicalLasso (default)")
print(" n_trials: {}".format(n_trials))
print(" penalization: {}".format(penalization))

# if penalization is random, first find a decent scalar lam_ to build
# random perturbation matrix around. lam doesn't matter for fully-random.
lam = 0.5
if penalization == "random":
cv_model = QuicGraphLassoCV(
cv_model = QuicGraphicalLassoCV(
cv=2, n_refinements=6, n_jobs=1, init_method="cov", score_metric=metric
)
cv_model.fit(X)
@@ -247,14 +247,14 @@ def model_average(X, penalization):


def adaptive_model_average(X, penalization, method):
"""Run ModelAverage in default mode (QuicGraphLassoCV) to obtain proportion
"""Run ModelAverage in default mode (QuicGraphicalLassoCV) to obtain proportion
matrix.
NOTE: Only method = 'binary' really makes sense in this case.
"""
n_trials = 100
print("Adaptive ModelAverage with:")
print(" estimator: QuicGraphLasso (default)")
print(" estimator: QuicGraphicalLasso (default)")
print(" n_trials: {}".format(n_trials))
print(" penalization: {}".format(penalization))
print(" adaptive-method: {}".format(method))
@@ -263,14 +263,14 @@ def adaptive_model_average(X, penalization, method):
# random perturbation matrix around. lam doesn't matter for fully-random.
lam = 0.5
if penalization == "random":
cv_model = QuicGraphLassoCV(
cv_model = QuicGraphicalLassoCV(
cv=2, n_refinements=6, n_jobs=1, init_method="cov", score_metric=metric
)
cv_model.fit(X)
lam = cv_model.lam_
print(" lam: {}".format(lam))

model = AdaptiveGraphLasso(
model = AdaptiveGraphicalLasso(
estimator=ModelAverage(
n_trials=n_trials, penalization=penalization, lam=lam, n_jobs=1
),
@@ -384,11 +384,11 @@ def _count_support_diff(m, m_hat):
print(" frobenius error: {}".format(error))
print("")

# QuicGraphLasso + GridSearchCV
# QuicGraphicalLasso + GridSearchCV
params = [
("QuicGraphLasso GSCV : ll", "log_likelihood"),
("QuicGraphLasso GSCV : kl", "kl"),
("QuicGraphLasso GSCV : fro", "frobenius"),
("QuicGraphicalLasso GSCV : ll", "log_likelihood"),
("QuicGraphicalLasso GSCV : kl", "kl"),
("QuicGraphicalLasso GSCV : fro", "frobenius"),
]
for name, metric in params:
start_time = time.time()
@@ -403,11 +403,11 @@ def _count_support_diff(m, m_hat):
print(" frobenius error: {}".format(error))
print("")

# QuicGraphLassoCV
# QuicGraphicalLassoCV
params = [
("QuicGraphLassoCV : ll", "log_likelihood"),
("QuicGraphLassoCV : kl", "kl"),
("QuicGraphLassoCV : fro", "frobenius"),
("QuicGraphicalLassoCV : ll", "log_likelihood"),
("QuicGraphicalLassoCV : kl", "kl"),
("QuicGraphicalLassoCV : fro", "frobenius"),
]
for name, metric in params:
start_time = time.time()
@@ -422,11 +422,11 @@ def _count_support_diff(m, m_hat):
print(" frobenius error: {}".format(error))
print("")

# QuicGraphLassoEBIC
# QuicGraphicalLassoEBIC
params = [
("QuicGraphLassoEBIC : BIC", 0),
("QuicGraphLassoEBIC : g=0.01", 0.01),
("QuicGraphLassoEBIC : g=0.1", 0.1),
("QuicGraphicalLassoEBIC : BIC", 0),
("QuicGraphicalLassoEBIC : g=0.01", 0.01),
("QuicGraphicalLassoEBIC : g=0.1", 0.1),
]
for name, gamma in params:
start_time = time.time()
@@ -458,14 +458,14 @@ def _count_support_diff(m, m_hat):
results.append([name, "", supp_diff, ctime, lam])
print("")

# Adaptive QuicGraphLassoCV and QuicGraphLassoEBIC
# Adaptive QuicGraphicalLassoCV and QuicGraphicalLassoEBIC
params = [
("Adaptive CV : binary", "QuicGraphLassoCV", "binary"),
("Adaptive CV : inv", "QuicGraphLassoCV", "inverse"),
("Adaptive CV : inv**2", "QuicGraphLassoCV", "inverse_squared"),
("Adaptive BIC : binary", "QuicGraphLassoEBIC", "binary"),
("Adaptive BIC : inv", "QuicGraphLassoEBIC", "inverse"),
("Adaptive BIC : inv**2", "QuicGraphLassoEBIC", "inverse_squared"),
("Adaptive CV : binary", "QuicGraphicalLassoCV", "binary"),
("Adaptive CV : inv", "QuicGraphicalLassoCV", "inverse"),
("Adaptive CV : inv**2", "QuicGraphicalLassoCV", "inverse_squared"),
("Adaptive BIC : binary", "QuicGraphicalLassoEBIC", "binary"),
("Adaptive BIC : inv", "QuicGraphicalLassoEBIC", "inverse"),
("Adaptive BIC : inv**2", "QuicGraphicalLassoEBIC", "inverse_squared"),
]
for name, model_selector, method in params:
start_time = time.time()
(Remaining changed files not shown.)
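
For downstream code, the rename is intended as a drop-in change: the constructor arguments and fitted attributes used throughout estimator_suite.py keep their names. A minimal usage sketch with the new class name, assuming random toy data in place of the example datasets:

import numpy as np

from inverse_covariance import QuicGraphicalLassoCV

# Toy data standing in for the example datasets used in estimator_suite.py.
X = np.random.randn(50, 10)

# Same constructor arguments as the cross-validation example in the diff above.
model = QuicGraphicalLassoCV(
    cv=2,
    n_refinements=6,
    n_jobs=1,
    init_method="cov",
    score_metric="log_likelihood",
)
model.fit(X)

# Fitted attributes are unchanged from the old QuicGraphLassoCV.
print(model.lam_)
print(model.covariance_.shape, model.precision_.shape)
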
