From 8f0a6ab12108c7e84d32ac1ba83fc70ecffbe893 Mon Sep 17 00:00:00 2001 From: Jaska Date: Sat, 8 Sep 2018 18:46:21 -0700 Subject: [PATCH] Update naming convention to GraphicalLassoX, GraphLassoX still available but deprecated. --- examples/convergence_comparison.py | 2 +- examples/estimator_suite.py | 98 +++++++++---------- examples/estimator_suite_spark.py | 84 ++++++++-------- examples/plot_functional_brain_networks.py | 41 ++++---- examples/trace_plot_example.py | 6 +- inverse_covariance/__init__.py | 16 ++- inverse_covariance/adaptive_graph_lasso.py | 32 +++--- inverse_covariance/model_average.py | 12 +-- inverse_covariance/quic_graph_lasso.py | 41 ++++++-- .../tests/adaptive_graph_lasso_test.py | 24 +++-- inverse_covariance/tests/common_test.py | 24 ++--- .../tests/model_average_test.py | 10 +- .../tests/quic_graph_lasso_test.py | 24 ++--- 13 files changed, 232 insertions(+), 182 deletions(-) diff --git a/examples/convergence_comparison.py b/examples/convergence_comparison.py index ddd0941..9f027d8 100644 --- a/examples/convergence_comparison.py +++ b/examples/convergence_comparison.py @@ -2,7 +2,7 @@ Convergence Failure of Glasso ============================= -Demonstration of cases where GraphLasso fails to converge and quic succeeds. +Demonstration of cases where graph_lasso fails to converge and quic succeeds. "The graphical lasso: New Insights and alternatives", by Mazumder & Hastie 2012. 
https://web.stanford.edu/~hastie/Papers/glassoinsights.pdf diff --git a/examples/estimator_suite.py b/examples/estimator_suite.py index dd3c14e..93851c0 100644 --- a/examples/estimator_suite.py +++ b/examples/estimator_suite.py @@ -22,10 +22,10 @@ sys.path.append("..") sys.path.append("../inverse_covariance") from inverse_covariance import ( - QuicGraphLasso, - QuicGraphLassoCV, - QuicGraphLassoEBIC, - AdaptiveGraphLasso, + QuicGraphicalLasso, + QuicGraphicalLassoCV, + QuicGraphicalLassoEBIC, + AdaptiveGraphicalLasso, ModelAverage, ) @@ -95,19 +95,19 @@ def show_results(covs, precs): def quic_graph_lasso(X, num_folds, metric): - """Run QuicGraphLasso with mode='default' and use standard scikit + """Run QuicGraphicalLasso with mode='default' and use standard scikit GridSearchCV to find the best lambda. Primarily demonstrates compatibility with existing scikit tooling. """ - print("QuicGraphLasso + GridSearchCV with:") + print("QuicGraphicalLasso + GridSearchCV with:") print(" metric: {}".format(metric)) search_grid = { "lam": np.logspace(np.log10(0.01), np.log10(1.0), num=100, endpoint=True), "init_method": ["cov"], "score_metric": [metric], } - model = GridSearchCV(QuicGraphLasso(), search_grid, cv=num_folds, refit=True) + model = GridSearchCV(QuicGraphicalLasso(), search_grid, cv=num_folds, refit=True) model.fit(X) bmodel = model.best_estimator_ print(" len(cv_lams): {}".format(len(search_grid["lam"]))) @@ -118,14 +118,14 @@ def quic_graph_lasso(X, num_folds, metric): def quic_graph_lasso_cv(X, metric): - """Run QuicGraphLassoCV on data with metric of choice. + """Run QuicGraphicalLassoCV on data with metric of choice. Compare results with GridSearchCV + quic_graph_lasso. The number of lambdas tested should be much lower with similar final lam_ selected. 
""" - print("QuicGraphLassoCV with:") + print("QuicGraphicalLassoCV with:") print(" metric: {}".format(metric)) - model = QuicGraphLassoCV( + model = QuicGraphicalLassoCV( cv=2, # cant deal w more folds at small size n_refinements=6, n_jobs=1, @@ -140,7 +140,7 @@ def quic_graph_lasso_cv(X, metric): def adaptive_graph_lasso(X, model_selector, method): - """Run QuicGraphLassoCV or QuicGraphLassoEBIC as a two step adaptive fit + """Run QuicGraphicalLassoCV or QuicGraphicalLassoEBIC as a two step adaptive fit with method of choice (currently: 'binary', 'inverse', 'inverse_squared'). Compare the support and values to the model-selection estimator. @@ -148,10 +148,10 @@ def adaptive_graph_lasso(X, model_selector, method): metric = "log_likelihood" print("Adaptive {} with:".format(model_selector)) print(" adaptive-method: {}".format(method)) - if model_selector == "QuicGraphLassoCV": + if model_selector == "QuicGraphicalLassoCV": print(" metric: {}".format(metric)) - model = AdaptiveGraphLasso( - estimator=QuicGraphLassoCV( + model = AdaptiveGraphicalLasso( + estimator=QuicGraphicalLassoCV( cv=2, # cant deal w more folds at small size n_refinements=6, init_method="cov", @@ -160,8 +160,8 @@ def adaptive_graph_lasso(X, model_selector, method): method=method, ) - elif model_selector == "QuicGraphLassoEBIC": - model = AdaptiveGraphLasso(estimator=QuicGraphLassoEBIC(), method=method) + elif model_selector == "QuicGraphicalLassoEBIC": + model = AdaptiveGraphicalLasso(estimator=QuicGraphicalLassoEBIC(), method=method) model.fit(X) lam_norm_ = np.linalg.norm(model.estimator_.lam_) print(" ||lam_||_2: {}".format(lam_norm_)) @@ -169,16 +169,16 @@ def adaptive_graph_lasso(X, model_selector, method): def quic_graph_lasso_ebic_manual(X, gamma=0): - """Run QuicGraphLasso with mode='path' and gamma; use EBIC criteria for model + """Run QuicGraphicalLasso with mode='path' and gamma; use EBIC criteria for model selection. 
The EBIC criteria is built into InverseCovarianceEstimator base class so we demonstrate those utilities here. """ - print("QuicGraphLasso (manual EBIC) with:") + print("QuicGraphicalLasso (manual EBIC) with:") print(" mode: path") print(" gamma: {}".format(gamma)) - model = QuicGraphLasso( + model = QuicGraphicalLasso( lam=1.0, mode="path", init_method="cov", @@ -197,15 +197,15 @@ def quic_graph_lasso_ebic_manual(X, gamma=0): def quic_graph_lasso_ebic(X, gamma=0): - """Run QuicGraphLassoEBIC with gamma. + """Run QuicGraphicalLassoEBIC with gamma. - QuicGraphLassoEBIC is a convenience class. Results should be identical to + QuicGraphicalLassoEBIC is a convenience class. Results should be identical to those obtained via quic_graph_lasso_ebic_manual. """ - print("QuicGraphLassoEBIC with:") + print("QuicGraphicalLassoEBIC with:") print(" mode: path") print(" gamma: {}".format(gamma)) - model = QuicGraphLassoEBIC(lam=1.0, init_method="cov", gamma=gamma) + model = QuicGraphicalLassoEBIC(lam=1.0, init_method="cov", gamma=gamma) model.fit(X) print(" len(path lams): {}".format(len(model.path_))) print(" lam_scale_: {}".format(model.lam_scale_)) @@ -214,7 +214,7 @@ def quic_graph_lasso_ebic(X, gamma=0): def model_average(X, penalization): - """Run ModelAverage in default mode (QuicGraphLassoCV) to obtain proportion + """Run ModelAverage in default mode (QuicGraphicalLassoCV) to obtain proportion matrix. NOTE: This returns precision_ proportions, not cov, prec estimates, so we @@ -223,7 +223,7 @@ def model_average(X, penalization): """ n_trials = 100 print("ModelAverage with:") - print(" estimator: QuicGraphLasso (default)") + print(" estimator: QuicGraphicalLasso (default)") print(" n_trials: {}".format(n_trials)) print(" penalization: {}".format(penalization)) @@ -231,7 +231,7 @@ def model_average(X, penalization): # random perturbation matrix around. lam doesn't matter for fully-random. 
lam = 0.5 if penalization == "random": - cv_model = QuicGraphLassoCV( + cv_model = QuicGraphicalLassoCV( cv=2, n_refinements=6, n_jobs=1, init_method="cov", score_metric=metric ) cv_model.fit(X) @@ -247,14 +247,14 @@ def model_average(X, penalization): def adaptive_model_average(X, penalization, method): - """Run ModelAverage in default mode (QuicGraphLassoCV) to obtain proportion + """Run ModelAverage in default mode (QuicGraphicalLassoCV) to obtain proportion matrix. NOTE: Only method = 'binary' really makes sense in this case. """ n_trials = 100 print("Adaptive ModelAverage with:") - print(" estimator: QuicGraphLasso (default)") + print(" estimator: QuicGraphicalLasso (default)") print(" n_trials: {}".format(n_trials)) print(" penalization: {}".format(penalization)) print(" adaptive-method: {}".format(method)) @@ -263,14 +263,14 @@ def adaptive_model_average(X, penalization, method): # random perturbation matrix around. lam doesn't matter for fully-random. lam = 0.5 if penalization == "random": - cv_model = QuicGraphLassoCV( + cv_model = QuicGraphicalLassoCV( cv=2, n_refinements=6, n_jobs=1, init_method="cov", score_metric=metric ) cv_model.fit(X) lam = cv_model.lam_ print(" lam: {}".format(lam)) - model = AdaptiveGraphLasso( + model = AdaptiveGraphicalLasso( estimator=ModelAverage( n_trials=n_trials, penalization=penalization, lam=lam, n_jobs=1 ), @@ -384,11 +384,11 @@ def _count_support_diff(m, m_hat): print(" frobenius error: {}".format(error)) print("") - # QuicGraphLasso + GridSearchCV + # QuicGraphicalLasso + GridSearchCV params = [ - ("QuicGraphLasso GSCV : ll", "log_likelihood"), - ("QuicGraphLasso GSCV : kl", "kl"), - ("QuicGraphLasso GSCV : fro", "frobenius"), + ("QuicGraphicalLasso GSCV : ll", "log_likelihood"), + ("QuicGraphicalLasso GSCV : kl", "kl"), + ("QuicGraphicalLasso GSCV : fro", "frobenius"), ] for name, metric in params: start_time = time.time() @@ -403,11 +403,11 @@ def _count_support_diff(m, m_hat): print(" frobenius error: 
{}".format(error)) print("") - # QuicGraphLassoCV + # QuicGraphicalLassoCV params = [ - ("QuicGraphLassoCV : ll", "log_likelihood"), - ("QuicGraphLassoCV : kl", "kl"), - ("QuicGraphLassoCV : fro", "frobenius"), + ("QuicGraphicalLassoCV : ll", "log_likelihood"), + ("QuicGraphicalLassoCV : kl", "kl"), + ("QuicGraphicalLassoCV : fro", "frobenius"), ] for name, metric in params: start_time = time.time() @@ -422,11 +422,11 @@ def _count_support_diff(m, m_hat): print(" frobenius error: {}".format(error)) print("") - # QuicGraphLassoEBIC + # QuicGraphicalLassoEBIC params = [ - ("QuicGraphLassoEBIC : BIC", 0), - ("QuicGraphLassoEBIC : g=0.01", 0.01), - ("QuicGraphLassoEBIC : g=0.1", 0.1), + ("QuicGraphicalLassoEBIC : BIC", 0), + ("QuicGraphicalLassoEBIC : g=0.01", 0.01), + ("QuicGraphicalLassoEBIC : g=0.1", 0.1), ] for name, gamma in params: start_time = time.time() @@ -458,14 +458,14 @@ def _count_support_diff(m, m_hat): results.append([name, "", supp_diff, ctime, lam]) print("") - # Adaptive QuicGraphLassoCV and QuicGraphLassoEBIC + # Adaptive QuicGraphicalLassoCV and QuicGraphicalLassoEBIC params = [ - ("Adaptive CV : binary", "QuicGraphLassoCV", "binary"), - ("Adaptive CV : inv", "QuicGraphLassoCV", "inverse"), - ("Adaptive CV : inv**2", "QuicGraphLassoCV", "inverse_squared"), - ("Adaptive BIC : binary", "QuicGraphLassoEBIC", "binary"), - ("Adaptive BIC : inv", "QuicGraphLassoEBIC", "inverse"), - ("Adaptive BIC : inv**2", "QuicGraphLassoEBIC", "inverse_squared"), + ("Adaptive CV : binary", "QuicGraphicalLassoCV", "binary"), + ("Adaptive CV : inv", "QuicGraphicalLassoCV", "inverse"), + ("Adaptive CV : inv**2", "QuicGraphicalLassoCV", "inverse_squared"), + ("Adaptive BIC : binary", "QuicGraphicalLassoEBIC", "binary"), + ("Adaptive BIC : inv", "QuicGraphicalLassoEBIC", "inverse"), + ("Adaptive BIC : inv**2", "QuicGraphicalLassoEBIC", "inverse_squared"), ] for name, model_selector, method in params: start_time = time.time() diff --git a/examples/estimator_suite_spark.py 
b/examples/estimator_suite_spark.py index ca66f01..e130afc 100644 --- a/examples/estimator_suite_spark.py +++ b/examples/estimator_suite_spark.py @@ -34,10 +34,10 @@ sys.path.append("..") sys.path.append("../inverse_covariance") from inverse_covariance import ( - QuicGraphLasso, - QuicGraphLassoCV, - QuicGraphLassoEBIC, - AdaptiveGraphLasso, + QuicGraphicalLasso, + QuicGraphicalLassoCV, + QuicGraphicalLassoEBIC, + AdaptiveGraphicalLasso, ModelAverage, ) from inverse_covariance.profiling import LatticeGraph @@ -54,14 +54,14 @@ def make_data(n_samples, n_features): def quic_graph_lasso_cv(X, metric): - """Run QuicGraphLassoCV on data with metric of choice. + """Run QuicGraphicalLassoCV on data with metric of choice. Compare results with GridSearchCV + quic_graph_lasso. The number of lambdas tested should be much lower with similar final lam_ selected. """ - print("QuicGraphLassoCV with:") + print("QuicGraphicalLassoCV with:") print(" metric: {}".format(metric)) - model = QuicGraphLassoCV( + model = QuicGraphicalLassoCV( cv=2, # cant deal w more folds at small size n_refinements=6, sc=spark.sparkContext, # NOQA @@ -76,7 +76,7 @@ def quic_graph_lasso_cv(X, metric): def adaptive_graph_lasso(X, model_selector, method): - """Run QuicGraphLassoCV or QuicGraphLassoEBIC as a two step adaptive fit + """Run QuicGraphicalLassoCV or QuicGraphicalLassoEBIC as a two step adaptive fit with method of choice (currently: 'binary', 'inverse', 'inverse_squared'). Compare the support and values to the model-selection estimator. 
@@ -84,10 +84,10 @@ def adaptive_graph_lasso(X, model_selector, method): metric = "log_likelihood" print("Adaptive {} with:".format(model_selector)) print(" adaptive-method: {}".format(method)) - if model_selector == "QuicGraphLassoCV": + if model_selector == "QuicGraphicalLassoCV": print(" metric: {}".format(metric)) - model = AdaptiveGraphLasso( - estimator=QuicGraphLassoCV( + model = AdaptiveGraphicalLasso( + estimator=QuicGraphicalLassoCV( cv=2, # cant deal w more folds at small size n_refinements=6, init_method="cov", @@ -97,8 +97,8 @@ def adaptive_graph_lasso(X, model_selector, method): method=method, ) - elif model_selector == "QuicGraphLassoEBIC": - model = AdaptiveGraphLasso(estimator=QuicGraphLassoEBIC(), method=method) + elif model_selector == "QuicGraphicalLassoEBIC": + model = AdaptiveGraphicalLasso(estimator=QuicGraphicalLassoEBIC(), method=method) model.fit(X) lam_norm_ = np.linalg.norm(model.estimator_.lam_) print(" ||lam_||_2: {}".format(lam_norm_)) @@ -106,16 +106,16 @@ def adaptive_graph_lasso(X, model_selector, method): def quic_graph_lasso_ebic_manual(X, gamma=0): - """Run QuicGraphLasso with mode='path' and gamma; use EBIC criteria for model + """Run QuicGraphicalLasso with mode='path' and gamma; use EBIC criteria for model selection. The EBIC criteria is built into InverseCovarianceEstimator base class so we demonstrate those utilities here. """ - print("QuicGraphLasso (manual EBIC) with:") + print("QuicGraphicalLasso (manual EBIC) with:") print(" mode: path") print(" gamma: {}".format(gamma)) - model = QuicGraphLasso( + model = QuicGraphicalLasso( lam=1.0, mode="path", init_method="cov", @@ -134,15 +134,15 @@ def quic_graph_lasso_ebic_manual(X, gamma=0): def quic_graph_lasso_ebic(X, gamma=0): - """Run QuicGraphLassoEBIC with gamma. + """Run QuicGraphicalLassoEBIC with gamma. - QuicGraphLassoEBIC is a convenience class. Results should be identical to + QuicGraphicalLassoEBIC is a convenience class. 
Results should be identical to those obtained via quic_graph_lasso_ebic_manual. """ - print("QuicGraphLassoEBIC with:") + print("QuicGraphicalLassoEBIC with:") print(" mode: path") print(" gamma: {}".format(gamma)) - model = QuicGraphLassoEBIC(lam=1.0, init_method="cov", gamma=gamma) + model = QuicGraphicalLassoEBIC(lam=1.0, init_method="cov", gamma=gamma) model.fit(X) print(" len(path lams): {}".format(len(model.path_))) print(" lam_scale_: {}".format(model.lam_scale_)) @@ -151,7 +151,7 @@ def quic_graph_lasso_ebic(X, gamma=0): def model_average(X, penalization): - """Run ModelAverage in default mode (QuicGraphLassoCV) to obtain proportion + """Run ModelAverage in default mode (QuicGraphicalLassoCV) to obtain proportion matrix. NOTE: This returns precision_ proportions, not cov, prec estimates, so we @@ -160,7 +160,7 @@ def model_average(X, penalization): """ n_trials = 100 print("ModelAverage with:") - print(" estimator: QuicGraphLasso (default)") + print(" estimator: QuicGraphicalLasso (default)") print(" n_trials: {}".format(n_trials)) print(" penalization: {}".format(penalization)) @@ -168,7 +168,7 @@ def model_average(X, penalization): # random perturbation matrix around. lam doesn't matter for fully-random. lam = 0.5 if penalization == "random": - cv_model = QuicGraphLassoCV( + cv_model = QuicGraphicalLassoCV( cv=2, n_refinements=6, sc=spark.sparkContext, # NOQA @@ -188,14 +188,14 @@ def model_average(X, penalization): def adaptive_model_average(X, penalization, method): - """Run ModelAverage in default mode (QuicGraphLassoCV) to obtain proportion + """Run ModelAverage in default mode (QuicGraphicalLassoCV) to obtain proportion matrix. NOTE: Only method = 'binary' really makes sense in this case. 
""" n_trials = 100 print("Adaptive ModelAverage with:") - print(" estimator: QuicGraphLasso (default)") + print(" estimator: QuicGraphicalLasso (default)") print(" n_trials: {}".format(n_trials)) print(" penalization: {}".format(penalization)) print(" adaptive-method: {}".format(method)) @@ -204,7 +204,7 @@ def adaptive_model_average(X, penalization, method): # random perturbation matrix around. lam doesn't matter for fully-random. lam = 0.5 if penalization == "random": - cv_model = QuicGraphLassoCV( + cv_model = QuicGraphicalLassoCV( cv=2, n_refinements=6, sc=spark.sparkContext, # NOQA @@ -215,7 +215,7 @@ def adaptive_model_average(X, penalization, method): lam = cv_model.lam_ print(" lam: {}".format(lam)) - model = AdaptiveGraphLasso( + model = AdaptiveGraphicalLasso( estimator=ModelAverage( n_trials=n_trials, penalization=penalization, lam=lam, sc=spark.sparkContext ), # NOQA @@ -305,11 +305,11 @@ def _count_support_diff(m, m_hat): print(" frobenius error: {}".format(error)) print("") - # QuicGraphLassoCV + # QuicGraphicalLassoCV params = [ - ("QuicGraphLassoCV : ll", "log_likelihood"), - ("QuicGraphLassoCV : kl", "kl"), - ("QuicGraphLassoCV : fro", "frobenius"), + ("QuicGraphicalLassoCV : ll", "log_likelihood"), + ("QuicGraphicalLassoCV : kl", "kl"), + ("QuicGraphicalLassoCV : fro", "frobenius"), ] for name, metric in params: start_time = time.time() @@ -324,11 +324,11 @@ def _count_support_diff(m, m_hat): print(" frobenius error: {}".format(error)) print("") - # QuicGraphLassoEBIC + # QuicGraphicalLassoEBIC params = [ - ("QuicGraphLassoEBIC : BIC", 0), - ("QuicGraphLassoEBIC : g=0.01", 0.01), - ("QuicGraphLassoEBIC : g=0.1", 0.1), + ("QuicGraphicalLassoEBIC : BIC", 0), + ("QuicGraphicalLassoEBIC : g=0.01", 0.01), + ("QuicGraphicalLassoEBIC : g=0.1", 0.1), ] for name, gamma in params: start_time = time.time() @@ -360,14 +360,14 @@ def _count_support_diff(m, m_hat): results.append([name, "", supp_diff, ctime, lam]) print("") - # Adaptive QuicGraphLassoCV and 
QuicGraphLassoEBIC + # Adaptive QuicGraphicalLassoCV and QuicGraphicalLassoEBIC params = [ - ("Adaptive CV : binary", "QuicGraphLassoCV", "binary"), - ("Adaptive CV : inv", "QuicGraphLassoCV", "inverse"), - ("Adaptive CV : inv**2", "QuicGraphLassoCV", "inverse_squared"), - ("Adaptive BIC : binary", "QuicGraphLassoEBIC", "binary"), - ("Adaptive BIC : inv", "QuicGraphLassoEBIC", "inverse"), - ("Adaptive BIC : inv**2", "QuicGraphLassoEBIC", "inverse_squared"), + ("Adaptive CV : binary", "QuicGraphicalLassoCV", "binary"), + ("Adaptive CV : inv", "QuicGraphicalLassoCV", "inverse"), + ("Adaptive CV : inv**2", "QuicGraphicalLassoCV", "inverse_squared"), + ("Adaptive BIC : binary", "QuicGraphicalLassoEBIC", "binary"), + ("Adaptive BIC : inv", "QuicGraphicalLassoEBIC", "inverse"), + ("Adaptive BIC : inv**2", "QuicGraphicalLassoEBIC", "inverse_squared"), ] for name, model_selector, method in params: start_time = time.time() diff --git a/examples/plot_functional_brain_networks.py b/examples/plot_functional_brain_networks.py index 4d7ab58..1b5ae18 100644 --- a/examples/plot_functional_brain_networks.py +++ b/examples/plot_functional_brain_networks.py @@ -24,10 +24,10 @@ sys.path.append("..") sys.path.append("../inverse_covariance") from inverse_covariance import ( - QuicGraphLasso, - QuicGraphLassoCV, - QuicGraphLassoEBIC, - AdaptiveGraphLasso, + QuicGraphicalLasso, + QuicGraphicalLassoCV, + QuicGraphicalLassoEBIC, + AdaptiveGraphicalLasso, ) plt.ion() @@ -64,27 +64,28 @@ ############################################################################### # Extract and plot sparse inverse covariance -estimator_type = "QuicGraphLasso" +estimator_type = "QuicGraphicalLasso" -if estimator_type == "QuicGraphLasso": - # Compute the sparse inverse covariance via QuicGraphLasso - estimator = QuicGraphLasso(init_method="cov", lam=0.5, mode="default", verbose=1) +if estimator_type == "QuicGraphicalLasso": + # Compute the sparse inverse covariance via QuicGraphicalLasso + estimator = 
QuicGraphicalLasso(init_method="cov", lam=0.5, mode="default", verbose=1) + estimator.fit(timeseries) -elif estimator_type == "QuicGraphLassoCV": - # Compute the sparse inverse covariance via QuicGraphLassoCV - estimator = QuicGraphLassoCV(init_method="cov", verbose=1) +elif estimator_type == "QuicGraphicalLassoCV": + # Compute the sparse inverse covariance via QuicGraphicalLassoCV + estimator = QuicGraphicalLassoCV(init_method="cov", verbose=1) estimator.fit(timeseries) -elif estimator_type == "QuicGraphLassoEBIC": - # Compute the sparse inverse covariance via QuicGraphLassoEBIC - estimator = QuicGraphLassoEBIC(init_method="cov", verbose=1) +elif estimator_type == "QuicGraphicalLassoEBIC": + # Compute the sparse inverse covariance via QuicGraphicalLassoEBIC + estimator = QuicGraphicalLassoEBIC(init_method="cov", verbose=1) estimator.fit(timeseries) -elif estimator_type == "AdaptiveQuicGraphLasso": +elif estimator_type == "AdaptiveQuicGraphicalLasso": # Compute the sparse inverse covariance via - # AdaptiveGraphLasso + QuicGraphLassoEBIC + method='binary' - model = AdaptiveGraphLasso( - estimator=QuicGraphLassoEBIC(init_method="cov"), method="binary" + # AdaptiveGraphicalLasso + QuicGraphicalLassoEBIC + method='binary' + model = AdaptiveGraphicalLasso( + estimator=QuicGraphicalLassoEBIC(init_method="cov"), method="binary" ) model.fit(timeseries) estimator = model.estimator_ @@ -106,4 +107,6 @@ node_size=20, ) plotting.show() -raw_input() + +input("Press any key to exit..") + diff --git a/examples/trace_plot_example.py b/examples/trace_plot_example.py index 8f0ac14..0c68fee 100644 --- a/examples/trace_plot_example.py +++ b/examples/trace_plot_example.py @@ -12,7 +12,7 @@ from sklearn.datasets import make_sparse_spd_matrix sys.path.append("..") -from inverse_covariance import QuicGraphLasso +from inverse_covariance import QuicGraphicalLasso from inverse_covariance.plot_util import trace_plot from inverse_covariance.profiling import LatticeGraph @@ -46,14 +46,14 
@@ def make_data_banded(n_samples, n_features): def show_quic_coefficient_trace(X): path = np.logspace(np.log10(0.01), np.log10(1.0), num=50, endpoint=True)[::-1] - estimator = QuicGraphLasso(lam=1.0, path=path, mode="path") + estimator = QuicGraphicalLasso(lam=1.0, path=path, mode="path") estimator.fit(X) trace_plot(estimator.precision_, estimator.path_, n_edges=20) def show_quic_coefficient_trace_truth(X, truth): path = np.logspace(np.log10(0.01), np.log10(1.0), num=50, endpoint=True)[::-1] - estimator = QuicGraphLasso(lam=1.0, path=path, mode="path") + estimator = QuicGraphicalLasso(lam=1.0, path=path, mode="path") estimator.fit(X) trace_plot(estimator.precision_, estimator.path_, n_edges=6, ground_truth=truth) diff --git a/inverse_covariance/__init__.py b/inverse_covariance/__init__.py index d00a29c..7e9b48b 100644 --- a/inverse_covariance/__init__.py +++ b/inverse_covariance/__init__.py @@ -1,10 +1,18 @@ from __future__ import absolute_import from .inverse_covariance import InverseCovarianceEstimator -from .quic_graph_lasso import quic, QuicGraphLasso, QuicGraphLassoCV, QuicGraphLassoEBIC +from .quic_graph_lasso import ( + quic, + QuicGraphLasso, + QuicGraphLassoCV, + QuicGraphLassoEBIC, + QuicGraphicalLasso, + QuicGraphicalLassoCV, + QuicGraphicalLassoEBIC, +) from .metrics import log_likelihood, kl_loss, quadratic_loss, ebic from .rank_correlation import spearman_correlation, kendalltau_correlation from .model_average import ModelAverage -from .adaptive_graph_lasso import AdaptiveGraphLasso +from .adaptive_graph_lasso import AdaptiveGraphLasso, AdaptiveGraphicalLasso from .cross_validation import RepeatedKFold __all__ = [ @@ -13,6 +21,9 @@ "QuicGraphLasso", "QuicGraphLassoCV", "QuicGraphLassoEBIC", + "QuicGraphicalLasso", + "QuicGraphicalLassoCV", + "QuicGraphicalLassoEBIC", "log_likelihood", "kl_loss", "quadratic_loss", @@ -21,5 +32,6 @@ "kendalltau_correlation", "ModelAverage", "AdaptiveGraphLasso", + "AdaptiveGraphicalLasso", "RepeatedKFold", ] diff --git 
a/inverse_covariance/adaptive_graph_lasso.py b/inverse_covariance/adaptive_graph_lasso.py index 1211d5d..71721df 100644 --- a/inverse_covariance/adaptive_graph_lasso.py +++ b/inverse_covariance/adaptive_graph_lasso.py @@ -1,13 +1,13 @@ from __future__ import absolute_import import numpy as np -from sklearn.utils import check_array, as_float_array +from sklearn.utils import check_array, as_float_array, deprecated from sklearn.base import BaseEstimator -from . import QuicGraphLasso, QuicGraphLassoCV, InverseCovarianceEstimator +from . import QuicGraphicalLasso, QuicGraphicalLassoCV, InverseCovarianceEstimator -class AdaptiveGraphLasso(BaseEstimator): +class AdaptiveGraphicalLasso(BaseEstimator): """ Two-stage adaptive estimator. @@ -16,7 +16,7 @@ class AdaptiveGraphLasso(BaseEstimator): output. b) The resulting coefficients are used to generate adaptive weights and - the QuicGraphLasso is refit with these weights. + the QuicGraphicalLasso is refit with these weights. See: "High dimensional covariance estimation based on Gaussian graphical @@ -28,8 +28,8 @@ class AdaptiveGraphLasso(BaseEstimator): Parameters ----------- - estimator : GraphLasso instance with model selection - (default=QuicGraphLassoCV()) + estimator : GraphicalLasso instance with model selection + (default=QuicGraphicalLassoCV()) After being fit, estimator.precision_ must either be a matrix. method : one of 'binary', 'inverse_squared', 'inverse' (default='binary') @@ -40,7 +40,7 @@ class AdaptiveGraphLasso(BaseEstimator): Attributes ---------- - estimator_ : QuicGraphLasso instance + estimator_ : QuicGraphicalLasso instance The final estimator refit with adaptive weights. lam_ : 2D ndarray, shape (n_features, n_features) @@ -85,8 +85,8 @@ def fit(self, X, y=None): X : ndarray, shape (n_samples, n_features) Data from which to compute the proportion matrix. 
""" - # default to QuicGraphLassoCV - estimator = self.estimator or QuicGraphLassoCV() + # default to QuicGraphicalLassoCV + estimator = self.estimator or QuicGraphicalLassoCV() self.lam_ = None self.estimator_ = None @@ -104,7 +104,7 @@ def fit(self, X, y=None): self.lam_ = self._binary_weights(estimator) # perform second step adaptive estimate - self.estimator_ = QuicGraphLasso( + self.estimator_ = QuicGraphicalLasso( lam=self.lam_ * estimator.lam_, mode="default", init_method="cov", @@ -116,7 +116,7 @@ def fit(self, X, y=None): self.lam_ = self._inverse_squared_weights(estimator) # perform second step adaptive estimate - self.estimator_ = QuicGraphLassoCV( + self.estimator_ = QuicGraphicalLassoCV( lam=self.lam_ * self.estimator.lam_, auto_scale=False ) self.estimator_.fit(X) @@ -125,7 +125,7 @@ def fit(self, X, y=None): self.lam_ = self._inverse_weights(estimator) # perform second step adaptive estimate - self.estimator_ = QuicGraphLassoCV( + self.estimator_ = QuicGraphicalLassoCV( lam=self.lam_ * estimator.lam_, auto_scale=False ) self.estimator_.fit(X) @@ -140,3 +140,11 @@ def fit(self, X, y=None): self.is_fitted_ = True return self + + +@deprecated( + "The class AdaptiveGraphLasso is deprecated " + "Use class AdaptiveGraphicalLasso instead." +) +class AdaptiveGraphLasso(AdaptiveGraphicalLasso): + pass diff --git a/inverse_covariance/model_average.py b/inverse_covariance/model_average.py index e083cbe..4f04504 100644 --- a/inverse_covariance/model_average.py +++ b/inverse_covariance/model_average.py @@ -7,7 +7,7 @@ from functools import partial from .inverse_covariance import _init_coefs -from . import QuicGraphLasso +from . import QuicGraphicalLasso def _check_psd(m): @@ -216,7 +216,7 @@ class ModelAverage(BaseEstimator): subsampling: Only the observations will be subsampled, the original penalty supplied in the estimator instance will be used. Use this technique when the estimator does not support - matrix penalization (e.g., sklearn GraphLasso). 
+ matrix penalization (e.g., sklearn GraphicalLasso). random: In addition to randomly subsampling the observations, 'random' applies a randomly-perturbed 'lam' weight matrix. The entries @@ -328,8 +328,8 @@ def fit(self, X, y=None): X : ndarray, shape (n_samples, n_features) Data from which to compute the proportion matrix. """ - # default to QuicGraphLasso - estimator = self.estimator or QuicGraphLasso() + # default to QuicGraphicalLasso + estimator = self.estimator or QuicGraphicalLasso() if self.penalization != "subsampling" and not hasattr( estimator, self.penalty_name @@ -415,14 +415,14 @@ def fit(self, X, y=None): @property def precision_(self): - """Convenience property to make compatible with AdaptiveGraphLasso. + """Convenience property to make compatible with AdaptiveGraphicalLasso. This is not a very good precision estimate. """ return self.support_ @property def covariance_(self): - """Convenience property to make compatible with AdaptiveGraphLasso. + """Convenience property to make compatible with AdaptiveGraphicalLasso. This is not a very good covariance estimate. 
""" return np.linalg.inv(self.support_) diff --git a/inverse_covariance/quic_graph_lasso.py b/inverse_covariance/quic_graph_lasso.py index e2e04cb..5f95ca9 100644 --- a/inverse_covariance/quic_graph_lasso.py +++ b/inverse_covariance/quic_graph_lasso.py @@ -7,7 +7,7 @@ from functools import partial from sklearn.covariance import EmpiricalCovariance -from sklearn.utils import check_array, as_float_array +from sklearn.utils import check_array, as_float_array, deprecated from sklearn.utils.testing import assert_array_almost_equal from sklearn.externals.joblib import Parallel, delayed from sklearn.model_selection import cross_val_score # NOQA >= 0.18 @@ -165,7 +165,7 @@ def quic( return Theta_out, Sigma_out, opt, cputime, iters, dGap -class QuicGraphLasso(InverseCovarianceEstimator): +class QuicGraphicalLasso(InverseCovarianceEstimator): """ Computes a sparse inverse covariance matrix estimation using quadratic approximation. @@ -304,7 +304,7 @@ def __init__( raise ValueError("path required in path mode.") return - super(QuicGraphLasso, self).__init__( + super(QuicGraphicalLasso, self).__init__( score_metric=score_metric, init_method=init_method, auto_scale=auto_scale ) @@ -376,6 +376,13 @@ def lam_(self): return self.lam_at_index(0) +@deprecated( + "The class QuicGraphLasso is deprecated " "Use class QuicGraphicalLasso instead." +) +class QuicGraphLasso(QuicGraphicalLasso): + pass + + def _quic_path( X, path, @@ -437,11 +444,11 @@ def _quic_path_spark(indexed_params, quic_path, X_bc): return index, result -class QuicGraphLassoCV(InverseCovarianceEstimator): +class QuicGraphicalLassoCV(InverseCovarianceEstimator): """Sparse inverse covariance w/ cross-validated choice of the l1 penalty via quadratic approximation. - This takes advantage of "path" mode in QuicGraphLasso. + This takes advantage of "path" mode in QuicGraphicalLasso. See sklearn.covariance.graph_lasso.GraphLassoCV. 
Parameters @@ -582,7 +589,7 @@ def __init__( self.verbose = verbose self.backend = backend - super(QuicGraphLassoCV, self).__init__( + super(QuicGraphicalLassoCV, self).__init__( score_metric=score_metric, init_method=init_method, auto_scale=auto_scale ) @@ -794,12 +801,20 @@ def fit(self, X, y=None): return self -class QuicGraphLassoEBIC(InverseCovarianceEstimator): +@deprecated( + "The class QuicGraphLassoCV is deprecated " + "Use class QuicGraphicalLassoCV instead." +) +class QuicGraphLassoCV(QuicGraphicalLassoCV): + pass + + +class QuicGraphicalLassoEBIC(InverseCovarianceEstimator): """ Computes a sparse inverse covariance matrix estimation using quadratic approximation and EBIC model selection. (Convenience Class) - Note: This estimate can be obtained using the more general QuicGraphLasso + Note: This estimate can be obtained using the more general QuicGraphicalLasso estimator and taking advantage of `ebic_select()` and `lambda_at_index()` methods. @@ -901,7 +916,7 @@ def __init__( self.path = path self.gamma = gamma - super(QuicGraphLassoEBIC, self).__init__( + super(QuicGraphicalLassoEBIC, self).__init__( init_method=init_method, score_metric=score_metric, auto_scale=auto_scale ) @@ -974,3 +989,11 @@ def fit(self, X, y=None, **fit_params): self.is_fitted_ = True return self + + +@deprecated( + "The class QuicGraphLassoEBIC is deprecated " + "Use class QuicGraphicalLassoEBIC instead." 
+) +class QuicGraphLassoEBIC(QuicGraphicalLassoEBIC): + pass diff --git a/inverse_covariance/tests/adaptive_graph_lasso_test.py b/inverse_covariance/tests/adaptive_graph_lasso_test.py index 47e3b98..ada96d6 100644 --- a/inverse_covariance/tests/adaptive_graph_lasso_test.py +++ b/inverse_covariance/tests/adaptive_graph_lasso_test.py @@ -1,17 +1,21 @@ import numpy as np import pytest -from inverse_covariance import QuicGraphLassoEBIC, AdaptiveGraphLasso, QuicGraphLassoCV +from inverse_covariance import ( + QuicGraphicalLassoEBIC, + AdaptiveGraphicalLasso, + QuicGraphicalLassoCV, +) from inverse_covariance.profiling import ClusterGraph -class TestAdaptiveGraphLasso(object): +class TestAdaptiveGraphicalLasso(object): @pytest.mark.parametrize( "params_in", [ ( { - "estimator": QuicGraphLassoCV( + "estimator": QuicGraphicalLassoCV( cv=2, n_refinements=6, init_method="cov", @@ -22,7 +26,7 @@ class TestAdaptiveGraphLasso(object): ), ( { - "estimator": QuicGraphLassoCV( + "estimator": QuicGraphicalLassoCV( cv=2, n_refinements=6, init_method="cov", @@ -33,7 +37,7 @@ class TestAdaptiveGraphLasso(object): ), ( { - "estimator": QuicGraphLassoCV( + "estimator": QuicGraphicalLassoCV( cv=2, n_refinements=6, init_method="cov", @@ -42,12 +46,12 @@ class TestAdaptiveGraphLasso(object): "method": "inverse_squared", } ), - ({"estimator": QuicGraphLassoEBIC(), "method": "binary"}), - ({"estimator": QuicGraphLassoEBIC(), "method": "inverse"}), - ({"estimator": QuicGraphLassoEBIC(), "method": "inverse_squared"}), + ({"estimator": QuicGraphicalLassoEBIC(), "method": "binary"}), + ({"estimator": QuicGraphicalLassoEBIC(), "method": "inverse"}), + ({"estimator": QuicGraphicalLassoEBIC(), "method": "inverse_squared"}), ], ) - def test_integration_adaptive_graph_lasso(self, params_in): + def test_integration_adaptive_graphical_lasso(self, params_in): """ Just tests inputs/outputs (not validity of result). 
""" @@ -59,7 +63,7 @@ def test_integration_adaptive_graph_lasso(self, params_in): prng = np.random.RandomState(2) X = prng.multivariate_normal(np.zeros(n_features), cov, size=n_samples) - model = AdaptiveGraphLasso(**params_in) + model = AdaptiveGraphicalLasso(**params_in) model.fit(X) assert model.estimator_ is not None assert model.lam_ is not None diff --git a/inverse_covariance/tests/common_test.py b/inverse_covariance/tests/common_test.py index 3adcfd4..aadfd96 100644 --- a/inverse_covariance/tests/common_test.py +++ b/inverse_covariance/tests/common_test.py @@ -1,27 +1,27 @@ from sklearn.utils.estimator_checks import check_estimator from inverse_covariance import ( - QuicGraphLasso, - QuicGraphLassoCV, - QuicGraphLassoEBIC, - AdaptiveGraphLasso, + QuicGraphicalLasso, + QuicGraphicalLassoCV, + QuicGraphicalLassoEBIC, + AdaptiveGraphicalLasso, ModelAverage, ) -def test_quic_graph_lasso(): - return check_estimator(QuicGraphLasso) +def test_quic_graphical_lasso(): + return check_estimator(QuicGraphicalLasso) -def test_quic_graph_lasso_cv(): - return check_estimator(QuicGraphLassoCV) +def test_quic_graphical_lasso_cv(): + return check_estimator(QuicGraphicalLassoCV) -def test_quic_graph_lasso_ebic(): - return check_estimator(QuicGraphLassoEBIC) +def test_quic_graphical_lasso_ebic(): + return check_estimator(QuicGraphicalLassoEBIC) -def test_adaptive_graph_lasso(): - return check_estimator(AdaptiveGraphLasso) +def test_adaptive_graphical_lasso(): + return check_estimator(AdaptiveGraphicalLasso) def test_model_average(): diff --git a/inverse_covariance/tests/model_average_test.py b/inverse_covariance/tests/model_average_test.py index 7e53615..ad1f8b6 100644 --- a/inverse_covariance/tests/model_average_test.py +++ b/inverse_covariance/tests/model_average_test.py @@ -3,7 +3,7 @@ from sklearn.covariance import GraphLassoCV -from inverse_covariance import QuicGraphLassoCV, QuicGraphLasso, ModelAverage +from inverse_covariance import QuicGraphicalLassoCV, 
QuicGraphicalLasso, ModelAverage from inverse_covariance.profiling import ClusterGraph @@ -13,7 +13,7 @@ class TestModelAverage(object): [ ( { - "estimator": QuicGraphLasso(), + "estimator": QuicGraphicalLasso(), "n_trials": 10, "normalize": True, "subsample": 0.3, @@ -22,7 +22,7 @@ class TestModelAverage(object): ), ( { - "estimator": QuicGraphLasso(lam=0.5, mode="trace"), + "estimator": QuicGraphicalLasso(lam=0.5, mode="trace"), "n_trials": 10, "normalize": False, "subsample": 0.6, @@ -31,7 +31,7 @@ class TestModelAverage(object): ), ( { - "estimator": QuicGraphLassoCV(cv=(2, 1)), + "estimator": QuicGraphicalLassoCV(cv=(2, 1)), "n_trials": 2, "normalize": True, "subsample": 0.9, @@ -52,7 +52,7 @@ class TestModelAverage(object): ), ( { - "estimator": QuicGraphLasso(), + "estimator": QuicGraphicalLasso(), "n_trials": 10, "normalize": True, "subsample": 0.3, diff --git a/inverse_covariance/tests/quic_graph_lasso_test.py b/inverse_covariance/tests/quic_graph_lasso_test.py index 0a3ff6b..dec6b63 100644 --- a/inverse_covariance/tests/quic_graph_lasso_test.py +++ b/inverse_covariance/tests/quic_graph_lasso_test.py @@ -6,9 +6,9 @@ from sklearn import datasets from inverse_covariance import ( - QuicGraphLasso, - QuicGraphLassoCV, - QuicGraphLassoEBIC, + QuicGraphicalLasso, + QuicGraphicalLassoCV, + QuicGraphicalLassoEBIC, quic, ) @@ -18,7 +18,7 @@ def custom_init(X): return init_cov, np.max(np.abs(np.triu(init_cov))) -class TestQuicGraphLasso(object): +class TestQuicGraphicalLasso(object): @pytest.mark.parametrize( "params_in, expected", [ @@ -85,12 +85,12 @@ class TestQuicGraphLasso(object): ), # NOQA ], ) - def test_integration_quic_graph_lasso(self, params_in, expected): + def test_integration_quic_graphical_lasso(self, params_in, expected): """ Just tests inputs/outputs (not validity of result). 
""" X = datasets.load_diabetes().data - ic = QuicGraphLasso(**params_in) + ic = QuicGraphicalLasso(**params_in) ic.fit(X) result_vec = [ @@ -159,7 +159,7 @@ def test_integration_quic_graph_lasso(self, params_in, expected): ), # NOQA ], ) - def test_integration_quic_graph_lasso_fun(self, params_in, expected): + def test_integration_quic_graphical_lasso_fun(self, params_in, expected): """ Just tests inputs/outputs (not validity of result). """ @@ -247,12 +247,12 @@ def test_integration_quic_graph_lasso_fun(self, params_in, expected): ), # NOQA ], ) - def test_integration_quic_graph_lasso_cv(self, params_in, expected): + def test_integration_quic_graphical_lasso_cv(self, params_in, expected): """ Just tests inputs/outputs (not validity of result). """ X = datasets.load_diabetes().data - ic = QuicGraphLassoCV(**params_in) + ic = QuicGraphicalLassoCV(**params_in) ic.fit(X) result_vec = [ @@ -282,12 +282,12 @@ def test_integration_quic_graph_lasso_cv(self, params_in, expected): ), # NOQA ], ) - def test_integration_quic_graph_lasso_ebic(self, params_in, expected): + def test_integration_quic_graphical_lasso_ebic(self, params_in, expected): """ Just tests inputs/outputs (not validity of result). """ X = datasets.load_diabetes().data - ic = QuicGraphLassoEBIC(**params_in) + ic = QuicGraphicalLassoEBIC(**params_in) ic.fit(X) result_vec = [np.linalg.norm(ic.covariance_), np.linalg.norm(ic.precision_)] @@ -304,5 +304,5 @@ def test_invalid_method(self): Test behavior of invalid inputs. """ X = datasets.load_diabetes().data - ic = QuicGraphLasso(method="unknownmethod") + ic = QuicGraphicalLasso(method="unknownmethod") assert_raises(NotImplementedError, ic.fit, X)