
Commit f33e9ac

Merge pull request scikit-learn#3396 from amueller/less_loud_randomized_lasso
ENH add convergence_warning option to lars_path.
2 parents 60bd856 + d146d23 commit f33e9ac

File tree: 2 files changed (+15 −8 lines)

examples/linear_model/plot_sparse_recovery.py

Lines changed: 2 additions & 0 deletions
@@ -56,6 +56,7 @@
 from sklearn.metrics import auc, precision_recall_curve
 from sklearn.ensemble import ExtraTreesRegressor
 from sklearn.utils.extmath import pinvh
+from sklearn.utils import ConvergenceWarning


 def mutual_incoherence(X_relevant, X_irelevant):
@@ -137,6 +138,7 @@ def mutual_incoherence(X_relevant, X_irelevant):
     # as it is specifically set up to be challenging.
     with warnings.catch_warnings():
         warnings.simplefilter('ignore', UserWarning)
+        warnings.simplefilter('ignore', ConvergenceWarning)
         lars_cv = LassoLarsCV(cv=6).fit(X, y)

     # Run the RandomizedLasso: we use a paths going down to .1*alpha_max
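
For context, the example silences the warnings only around the cross-validated fit rather than globally. Below is a minimal standalone sketch of that pattern; the synthetic data from make_regression is illustrative and not part of the example, and ConvergenceWarning is imported from sklearn.utils as in this diff (later releases expose it from sklearn.exceptions).

import warnings

from sklearn.datasets import make_regression
from sklearn.linear_model import LassoLarsCV
from sklearn.utils import ConvergenceWarning

# Illustrative data only; the real example builds a correlated design on purpose.
X, y = make_regression(n_samples=100, n_features=300, n_informative=5,
                       random_state=0)

# Suppress the expected warnings for this fit only; the previous filters are
# restored as soon as the context manager exits.
with warnings.catch_warnings():
    warnings.simplefilter('ignore', UserWarning)
    warnings.simplefilter('ignore', ConvergenceWarning)
    lars_cv = LassoLarsCV(cv=6).fit(X, y)

print(lars_cv.alpha_)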

sklearn/linear_model/randomized_l1.py

Lines changed: 13 additions & 8 deletions
@@ -8,6 +8,7 @@
 # License: BSD 3 clause
 import itertools
 from abc import ABCMeta, abstractmethod
+import warnings

 import numpy as np
 from scipy.sparse import issparse
@@ -19,7 +20,7 @@
 from ..externals import six
 from ..externals.joblib import Memory, Parallel, delayed
 from ..utils import (as_float_array, check_random_state, safe_asarray,
-                     check_arrays, safe_mask)
+                     check_arrays, safe_mask, ConvergenceWarning)
 from .least_angle import lars_path, LassoLarsIC
 from .logistic import LogisticRegression

@@ -158,11 +159,13 @@ def _randomized_lasso(X, y, weights, mask, alpha=1., verbose=False,
     alpha = np.atleast_1d(np.asarray(alpha, dtype=np.float))

     X = (1 - weights) * X
-    alphas_, _, coef_ = lars_path(X, y,
-                                  Gram=precompute, copy_X=False,
-                                  copy_Gram=False, alpha_min=np.min(alpha),
-                                  method='lasso', verbose=verbose,
-                                  max_iter=max_iter, eps=eps)
+    with warnings.catch_warnings():
+        warnings.simplefilter('ignore', ConvergenceWarning)
+        alphas_, _, coef_ = lars_path(X, y,
+                                      Gram=precompute, copy_X=False,
+                                      copy_Gram=False, alpha_min=np.min(alpha),
+                                      method='lasso', verbose=verbose,
+                                      max_iter=max_iter, eps=eps)

     if len(alpha) > 1:
         if len(alphas_) > 1:  # np.min(alpha) < alpha_min
@@ -504,8 +507,10 @@ def _lasso_stability_path(X, y, mask, weights, eps):

     alpha_max = np.max(np.abs(np.dot(X.T, y))) / X.shape[0]
     alpha_min = eps * alpha_max  # set for early stopping in path
-    alphas, _, coefs = lars_path(X, y, method='lasso', verbose=False,
-                                 alpha_min=alpha_min)
+    with warnings.catch_warnings():
+        warnings.simplefilter('ignore', ConvergenceWarning)
+        alphas, _, coefs = lars_path(X, y, method='lasso', verbose=False,
+                                     alpha_min=alpha_min)
     # Scale alpha by alpha_max
     alphas /= alphas[0]
     # Sort alphas in assending order
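
Both hunks apply the same local-suppression idiom around lars_path, so an early-stopped path no longer emits a ConvergenceWarning to callers at every resampling iteration. A minimal sketch of that idiom in isolation follows; the toy data and the deliberately small max_iter are assumptions chosen to make early stopping likely, not values taken from the patch.

import warnings

import numpy as np

from sklearn.linear_model import lars_path
from sklearn.utils import ConvergenceWarning  # sklearn.exceptions in newer releases

rng = np.random.RandomState(42)
X = rng.randn(30, 100)
y = rng.randn(30)

# A small max_iter so the path may stop before convergence; the filter keeps
# any resulting ConvergenceWarning out of the calling code.
with warnings.catch_warnings():
    warnings.simplefilter('ignore', ConvergenceWarning)
    alphas, _, coefs = lars_path(X, y, method='lasso', max_iter=5)

print(alphas.shape, coefs.shape)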
