Commit e18a32d (parent: ae377f6)

2 files changed (+17, -8 lines)

sklearn/linear_model/ridge.py

Lines changed: 5 additions & 2 deletions
@@ -582,8 +582,11 @@ def fit(self, X, y, sample_weight=1.0):
         self : Returns self.
         """
         if self.cv is None:
-            estimator = _RidgeGCV(self.alphas, self.fit_intercept,
-                                  self.score_func, self.loss_func,
+            estimator = _RidgeGCV(self.alphas,
+                                  fit_intercept=self.fit_intercept,
+                                  normalize=self.normalize,
+                                  score_func=self.score_func,
+                                  loss_func=self.loss_func,
                                   gcv_mode=self.gcv_mode,
                                   store_cv_values=self.store_cv_values)
             estimator.fit(X, y, sample_weight=sample_weight)
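
A note on the hunk above: _RidgeGCV is now constructed with keyword arguments only, and the estimator's normalize option is forwarded along with the rest. The sketch below is a hypothetical stand-in (its constructor signature is only assumed from the keywords used in the patch; it is not the real sklearn class). It shows the hazard the keyword style avoids: if a normalize parameter is inserted between fit_intercept and score_func, the old positional call silently binds score_func to normalize.

# Hypothetical stand-in; the parameter order is an assumption, not sklearn code.
class _RidgeGCVSketch(object):
    def __init__(self, alphas=(0.1, 1.0, 10.0), fit_intercept=True,
                 normalize=False, score_func=None, loss_func=None):
        self.alphas = alphas
        self.fit_intercept = fit_intercept
        self.normalize = normalize
        self.score_func = score_func
        self.loss_func = loss_func

def my_score(y_true, y_pred):
    return 0.0  # placeholder scoring callable, never actually called here

# Old positional style: the callables land one slot too early.
broken = _RidgeGCVSketch((0.1, 1.0, 10.0), True, my_score, None)
assert broken.normalize is my_score and broken.score_func is None

# New keyword style, as in the patched fit(): every option reaches its slot.
fixed = _RidgeGCVSketch((0.1, 1.0, 10.0), fit_intercept=True, normalize=False,
                        score_func=my_score, loss_func=None)
assert fixed.score_func is my_score and fixed.normalize is False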

sklearn/linear_model/tests/test_ridge.py

Lines changed: 12 additions & 6 deletions
@@ -160,7 +160,7 @@ def _test_ridge_loo(filter_):
     ret = []

     ridge_gcv = _RidgeGCV(fit_intercept=False)
-    ridge = Ridge(fit_intercept=False)
+    ridge = Ridge(alpha=1.0, fit_intercept=False)

     # generalized cross-validation (efficient leave-one-out)
     decomp = ridge_gcv._pre_compute(X_diabetes, y_diabetes)
@@ -187,8 +187,8 @@ def _test_ridge_loo(filter_):
     # generalized cross-validation (efficient leave-one-out,
     # SVD variation)
     decomp = ridge_gcv._pre_compute_svd(X_diabetes, y_diabetes)
-    errors3, c = ridge_gcv._errors_svd(1.0, y_diabetes, *decomp)
-    values3, c = ridge_gcv._values_svd(1.0, y_diabetes, *decomp)
+    errors3, c = ridge_gcv._errors_svd(ridge.alpha, y_diabetes, *decomp)
+    values3, c = ridge_gcv._values_svd(ridge.alpha, y_diabetes, *decomp)

     # check that efficient and SVD efficient LOO give same results
     assert_almost_equal(errors, errors3)
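
For context on the two hunks above: _test_ridge_loo compares the efficient leave-one-out code path against its SVD variant at the same regularization strength, which is now read from ridge.alpha instead of being hard-coded to 1.0. The standalone sketch below (plain NumPy, not the sklearn internals) demonstrates the identity that makes a single fit sufficient for intercept-free ridge: each leave-one-out residual equals the full-fit residual divided by 1 - H_ii, where H = X (X^T X + alpha * I)^-1 X^T is the hat matrix.

import numpy as np

rng = np.random.RandomState(0)
X = rng.randn(30, 5)
y = rng.randn(30)
alpha = 1.0

def ridge_coef(X, y, alpha):
    # Closed-form ridge solution without an intercept.
    return np.linalg.solve(X.T @ X + alpha * np.eye(X.shape[1]), X.T @ y)

# Shortcut: all leave-one-out residuals from a single fit via the hat matrix.
H = X @ np.linalg.solve(X.T @ X + alpha * np.eye(X.shape[1]), X.T)
loo_fast = (y - H @ y) / (1.0 - np.diag(H))

# Brute force: refit once per left-out sample.
loo_slow = np.array([
    y[i] - X[i] @ ridge_coef(np.delete(X, i, axis=0), np.delete(y, i), alpha)
    for i in range(len(y))
])

assert np.allclose(loo_fast, loo_slow)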
@@ -200,10 +200,16 @@ def _test_ridge_loo(filter_):
     ret.append(best_alpha)

     # check that we get same best alpha with custom loss_func
-    ridge_gcv2 = _RidgeGCV(fit_intercept=False, loss_func=mean_squared_error)
+    ridge_gcv2 = RidgeCV(fit_intercept=False, loss_func=mean_squared_error)
     ridge_gcv2.fit(filter_(X_diabetes), y_diabetes)
     assert_equal(ridge_gcv2.best_alpha, best_alpha)

+    # check that we get same best alpha with custom score_func
+    func = lambda x, y: -mean_squared_error(x, y)
+    ridge_gcv3 = RidgeCV(fit_intercept=False, score_func=func)
+    ridge_gcv3.fit(filter_(X_diabetes), y_diabetes)
+    assert_equal(ridge_gcv3.best_alpha, best_alpha)
+
     # check that we get same best alpha with sample weights
     ridge_gcv.fit(filter_(X_diabetes), y_diabetes,
                   sample_weight=np.ones(n_samples))
@@ -347,9 +353,9 @@ def test_class_weights_cv():
     assert_array_equal(clf.predict([[-.2, 2]]), np.array([-1]))


-def test_ridgegcv_store_cv_values():
+def test_ridgecv_store_cv_values():
     """
-    Test _RidgeGCV's store_cv_values attribute.
+    Test _RidgeCV's store_cv_values attribute.
     """
     rng = rng = np.random.RandomState(42)

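The new assertions above check that minimizing a custom loss_func and maximizing its negation passed as score_func select the same best_alpha, now going through the public RidgeCV rather than the private _RidgeGCV; the store_cv_values test is likewise renamed to match the public class. On a current scikit-learn the score_func/loss_func arguments and the best_alpha attribute no longer exist; a rough modern equivalent of the same check, using scoring= and alpha_ (an illustration only, not part of this commit), looks like this:

import numpy as np
from sklearn.datasets import load_diabetes
from sklearn.linear_model import RidgeCV

X, y = load_diabetes(return_X_y=True)
alphas = np.logspace(-2, 2, 20)

# Default RidgeCV picks alpha by leave-one-out squared error...
cv_default = RidgeCV(alphas=alphas, fit_intercept=False).fit(X, y)

# ...which should agree with maximizing negated mean squared error.
cv_scored = RidgeCV(alphas=alphas, fit_intercept=False,
                    scoring="neg_mean_squared_error").fit(X, y)

print(cv_default.alpha_, cv_scored.alpha_)  # expected to match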