Commit 77e69ed

COSMIT rm deprecated stuff -- lots of it
1 parent 46f4c55 commit 77e69ed

12 files changed: 32 additions, 174 deletions

doc/whats_new.rst

Lines changed: 15 additions & 0 deletions
@@ -16,6 +16,21 @@ Changelog
 
 - Speed up of :func:`metrics.precision_recall_curve` by Conrad Lee.
 
+API changes summary
+-------------------
+
+- The module ``sklearn.linear_model.sparse`` is gone. Sparse matrix support
+  was already integrated into the "regular" linear models.
+
+- ``sklearn.metrics.mean_square_error``, which incorrectly returned the
+  cumulated error, was removed. Use ``mean_squared_error`` instead.
+
+- Passing ``class_weight`` parameters to ``fit`` methods is no longer
+  supported. Pass them to estimator constructors instead.
+
+- GMMs no longer have ``decode`` and ``rvs`` methods. Use the ``score``,
+  ``predict`` or ``sample`` methods instead.
+
 
 .. _changes_0_12:
 
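For code that still calls the removed GMM methods, a rough migration sketch (toy one-dimensional data; ``decode``'s return values were not identical to ``predict``/``score``, so treat this as an orientation rather than a drop-in equivalence):

    import numpy as np
    from sklearn.mixture import GMM

    X = np.array([[0.0], [0.1], [0.2], [5.0], [5.1], [5.2]])

    g = GMM(n_components=2).fit(X)

    # formerly g.decode(X) / g.rvs(3); the supported calls are now:
    print(g.predict(X))  # hard component assignments
    print(g.score(X))    # per-sample log-likelihoods
    print(g.sample(3))   # draw new samples (replaces rvs)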

sklearn/grid_search.py

Lines changed: 6 additions & 17 deletions
@@ -329,10 +329,10 @@ def __init__(self, estimator, param_grid, loss_func=None, score_func=None,
         self.pre_dispatch = pre_dispatch
 
     def _set_methods(self):
-        if hasattr(self._best_estimator_, 'predict'):
-            self.predict = self._best_estimator_.predict
-        if hasattr(self._best_estimator_, 'predict_proba'):
-            self.predict_proba = self._best_estimator_.predict_proba
+        if hasattr(self.best_estimator_, 'predict'):
+            self.predict = self.best_estimator_.predict
+        if hasattr(self.best_estimator_, 'predict_proba'):
+            self.predict_proba = self.best_estimator_.predict_proba
 
     def fit(self, X, y=None, **params):
         """Run fit with all sets of parameters
@@ -379,7 +379,7 @@ def _fit(self, X, y):
         params = next(iter(grid))
         base_clf.set_params(**params)
         base_clf.fit(X, y)
-        self._best_estimator_ = base_clf
+        self.best_estimator_ = base_clf
         self._set_methods()
         return self
 
@@ -434,7 +434,7 @@ def _fit(self, X, y):
         # clone first to work around broken estimators
         best_estimator = clone(base_clf).set_params(**best_params)
         best_estimator.fit(X, y, **self.fit_params)
-        self._best_estimator_ = best_estimator
+        self.best_estimator_ = best_estimator
         self._set_methods()
 
         # Store the computed scores
@@ -455,14 +455,3 @@ def score(self, X, y=None):
                              % self.best_estimator_)
         y_predicted = self.predict(X)
         return self.score_func(y, y_predicted)
-
-    # TODO around 0.13: remove this property, make it an attribute
-    @property
-    def best_estimator_(self):
-        if hasattr(self, '_best_estimator_'):
-            return self._best_estimator_
-        else:
-            raise RuntimeError("Grid search has to be run with 'refit=True'"
-                               " to make predictions or obtain an instance of the best "
-                               " estimator. To obtain the best parameter settings, "
-                               " use ``best_params_``.")
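With the property gone, ``best_estimator_`` is simply assigned during ``fit`` whenever refitting happens; attribute access and the delegated ``predict`` keep working as before. A minimal usage sketch with hypothetical toy data (``cv=2`` only because the example has four samples):

    import numpy as np
    from sklearn.grid_search import GridSearchCV
    from sklearn.svm import SVC

    X = np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0], [3.0, 3.0]])
    y = np.array([0, 0, 1, 1])

    search = GridSearchCV(SVC(), param_grid={'C': [0.1, 1.0, 10.0]}, cv=2)
    search.fit(X, y)

    # best_estimator_ is now an ordinary attribute set by fit()
    print(search.best_estimator_)
    print(search.predict(X))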

sklearn/linear_model/__init__.py

Lines changed: 1 addition & 3 deletions
@@ -26,7 +26,6 @@
 from .perceptron import Perceptron
 from .randomized_l1 import RandomizedLasso, RandomizedLogisticRegression, \
     lasso_stability_path
-from . import sparse
 from .isotonic_regression_ import IsotonicRegression
 
 __all__ = ['ARDRegression',
@@ -65,5 +64,4 @@
            'lasso_stability_path',
            'orthogonal_mp',
            'orthogonal_mp_gram',
-           'ridge_regression',
-           'sparse']
+           'ridge_regression']
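With the ``sparse`` subpackage gone, ``scipy.sparse`` matrices are passed straight to the regular estimators. A minimal sketch, assuming ``SGDClassifier`` as one of the estimators with built-in sparse support and made-up toy data:

    import numpy as np
    from scipy.sparse import csr_matrix
    from sklearn.linear_model import SGDClassifier

    # a small sparse design matrix; no sklearn.linear_model.sparse.* class needed
    X = csr_matrix(np.array([[0.0, 1.0], [1.0, 0.0], [2.0, 0.0], [0.0, 3.0]]))
    y = np.array([0, 1, 1, 0])

    clf = SGDClassifier().fit(X, y)
    print(clf.predict(X))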

sklearn/linear_model/sparse/__init__.py

Lines changed: 0 additions & 8 deletions
This file was deleted.

sklearn/linear_model/sparse/coordinate_descent.py

Lines changed: 0 additions & 15 deletions
This file was deleted.

sklearn/linear_model/sparse/logistic.py

Lines changed: 0 additions & 8 deletions
This file was deleted.

sklearn/linear_model/sparse/stochastic_gradient.py

Lines changed: 0 additions & 15 deletions
This file was deleted.

sklearn/linear_model/stochastic_gradient.py

Lines changed: 1 addition & 8 deletions
@@ -450,8 +450,7 @@ def _partial_fit(self, X, y, n_iter, classes=None, sample_weight=None,
 
         return self
 
-    def partial_fit(self, X, y, classes=None,
-                    class_weight=None, sample_weight=None):
+    def partial_fit(self, X, y, classes=None, sample_weight=None):
         """Fit linear model with Stochastic Gradient Descent.
 
         Parameters
@@ -478,12 +477,6 @@ def partial_fit(self, X, y, classes=None,
         -------
         self : returns an instance of self.
         """
-        if class_weight is not None:
-            warnings.warn("Using 'class_weight' as a parameter to the 'fit'"
-                          "method is deprecated and will be removed in 0.13. "
-                          "Set it on initialization instead.",
-                          DeprecationWarning, stacklevel=2)
-            self.class_weight = class_weight
         return self._partial_fit(X, y, n_iter=1, classes=classes,
                                  sample_weight=sample_weight)
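The migration for the removed keyword: set ``class_weight`` on the estimator itself and call ``partial_fit`` without it. A minimal sketch with made-up data (an explicit weight dict is assumed, which the constructor accepts):

    import numpy as np
    from sklearn.linear_model import SGDClassifier

    X = np.array([[0.0], [1.0], [2.0], [3.0]])
    y = np.array([0, 0, 1, 1])

    # before: clf.partial_fit(X, y, classes=[0, 1], class_weight={0: 1.0, 1: 2.0})
    # now the weights are a constructor argument:
    clf = SGDClassifier(class_weight={0: 1.0, 1: 2.0})
    clf.partial_fit(X, y, classes=np.array([0, 1]))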

sklearn/metrics/__init__.py

Lines changed: 6 additions & 6 deletions
@@ -3,12 +3,12 @@
 and pairwise metrics and distance computations.
 """
 
-from .metrics import confusion_matrix, roc_curve, auc, precision_score, \
-    recall_score, fbeta_score, f1_score, zero_one_score, \
-    precision_recall_fscore_support, classification_report, \
-    precision_recall_curve, explained_variance_score, r2_score, \
-    zero_one, mean_square_error, hinge_loss, matthews_corrcoef, \
-    mean_squared_error, average_precision_score, auc_score
+from .metrics import (confusion_matrix, roc_curve, auc, precision_score,
+                      recall_score, fbeta_score, f1_score, zero_one_score,
+                      precision_recall_fscore_support, classification_report,
+                      precision_recall_curve, explained_variance_score,
+                      r2_score, zero_one, hinge_loss, matthews_corrcoef,
+                      mean_squared_error, average_precision_score, auc_score)
 
 from . import cluster
 from .cluster import adjusted_rand_score

sklearn/metrics/metrics.py

Lines changed: 0 additions & 24 deletions
@@ -1027,30 +1027,6 @@ def mean_squared_error(y_true, y_pred):
     return np.mean((y_pred - y_true) ** 2)
 
 
-@deprecated("""Incorrectly returns the cumulated error: use mean_squared_error
-    instead; to be removed in v0.13""")
-def mean_square_error(y_true, y_pred):
-    """Cumulated square error regression loss
-
-    Positive floating point value: the best value is 0.0.
-
-    return the mean square error
-
-    Parameters
-    ----------
-    y_true : array-like
-
-    y_pred : array-like
-
-    Returns
-    -------
-    loss : float
-
-    """
-    y_true, y_pred = check_arrays(y_true, y_pred)
-    return np.linalg.norm(y_pred - y_true) ** 2
-
-
 def hinge_loss(y_true, pred_decision, pos_label=1, neg_label=-1):
     """
     Cumulated hinge loss (non-regularized).
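The removed helper summed the squared errors (``np.linalg.norm(y_pred - y_true) ** 2``) instead of averaging them, which is why it was labelled incorrect. A quick numeric check with arbitrary values showing the factor-of-n difference:

    import numpy as np
    from sklearn.metrics import mean_squared_error

    y_true = np.array([3.0, -0.5, 2.0, 7.0])
    y_pred = np.array([2.5, 0.0, 2.0, 8.0])

    cumulated = np.linalg.norm(y_pred - y_true) ** 2  # what mean_square_error returned (~1.5)
    mean = mean_squared_error(y_true, y_pred)          # mean of the squared errors (0.375)

    print(cumulated, mean)
    print(np.allclose(cumulated / len(y_true), mean))  # they differ exactly by the sample count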
