@@ -41,21 +41,19 @@ class _BaseVoting(TransformerMixin, _BaseHeterogeneousEnsemble):
 
     @property
     def _weights_not_none(self):
-        """Get the weights of not `None` estimators"""
+        """Get the weights of not `None` estimators."""
         if self.weights is None:
             return None
         return [w for est, w in zip(self.estimators, self.weights)
                 if est[1] not in (None, 'drop')]
 
     def _predict(self, X):
-        """Collect results from clf.predict calls. """
+        """Collect results from clf.predict calls."""
         return np.asarray([est.predict(X) for est in self.estimators_]).T
 
     @abstractmethod
     def fit(self, X, y, sample_weight=None):
-        """
-        common fit operations.
-        """
+        """Get common fit operations."""
        names, clfs = self._validate_estimators()
 
        if (self.weights is not None and
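Note on the two helpers touched in the hunk above: `_predict` stacks one column of predictions per fitted estimator, and `_weights_not_none` keeps only the weights whose estimators were not dropped. A rough standalone sketch of the same shapes, using made-up numpy arrays and a fake estimator list rather than real fitted estimators:

import numpy as np

# hypothetical per-estimator label predictions for 4 samples
per_est = [np.array([0, 1, 1, 0]),   # estimator 'a'
           np.array([0, 1, 0, 0]),   # estimator 'b'
           np.array([1, 1, 1, 0])]   # estimator 'c'
stacked = np.asarray(per_est).T      # shape (n_samples, n_estimators) == (4, 3)

# weights are aligned with the (name, estimator) pairs; 'drop' entries are skipped
estimators = [('a', 'est_a'), ('b', 'drop'), ('c', 'est_c')]
weights = [2, 5, 1]
kept = [w for est, w in zip(estimators, weights)
        if est[1] not in (None, 'drop')]   # -> [2, 1]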
@@ -90,7 +88,7 @@ class VotingClassifier(ClassifierMixin, _BaseVoting):
 
     Parameters
     ----------
-    estimators : list of (string, estimator) tuples
+    estimators : list of (str, estimator) tuples
         Invoking the ``fit`` method on the ``VotingClassifier`` will fit clones
         of those original estimators that will be stored in the class attribute
         ``self.estimators_``. An estimator can be set to ``'drop'``
@@ -138,6 +136,10 @@ class VotingClassifier(ClassifierMixin, _BaseVoting):
     classes_ : array-like, shape (n_predictions,)
         The classes labels.
 
+    See Also
+    --------
+    VotingRegressor: Prediction voting regressor.
+
     Examples
     --------
     >>> import numpy as np
@@ -172,10 +174,6 @@ class VotingClassifier(ClassifierMixin, _BaseVoting):
     [1 1 1 2 2 2]
     >>> print(eclf3.transform(X).shape)
     (6, 6)
-
-    See also
-    --------
-    VotingRegressor: Prediction voting regressor.
     """
 
     def __init__(self, estimators, voting='hard', weights=None, n_jobs=None,
@@ -187,7 +185,7 @@ def __init__(self, estimators, voting='hard', weights=None, n_jobs=None,
         self.flatten_transform = flatten_transform
 
     def fit(self, X, y, sample_weight=None):
-        """ Fit the estimators.
+        """Fit the estimators.
 
         Parameters
         ----------
@@ -206,6 +204,7 @@ def fit(self, X, y, sample_weight=None):
         Returns
         -------
         self : object
+
         """
         check_classification_targets(y)
         if isinstance(y, np.ndarray) and len(y.shape) > 1 and y.shape[1] > 1:
@@ -223,7 +222,7 @@ def fit(self, X, y, sample_weight=None):
         return super().fit(X, transformed_y, sample_weight)
 
     def predict(self, X):
-        """ Predict class labels for X.
+        """Predict class labels for X.
 
         Parameters
         ----------
@@ -235,7 +234,6 @@ def predict(self, X):
         maj : array-like, shape (n_samples,)
             Predicted class labels.
         """
-
         check_is_fitted(self)
         if self.voting == 'soft':
             maj = np.argmax(self.predict_proba(X), axis=1)
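The hunk above only shows the 'soft' branch, where the predicted class is the argmax of the averaged probabilities. As a rough illustration of what the weighted majority ('hard') vote amounts to for a single sample (not the library's exact code; the labels and weights below are made up and assumed integer-encoded):

import numpy as np

labels = np.array([0, 1, 1])            # one prediction per estimator
weights = np.array([0.2, 0.5, 0.3])     # matching estimator weights
# sum the weights per class and pick the heaviest class
maj = np.argmax(np.bincount(labels, weights=weights))   # -> 1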
@@ -252,11 +250,11 @@ def predict(self, X):
         return maj
 
     def _collect_probas(self, X):
-        """Collect results from clf.predict calls. """
+        """Collect results from clf.predict calls."""
         return np.asarray([clf.predict_proba(X) for clf in self.estimators_])
 
     def _predict_proba(self, X):
-        """Predict class probabilities for X in 'soft' voting """
+        """Predict class probabilities for X in 'soft' voting."""
         check_is_fitted(self)
         avg = np.average(self._collect_probas(X), axis=0,
                          weights=self._weights_not_none)
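The weighted average in this last hunk is taken over the stacked predict_proba outputs, whose shape follows `_collect_probas`: (n_estimators, n_samples, n_classes). A minimal standalone sketch with invented probabilities and weights, assuming two estimators, two samples, and two classes:

import numpy as np

probas = np.array([[[0.9, 0.1], [0.4, 0.6]],    # estimator 1
                   [[0.6, 0.4], [0.2, 0.8]]])   # estimator 2
weights = [1, 3]
avg = np.average(probas, axis=0, weights=weights)   # shape (n_samples, n_classes)
labels = np.argmax(avg, axis=1)                     # soft-voting prediction, here [0, 1]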