
Commit 43d1aa4

COSMIT pep8
Parent commit: 55959ed


6 files changed: 30 additions (+), 26 deletions (-)


sklearn/hmm.py

Lines changed: 7 additions & 6 deletions
@@ -952,12 +952,13 @@ class GMMHMM(_BaseHMM):
     >>> GMMHMM(n_components=2, n_mix=10, covariance_type='diag')
     ... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
     GMMHMM(algorithm='viterbi', covariance_type='diag', covars_prior=0.01,
-        gmms=[GMM(covariance_type=None, init_params='wmc', min_covar=0.001,
-        n_components=10, n_init=1, n_iter=100, params='wmc', random_state=None,
-        thresh=0.01), GMM(covariance_type=None, init_params='wmc', min_covar=0.001,
-        n_components=10, n_init=1, n_iter=100, params='wmc', random_state=None,
-        thresh=0.01)], n_components=2, n_mix=10, random_state=None, startprob=None,
-        startprob_prior=1.0, transmat=None, transmat_prior=1.0)
+        gmms=[GMM(covariance_type=None, init_params='wmc', min_covar=0.001,
+        n_components=10, n_init=1, n_iter=100, params='wmc', random_state=None,
+        thresh=0.01), GMM(covariance_type=None, init_params='wmc',
+        min_covar=0.001, n_components=10, n_init=1, n_iter=100, params='wmc',
+        random_state=None, thresh=0.01)], n_components=2, n_mix=10,
+        random_state=None, startprob=None, startprob_prior=1.0, transmat=None,
+        transmat_prior=1.0)
 
     See Also
     --------
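
The only substantive change in this hunk is re-wrapping the expected GMMHMM repr so that no docstring line exceeds 79 characters. The doctest keeps passing because of the +NORMALIZE_WHITESPACE flag, which makes doctest treat any run of whitespace, newlines included, as a single space when comparing output. A minimal, self-contained sketch of that mechanism (the values are made up, not taken from the commit):

    import doctest

    def wrapped_output_example():
        """Wrapped expected output still matches a one-line repr.

        >>> list(range(6))  # doctest: +NORMALIZE_WHITESPACE
        [0, 1,
         2, 3,
         4, 5]
        """

    if __name__ == "__main__":
        # Expect zero failures: the newlines in the expected output are
        # normalized away before comparison with '[0, 1, 2, 3, 4, 5]'.
        print(doctest.testmod())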

sklearn/mixture/dpgmm.py

Lines changed: 6 additions & 7 deletions
@@ -17,7 +17,6 @@
 
 from ..utils import check_random_state
 from ..utils.extmath import norm
-from ..utils import deprecated
 from .. import cluster
 from .gmm import GMM
 
@@ -203,7 +202,7 @@ def __init__(self, n_components=1, covariance_type='diag', alpha=1.0,
         super(DPGMM, self).__init__(n_components, covariance_type,
                                     random_state=random_state,
                                     thresh=thresh, min_covar=min_covar,
-                                    n_iter=n_iter, params=params,
+                                    n_iter=n_iter, params=params,
                                     init_params=init_params)
 
     def _get_precisions(self):
@@ -480,7 +479,7 @@ def fit(self, X, **kwargs):
         A initialization step is performed before entering the em
         algorithm. If you want to avoid this step, set the keyword
         argument init_params to the empty string '' when when creating
-        the object. Likewise, if you would like just to do an
+        the object. Likewise, if you would like just to do an
         initialization, set n_iter=0.
 
         Parameters
@@ -491,15 +490,15 @@ def fit(self, X, **kwargs):
         """
         self.random_state = check_random_state(self.random_state)
         if kwargs:
-            warnings.warn("Setting parameters in the 'fit' method is deprecated"
-                          "Set it on initialization instead.",
+            warnings.warn("Setting parameters in the 'fit' method is"
+                          "deprecated. Set it on initialization instead.",
                           DeprecationWarning)
         # initialisations for in case the user still adds parameters to fit
         # so things don't break
         if 'n_iter' in kwargs:
-            self.n_iter = kwargs['n_iter']
+            self.n_iter = kwargs['n_iter']
         if 'params' in kwargs:
-            self.params = kwargs['params']
+            self.params = kwargs['params']
         if 'init_params' in kwargs:
             self.init_params = kwargs['init_params']
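
The reworded warning above leans on implicit concatenation of adjacent string literals: Python joins them with no separator, which is how the old message came out reading "...is deprecatedSet it on initialization instead." A minimal sketch of the pitfall, using made-up strings rather than the library's wording:

    # Adjacent string literals are concatenated at compile time with no
    # separator added, so a message split across source lines must carry its
    # own spaces and punctuation at the join points.
    broken = ("this feature is deprecated"
              "use the new one instead")
    fixed = ("this feature is deprecated. "
             "Use the new one instead.")

    print(broken)  # this feature is deprecateduse the new one instead
    print(fixed)   # this feature is deprecated. Use the new one instead.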

sklearn/mixture/gmm.py

Lines changed: 12 additions & 11 deletions
@@ -196,8 +196,9 @@ class GMM(BaseEstimator):
     >>> obs = np.concatenate((np.random.randn(100, 1),
     ... 10 + np.random.randn(300, 1)))
     >>> g.fit(obs) # doctest: +NORMALIZE_WHITESPACE
-    GMM(covariance_type=None, init_params='wmc', min_covar=0.001, n_components=2,
-        n_init=1, n_iter=100, params='wmc', random_state=None, thresh=0.01)
+    GMM(covariance_type=None, init_params='wmc', min_covar=0.001,
+        n_components=2, n_init=1, n_iter=100, params='wmc',
+        random_state=None, thresh=0.01)
     >>> np.round(g.weights_, 2)
     array([ 0.75, 0.25])
     >>> np.round(g.means_, 2)
@@ -213,8 +214,9 @@ class GMM(BaseEstimator):
     >>> # Refit the model on new data (initial parameters remain the
     >>> # same), this time with an even split between the two modes.
     >>> g.fit(20 * [[0]] + 20 * [[10]]) # doctest: +NORMALIZE_WHITESPACE
-    GMM(covariance_type=None, init_params='wmc', min_covar=0.001, n_components=2,
-        n_init=1, n_iter=100, params='wmc', random_state=None, thresh=0.01)
+    GMM(covariance_type=None, init_params='wmc', min_covar=0.001,
+        n_components=2, n_init=1, n_iter=100, params='wmc',
+        random_state=None, thresh=0.01)
     >>> np.round(g.weights_, 2)
     array([ 0.5, 0.5])
@@ -435,15 +437,15 @@ def fit(self, X, **kwargs):
 
         A initialization step is performed before entering the em
         algorithm. If you want to avoid this step, set the keyword
-        argument init_params to the empty string '' when creating the
-        GMM object. Likewise, if you would like just to do an
+        argument init_params to the empty string '' when creating the
+        GMM object. Likewise, if you would like just to do an
         initialization, set n_iter=0.
 
         Parameters
         ----------
         X : array_like, shape (n, n_features)
             List of n_features-dimensional data points. Each row
-            corresponds to a single data point.
+            corresponds to a single data point.
         """
         ## initialization step
         X = np.asarray(X)
@@ -454,13 +456,13 @@ def fit(self, X, **kwargs):
                 'GMM estimation with %s components, but got only %s samples' %
                 (self.n_components, X.shape[0]))
         if kwargs:
-            warnings.warn("Setting parameters in the 'fit' method is deprecated"
-                          "Set it on initialization instead.",
+            warnings.warn("Setting parameters in the 'fit' method is"
+                          "deprecated. Set it on initialization instead.",
                           DeprecationWarning)
         # initialisations for in case the user still adds parameters to fit
         # so things don't break
         if 'n_iter' in kwargs:
-            self.n_iter = kwargs['n_iter']
+            self.n_iter = kwargs['n_iter']
         if 'n_init' in kwargs:
             if kwargs['n_init'] < 1:
                 raise ValueError('GMM estimation requires at least one run')
@@ -470,7 +472,6 @@ def fit(self, X, **kwargs):
             self.params = kwargs['params']
         if 'init_params' in kwargs:
             self.init_params = kwargs['init_params']
-
 
         max_log_prob = - np.infty
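
The docstring context above states the contract these hunks touch: an initialization step runs before EM unless init_params is set to the empty string, n_iter=0 performs only that initialization, and passing parameters to fit() now triggers a DeprecationWarning pointing users to the constructor. A rough usage sketch under those assumptions (parameter names are taken from the repr in the diff and the module layout of this revision; treat it as illustrative, not a verified script):

    import numpy as np
    from sklearn.mixture import GMM  # module layout at the time of this commit

    obs = np.concatenate((np.random.randn(100, 1),
                          10 + np.random.randn(300, 1)))

    # Preferred: configure the estimator at construction time, then fit.
    g = GMM(n_components=2, n_iter=100)
    g.fit(obs)

    # Passing keyword arguments to fit() instead, e.g. g.fit(obs, n_iter=10),
    # is what the hunks above turn into a clearer DeprecationWarning.

    # Per the docstring: n_iter=0 runs only the initialization step, and
    # init_params='' skips initialization, reusing the current parameters.
    g_init_only = GMM(n_components=2, n_iter=0)
    g_init_only.fit(obs)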

sklearn/mixture/tests/test_gmm.py

Lines changed: 2 additions & 2 deletions
@@ -228,7 +228,7 @@ def test_train_degenerate(self, params='wmc'):
         X = rng.randn(100, self.n_features)
         X.T[1:] = 0
         g = self.model(n_components=2, covariance_type=self.covariance_type,
-                       random_state=rng, min_covar=1e-3, n_iter=5,
+                       random_state=rng, min_covar=1e-3, n_iter=5,
                        init_params=params)
         g.fit(X)
         trainll = g.score(X)
@@ -241,7 +241,7 @@ def test_train_1d(self, params='wmc'):
         X = rng.randn(100, 1)
         #X.T[1:] = 0
         g = self.model(n_components=2, covariance_type=self.covariance_type,
-                       random_state=rng, min_covar=1e-7, n_iter=5,
+                       random_state=rng, min_covar=1e-7, n_iter=5,
                        init_params=params)
         g.fit(X)
         trainll = g.score(X)

sklearn/neighbors/base.py

Lines changed: 2 additions & 0 deletions
@@ -16,12 +16,14 @@
 from ..metrics import pairwise_distances
 from ..utils import safe_asarray, atleast2d_or_csr
 
+
 class NeighborsWarning(UserWarning):
     pass
 
 # Make sure that NeighborsWarning are displayed more than once
 warnings.simplefilter("always", NeighborsWarning)
 
+
 def warn_equidistant():
     msg = ("kneighbors: neighbor k+1 and neighbor k have the same "
            "distance: results will be dependent on data order.")

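Both added lines apply the PEP 8 rule of two blank lines before a top-level definition (pycodestyle E302). The surrounding context also shows why the module installs an "always" filter for NeighborsWarning: by default Python reports a given warning only once per call site. A small sketch of that behaviour with a made-up warning class, not the library's code:

    import warnings

    class DemoWarning(UserWarning):
        pass

    # Without this filter, the second call below would be suppressed, because
    # the default filter shows each distinct warning only once per location.
    warnings.simplefilter("always", DemoWarning)

    def warn_demo():
        warnings.warn("neighbors are equidistant (demo message)", DemoWarning)

    warn_demo()
    warn_demo()  # reported again thanks to the "always" filter
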
sklearn/utils/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -11,6 +11,7 @@
 # Make sure that DeprecationWarning get printed
 warnings.simplefilter("always", DeprecationWarning)
 
+
 class deprecated(object):
     """Decorator to mark a function or class as deprecated.

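The single added blank line is the same two-blank-lines rule, here before the deprecated class. For context, that class is a decorator that emits a DeprecationWarning when the wrapped object is used; a generic, hypothetical sketch of the pattern (not sklearn's actual implementation), paired with the "always" filter installed just above so the warning is not shown only once:

    import functools
    import warnings

    warnings.simplefilter("always", DeprecationWarning)

    def deprecated_sketch(func):
        """Wrap func so that every call emits a DeprecationWarning."""
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            warnings.warn("%s is deprecated" % func.__name__,
                          DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)
        return wrapped

    @deprecated_sketch
    def old_helper(x):
        return x + 1

    old_helper(1)  # still works, but warns on every call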