@@ -95,7 +95,7 @@ Scikit-learn documentation for more information about this type of classifier.)
9595 >>> knn = KNeighborsClassifier()
9696 >>> knn.fit(iris_X_train, iris_y_train) # doctest: +NORMALIZE_WHITESPACE
9797 KNeighborsClassifier(algorithm='auto', leaf_size=30, metric='minkowski',
98- metric_params=None, n_jobs=1 , n_neighbors=5, p=2,
98+ metric_params=None, n_jobs=None , n_neighbors=5, p=2,
9999 weights='uniform')
100100 >>> knn.predict(iris_X_test)
101101 array([1, 2, 1, 0, 0, 0, 2, 1, 2, 0])
@@ -176,13 +176,16 @@ Linear models: :math:`y = X\beta + \epsilon`
176176 >>> from sklearn import linear_model
177177 >>> regr = linear_model.LinearRegression()
178178 >>> regr.fit(diabetes_X_train, diabetes_y_train)
179- LinearRegression(copy_X=True, fit_intercept=True, n_jobs=1, normalize=False)
179+ ... # doctest: +NORMALIZE_WHITESPACE
180+ LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None,
181+ normalize=False)
180182 >>> print(regr.coef_)
181183 [ 0.30349955 -237.63931533 510.53060544 327.73698041 -814.13170937
182184 492.81458798 102.84845219 184.60648906 743.51961675 76.09517222]
183185
184186 >>> # The mean square error
185- >>> np.mean((regr.predict(diabetes_X_test)-diabetes_y_test)**2)# doctest: +ELLIPSIS
187+ >>> np.mean((regr.predict(diabetes_X_test)-diabetes_y_test)**2)
188+ ... # doctest: +ELLIPSIS
186189 2004.56760268...
187190
188191 >>> # Explained variance score: 1 is perfect prediction
@@ -257,8 +260,11 @@ diabetes dataset rather than our synthetic data::
257260 >>> from __future__ import print_function
258261 >>> print([regr.set_params(alpha=alpha
259262 ... ).fit(diabetes_X_train, diabetes_y_train,
260- ... ).score(diabetes_X_test, diabetes_y_test) for alpha in alphas]) # doctest: +ELLIPSIS
261- [0.5851110683883..., 0.5852073015444..., 0.5854677540698..., 0.5855512036503..., 0.5830717085554..., 0.57058999437...]
263+ ... ).score(diabetes_X_test, diabetes_y_test)
264+ ... for alpha in alphas])
265+ ... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
266+ [0.5851110683883..., 0.5852073015444..., 0.5854677540698...,
267+ 0.5855512036503..., 0.5830717085554..., 0.57058999437...]
262268
263269
264270.. note ::
@@ -372,7 +378,7 @@ function or **logistic** function:
372378 >>> logistic.fit(iris_X_train, iris_y_train)
373379 LogisticRegression(C=100000.0, class_weight=None, dual=False,
374380 fit_intercept=True, intercept_scaling=1, max_iter=100,
375- multi_class='ovr', n_jobs=1 , penalty='l2', random_state=None,
381+ multi_class='ovr', n_jobs=None , penalty='l2', random_state=None,
376382 solver='liblinear', tol=0.0001, verbose=0, warm_start=False)
377383
378384This is known as :class: `LogisticRegression `.
0 commit comments