 iris.data = iris.data[perm]
 iris.target = iris.target[perm]
 
+
 def test_libsvm_parameters():
     """
     Test parameters on classes that make use of libsvm.
     """
 
     clf = svm.SVC(kernel='linear').fit(X, Y)
-    assert_array_equal(clf.dual_coef_, [[ 0.25, -.25]])
+    assert_array_equal(clf.dual_coef_, [[0.25, -.25]])
     assert_array_equal(clf.support_, [1, 3])
     assert_array_equal(clf.support_vectors_, (X[1], X[3]))
     assert_array_equal(clf.intercept_, [0.])
@@ -77,7 +78,7 @@ def test_precomputed():
     KT = np.zeros_like(KT)
     for i in range(len(T)):
         for j in clf.support_:
-            KT[i,j] = np.dot(T[i], X[j])
+            KT[i, j] = np.dot(T[i], X[j])
 
     pred = clf.predict(KT)
     assert_array_equal(pred, true_result)
@@ -113,7 +114,7 @@ def test_precomputed():
     K = np.zeros_like(K)
     for i in range(len(iris.data)):
         for j in clf.support_:
-            K[i,j] = np.dot(iris.data[i], iris.data[j])
+            K[i, j] = np.dot(iris.data[i], iris.data[j])
 
     pred = clf.predict(K)
     assert_almost_equal(np.mean(pred == iris.target), .99, decimal=2)
@@ -149,8 +150,8 @@ def test_SVR():
                               decimal=3)
     assert_raises(NotImplementedError, lambda: clf.coef_)
     assert_array_almost_equal(clf.support_vectors_, X)
-    assert_array_almost_equal(clf.intercept_, [ 1.49997261])
-    assert_array_almost_equal(pred, [ 1.10001274, 1.86682485, 1.73300377])
+    assert_array_almost_equal(clf.intercept_, [1.49997261])
+    assert_array_almost_equal(pred, [1.10001274, 1.86682485, 1.73300377])
 
 
 def test_oneclass():
@@ -163,7 +164,7 @@ def test_oneclass():
 
     assert_array_almost_equal(pred, [1, -1, -1])
     assert_array_almost_equal(clf.intercept_, [-1.351], decimal=3)
-    assert_array_almost_equal(clf.dual_coef_, [[ 0.750, 0.749, 0.749, 0.750]],
+    assert_array_almost_equal(clf.dual_coef_, [[0.750, 0.749, 0.749, 0.750]],
                               decimal=3)
     assert_raises(NotImplementedError, lambda: clf.coef_)
 
@@ -201,8 +202,8 @@ def test_probability():
     T = [[0, 0, 0, 0],
          [2, 2, 2, 2]]
     assert_array_almost_equal(clf.predict_proba(T),
-                              [[ 0.993, 0.003, 0.002],
-                               [ 0.740, 0.223, 0.035]],
+                              [[0.993, 0.003, 0.002],
+                               [0.740, 0.223, 0.035]],
                               decimal=2)
 
     assert_almost_equal(clf.predict_proba(T),
@@ -236,11 +237,11 @@ def test_decision_function():
     dec = np.empty(n_class * (n_class - 1) / 2)
     p = 0
     for i in range(n_class):
-        for j in range(i+1, n_class):
-            coef1 = clf.dual_coef_[j-1]
+        for j in range(i + 1, n_class):
+            coef1 = clf.dual_coef_[j - 1]
             coef2 = clf.dual_coef_[i]
-            idx1 = slice(sv_start[i], sv_start[i+1])
-            idx2 = slice(sv_start[j], sv_start[j+1])
+            idx1 = slice(sv_start[i], sv_start[i + 1])
+            idx2 = slice(sv_start[j], sv_start[j + 1])
             s = np.dot(coef1[idx1], kvalue[idx1]) + \
                 np.dot(coef2[idx2], kvalue[idx2]) + \
                 clf.intercept_[p]
@@ -260,10 +261,10 @@ def test_weight():
     # so all predicted values belong to class 2
     assert_array_almost_equal(clf.predict(X), [2] * 6)
 
-    X_, y_ = test_dataset_classif(n_samples=200, n_features=100, param=[5,1],
+    X_, y_ = test_dataset_classif(n_samples=200, n_features=100, param=[5, 1],
                                   seed=0)
     for clf in (linear_model.LogisticRegression(), svm.LinearSVC(), svm.SVC()):
-        clf.fit(X_[:180], y_[:180], class_weight={0:5})
+        clf.fit(X_[:180], y_[:180], class_weight={0: 5})
         y_pred = clf.predict(X_[180:])
         assert np.sum(y_pred == y_[180:]) >= 11
 
@@ -276,7 +277,7 @@ def test_sample_weights():
     clf.fit(X, Y)
     assert_array_equal(clf.predict(X[2]), [1.])
 
-    sample_weight = [.1]*3 + [10]*3
+    sample_weight = [.1] * 3 + [10] * 3
     clf.fit(X, Y, sample_weight=sample_weight)
     assert_array_equal(clf.predict(X[2]), [2.])
 
@@ -298,7 +299,7 @@ def test_auto_weight():
     assert_array_almost_equal(clf.coef_, clf_auto.coef_, 6)
 
     # build an very very imbalanced dataset out of iris data
-    X_0 = X[y == 0,:]
+    X_0 = X[y == 0, :]
     y_0 = y[y == 0]
 
     X_imbalanced = np.vstack([X] + [X_0] * 10)
@@ -319,13 +320,13 @@ def test_error():
     Test that it gives proper exception on deficient input
     """
     # impossible value of C
-    assert_raises(ValueError, svm.SVC(C=-1).fit, X, Y)
+    assert_raises(ValueError, svm.SVC(C=-1).fit, X, Y)
 
     # impossible value of nu
     clf = svm.NuSVC(nu=0.0)
     assert_raises(ValueError, clf.fit, X, Y)
 
-    Y2 = Y[:-1] # wrong dimensions for labels
+    Y2 = Y[:-1]  # wrong dimensions for labels
     assert_raises(ValueError, clf.fit, X, Y2)
     assert_raises(AssertionError, svm.SVC, X, Y2)
 
@@ -368,6 +369,7 @@ def test_LinearSVC_iris():
     clf = svm.LinearSVC().fit(iris.data, iris.target)
     assert np.mean(clf.predict(iris.data) == iris.target) > 0.95
 
+
 def test_dense_liblinear_intercept_handling(classifier=svm.LinearSVC):
     """
     Test that dense liblinear honours intercept_scaling param
@@ -430,9 +432,9 @@ def liblinear_prediction_function(farray, clf, labels):
     a, b = weights.shape
     weights = weights.reshape((b, a))
 
-    D = np.column_stack([farray,np.ones(farray.shape[0])])
+    D = np.column_stack([farray, np.ones(farray.shape[0])])
 
-    H = np.dot(D,weights)
+    H = np.dot(D, weights)
 
     return H.argmax(1)
 