@@ -976,10 +976,26 @@ def test_non_uniform_weights_toy_edge_case_reg():
976976 y = [0 , 0 , 1 , 0 ]
977977 # ignore the first 2 training samples by setting their weight to 0
978978 sample_weight = [0 , 0 , 1 , 1 ]
979- for loss in ('ls ' , 'huber ' , 'lad' , 'quantile' ):
980- gb = GradientBoostingRegressor (n_estimators = 5 )
979+ for loss in ('huber ' , 'ls ' , 'lad' , 'quantile' ):
980+ gb = GradientBoostingRegressor (learning_rate = 1.0 , n_estimators = 2 , loss = loss )
981981 gb .fit (X , y , sample_weight = sample_weight )
982- assert_true (gb .predict ([[1 , 0 ]])[0 ] > 0.5 )
982+ assert_greater (gb .predict ([[1 , 0 ]])[0 ], 0.5 )
983+
984+
def test_non_uniform_weights_toy_min_weight_leaf():
    """Regression test for https://github.com/scikit-learn/scikit-learn/issues/4447

    Zero-weight samples must not count toward the min_weight_fraction_leaf
    constraint of the underlying trees.
    """
    X = [[1, 0],
         [1, 0],
         [1, 0],
         [0, 1],
         ]
    y = [0, 0, 1, 0]
    # ignore the first 2 training samples by setting their weight to 0
    sample_weight = [0, 0, 1, 1]
    gb = GradientBoostingRegressor(n_estimators=5,
                                   min_weight_fraction_leaf=0.1)
    gb.fit(X, y, sample_weight=sample_weight)
    # assert_greater (not assert_true on a comparison) for consistency with
    # the other sample-weight tests and for an informative failure message
    assert_greater(gb.predict([[1, 0]])[0], 0.5)
    # total effective weight is 2, so a 0.1 fraction must translate into an
    # absolute min_weight_leaf of 0.2 on the fitted trees' splitter
    assert_almost_equal(gb.estimators_[0, 0].splitter.min_weight_leaf, 0.2)
983999
9841000
9851001def test_non_uniform_weights_toy_edge_case_clf ():
0 commit comments