
Commit b499743

DOC: update examples, use matplotlib.pyplot instead of pylab
1 parent 8cdd2be commit b499743

14 files changed (+139, -137 lines)
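The change applied across these example scripts is the same everywhere: drop the `pylab` compatibility layer (and the `from matplotlib import pyplot as plt` spelling) in favour of an explicit `import matplotlib.pyplot as plt` placed with the other imports. A minimal sketch of the before/after style — illustrative only, not a line taken from this diff:

# Before: pylab bundles pyplot and numpy functions into one flat namespace
#   import pylab as pl
#   pl.plot(...); pl.show()

# After: import pyplot explicitly and keep numpy separate
import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(0, 2 * np.pi, 100)
plt.plot(x, np.sin(x), label='sin(x)')
plt.legend(loc='upper right')
plt.show()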

examples/ensemble/plot_adaboost_hastie_10_2.py

Lines changed: 4 additions & 3 deletions
@@ -26,11 +26,12 @@
 # License: BSD 3 clause

 import numpy as np
+import matplotlib.pyplot as plt
+
 from sklearn import datasets
 from sklearn.tree import DecisionTreeClassifier
 from sklearn.metrics import zero_one_loss
 from sklearn.ensemble import AdaBoostClassifier
-import pylab as pl


 n_estimators = 400
@@ -64,7 +65,7 @@
                               algorithm="SAMME.R")
 ada_real.fit(X_train, y_train)

-fig = pl.figure()
+fig = plt.figure()
 ax = fig.add_subplot(111)

 ax.plot([1, n_estimators], [dt_stump_err] * 2, 'k-',
@@ -108,4 +109,4 @@
 leg = ax.legend(loc='upper right', fancybox=True)
 leg.get_frame().set_alpha(0.7)

-pl.show()
+plt.show()

examples/ensemble/plot_adaboost_multiclass.py

Lines changed: 27 additions & 27 deletions
@@ -31,7 +31,7 @@

 from sklearn.externals.six.moves import zip

-import pylab as pl
+import matplotlib.pyplot as plt

 from sklearn.datasets import make_gaussian_quantiles
 from sklearn.ensemble import AdaBoostClassifier
@@ -74,36 +74,36 @@

 n_trees = xrange(1, len(bdt_discrete) + 1)

-pl.figure(figsize=(15, 5))
+plt.figure(figsize=(15, 5))

-pl.subplot(131)
-pl.plot(n_trees, discrete_test_errors, c='black', label='SAMME')
-pl.plot(n_trees, real_test_errors, c='black',
+plt.subplot(131)
+plt.plot(n_trees, discrete_test_errors, c='black', label='SAMME')
+plt.plot(n_trees, real_test_errors, c='black',
          linestyle='dashed', label='SAMME.R')
-pl.legend()
-pl.ylim(0.18, 0.62)
-pl.ylabel('Test Error')
-pl.xlabel('Number of Trees')
-
-pl.subplot(132)
-pl.plot(n_trees, bdt_discrete.estimator_errors_, "b", label='SAMME', alpha=.5)
-pl.plot(n_trees, bdt_real.estimator_errors_, "r", label='SAMME.R', alpha=.5)
-pl.legend()
-pl.ylabel('Error')
-pl.xlabel('Number of Trees')
-pl.ylim((.2,
+plt.legend()
+plt.ylim(0.18, 0.62)
+plt.ylabel('Test Error')
+plt.xlabel('Number of Trees')
+
+plt.subplot(132)
+plt.plot(n_trees, bdt_discrete.estimator_errors_, "b", label='SAMME', alpha=.5)
+plt.plot(n_trees, bdt_real.estimator_errors_, "r", label='SAMME.R', alpha=.5)
+plt.legend()
+plt.ylabel('Error')
+plt.xlabel('Number of Trees')
+plt.ylim((.2,
          max(bdt_real.estimator_errors_.max(),
              bdt_discrete.estimator_errors_.max()) * 1.2))
-pl.xlim((-20, len(bdt_discrete) + 20))
+plt.xlim((-20, len(bdt_discrete) + 20))

-pl.subplot(133)
-pl.plot(n_trees, bdt_discrete.estimator_weights_, "b", label='SAMME')
-pl.legend()
-pl.ylabel('Weight')
-pl.xlabel('Number of Trees')
-pl.ylim((0, bdt_discrete.estimator_weights_.max() * 1.2))
-pl.xlim((-20, len(bdt_discrete) + 20))
+plt.subplot(133)
+plt.plot(n_trees, bdt_discrete.estimator_weights_, "b", label='SAMME')
+plt.legend()
+plt.ylabel('Weight')
+plt.xlabel('Number of Trees')
+plt.ylim((0, bdt_discrete.estimator_weights_.max() * 1.2))
+plt.xlim((-20, len(bdt_discrete) + 20))

 # prevent overlapping y-axis labels
-pl.subplots_adjust(wspace=0.25)
-pl.show()
+plt.subplots_adjust(wspace=0.25)
+plt.show()

examples/ensemble/plot_adaboost_regression.py

Lines changed: 10 additions & 11 deletions
@@ -15,6 +15,7 @@
 print(__doc__)

 import numpy as np
+import matplotlib.pyplot as plt

 # Create a the dataset
 rng = np.random.RandomState(1)
@@ -38,14 +39,12 @@
 y_2 = clf_2.predict(X)

 # Plot the results
-import pylab as pl
-
-pl.figure()
-pl.scatter(X, y, c="k", label="training samples")
-pl.plot(X, y_1, c="g", label="n_estimators=1", linewidth=2)
-pl.plot(X, y_2, c="r", label="n_estimators=300", linewidth=2)
-pl.xlabel("data")
-pl.ylabel("target")
-pl.title("Boosted Decision Tree Regression")
-pl.legend()
-pl.show()
+plt.figure()
+plt.scatter(X, y, c="k", label="training samples")
+plt.plot(X, y_1, c="g", label="n_estimators=1", linewidth=2)
+plt.plot(X, y_2, c="r", label="n_estimators=300", linewidth=2)
+plt.xlabel("data")
+plt.ylabel("target")
+plt.title("Boosted Decision Tree Regression")
+plt.legend()
+plt.show()

examples/ensemble/plot_adaboost_twoclass.py

Lines changed: 20 additions & 20 deletions
@@ -18,8 +18,8 @@
 """
 print(__doc__)

-import pylab as pl
 import numpy as np
+import matplotlib.pyplot as plt

 from sklearn.ensemble import AdaBoostClassifier
 from sklearn.tree import DecisionTreeClassifier
@@ -47,47 +47,47 @@
 plot_step = 0.02
 class_names = "AB"

-pl.figure(figsize=(10, 5))
+plt.figure(figsize=(10, 5))

 # Plot the decision boundaries
-pl.subplot(121)
+plt.subplot(121)
 x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
 y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
 xx, yy = np.meshgrid(np.arange(x_min, x_max, plot_step),
                      np.arange(y_min, y_max, plot_step))

 Z = bdt.predict(np.c_[xx.ravel(), yy.ravel()])
 Z = Z.reshape(xx.shape)
-cs = pl.contourf(xx, yy, Z, cmap=pl.cm.Paired)
-pl.axis("tight")
+cs = plt.contourf(xx, yy, Z, cmap=plt.cm.Paired)
+plt.axis("tight")

 # Plot the training points
 for i, n, c in zip(range(2), class_names, plot_colors):
     idx = np.where(y == i)
-    pl.scatter(X[idx, 0], X[idx, 1],
-               c=c, cmap=pl.cm.Paired,
+    plt.scatter(X[idx, 0], X[idx, 1],
+                c=c, cmap=plt.cm.Paired,
                label="Class %s" % n)
-pl.xlim(x_min, x_max)
-pl.ylim(y_min, y_max)
-pl.legend(loc='upper right')
-pl.xlabel("Decision Boundary")
+plt.xlim(x_min, x_max)
+plt.ylim(y_min, y_max)
+plt.legend(loc='upper right')
+plt.xlabel("Decision Boundary")

 # Plot the two-class decision scores
 twoclass_output = bdt.decision_function(X)
 plot_range = (twoclass_output.min(), twoclass_output.max())
-pl.subplot(122)
+plt.subplot(122)
 for i, n, c in zip(range(2), class_names, plot_colors):
-    pl.hist(twoclass_output[y == i],
+    plt.hist(twoclass_output[y == i],
             bins=10,
             range=plot_range,
             facecolor=c,
             label='Class %s' % n,
             alpha=.5)
-x1, x2, y1, y2 = pl.axis()
-pl.axis((x1, x2, y1, y2 * 1.2))
-pl.legend(loc='upper right')
-pl.ylabel('Samples')
-pl.xlabel('Decision Scores')
+x1, x2, y1, y2 = plt.axis()
+plt.axis((x1, x2, y1, y2 * 1.2))
+plt.legend(loc='upper right')
+plt.ylabel('Samples')
+plt.xlabel('Decision Scores')

-pl.subplots_adjust(wspace=0.25)
-pl.show()
+plt.subplots_adjust(wspace=0.25)
+plt.show()

examples/ensemble/plot_bias_variance.py

Lines changed: 1 addition & 1 deletion
@@ -67,7 +67,7 @@
 # License: BSD 3 clause

 import numpy as np
-from matplotlib import pyplot as plt
+import matplotlib.pyplot as plt

 from sklearn.ensemble import BaggingRegressor
 from sklearn.tree import DecisionTreeRegressor
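For reference: `from matplotlib import pyplot as plt` and `import matplotlib.pyplot as plt` bind the very same module object, so this file (and plot_gradient_boosting_oob.py below) only changes the spelling of the import to match the convention used in the other examples. A quick check, assuming matplotlib is installed:

import matplotlib.pyplot as plt_a
from matplotlib import pyplot as plt_b

# Both names point at the same module object.
assert plt_a is plt_b
print(plt_a.__name__)  # matplotlib.pyplot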

examples/ensemble/plot_forest_importances.py

Lines changed: 7 additions & 7 deletions
@@ -13,7 +13,7 @@
 print(__doc__)

 import numpy as np
-import pylab as pl
+import matplotlib.pyplot as plt

 from sklearn.datasets import make_classification
 from sklearn.ensemble import ExtraTreesClassifier
@@ -45,10 +45,10 @@
     print("%d. feature %d (%f)" % (f + 1, indices[f], importances[indices[f]]))

 # Plot the feature importances of the forest
-pl.figure()
-pl.title("Feature importances")
-pl.bar(range(10), importances[indices],
+plt.figure()
+plt.title("Feature importances")
+plt.bar(range(10), importances[indices],
        color="r", yerr=std[indices], align="center")
-pl.xticks(range(10), indices)
-pl.xlim([-1, 10])
-pl.show()
+plt.xticks(range(10), indices)
+plt.xlim([-1, 10])
+plt.show()

examples/ensemble/plot_forest_importances_faces.py

Lines changed: 4 additions & 4 deletions
@@ -13,7 +13,7 @@
 print(__doc__)

 from time import time
-import pylab as pl
+import matplotlib.pyplot as plt

 from sklearn.datasets import fetch_olivetti_faces
 from sklearn.ensemble import ExtraTreesClassifier
@@ -44,6 +44,6 @@
 importances = importances.reshape(data.images[0].shape)

 # Plot pixel importances
-pl.matshow(importances, cmap=pl.cm.hot)
-pl.title("Pixel importances with forests of trees")
-pl.show()
+plt.matshow(importances, cmap=plt.cm.hot)
+plt.title("Pixel importances with forests of trees")
+plt.show()

examples/ensemble/plot_forest_iris.py

Lines changed: 11 additions & 11 deletions
@@ -41,7 +41,7 @@
 print(__doc__)

 import numpy as np
-import pylab as pl
+import matplotlib.pyplot as plt

 from sklearn import clone
 from sklearn.datasets import load_iris
@@ -54,7 +54,7 @@
 n_classes = 3
 n_estimators = 30
 plot_colors = "ryb"
-cmap = pl.cm.RdYlBu
+cmap = plt.cm.RdYlBu
 plot_step = 0.02  # fine step width for decision surface contours
 plot_step_coarser = 0.5  # step widths for coarse classifier guesses
 RANDOM_SEED = 13  # fix the seed on each iteration
@@ -101,10 +101,10 @@
         model_details += " with {} estimators".format(len(model.estimators_))
     print( model_details + " with features", pair, "has a score of", scores )

-    pl.subplot(3, 4, plot_idx)
+    plt.subplot(3, 4, plot_idx)
     if plot_idx <= len(models):
         # Add a title at the top of each column
-        pl.title(model_title)
+        plt.title(model_title)

     # Now plot the decision boundary using a fine mesh as input to a
     # filled contour plot
@@ -118,7 +118,7 @@
     if isinstance(model, DecisionTreeClassifier):
         Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
         Z = Z.reshape(xx.shape)
-        cs = pl.contourf(xx, yy, Z, cmap=cmap)
+        cs = plt.contourf(xx, yy, Z, cmap=cmap)
     else:
         # Choose alpha blend level with respect to the number of estimators
         # that are in use (noting that AdaBoost can use fewer estimators
@@ -127,26 +127,26 @@
         for tree in model.estimators_:
             Z = tree.predict(np.c_[xx.ravel(), yy.ravel()])
             Z = Z.reshape(xx.shape)
-            cs = pl.contourf(xx, yy, Z, alpha=estimator_alpha, cmap=cmap)
+            cs = plt.contourf(xx, yy, Z, alpha=estimator_alpha, cmap=cmap)

     # Build a coarser grid to plot a set of ensemble classifications
     # to show how these are different to what we see in the decision
     # surfaces. These points are regularly space and do not have a black outline
     xx_coarser, yy_coarser = np.meshgrid(np.arange(x_min, x_max, plot_step_coarser),
                                          np.arange(y_min, y_max, plot_step_coarser))
     Z_points_coarser = model.predict(np.c_[xx_coarser.ravel(), yy_coarser.ravel()]).reshape(xx_coarser.shape)
-    cs_points = pl.scatter(xx_coarser, yy_coarser, s=15, c=Z_points_coarser, cmap=cmap, edgecolors="none")
+    cs_points = plt.scatter(xx_coarser, yy_coarser, s=15, c=Z_points_coarser, cmap=cmap, edgecolors="none")

     # Plot the training points, these are clustered together and have a
     # black outline
     for i, c in zip(xrange(n_classes), plot_colors):
         idx = np.where(y == i)
-        pl.scatter(X[idx, 0], X[idx, 1], c=c, label=iris.target_names[i],
+        plt.scatter(X[idx, 0], X[idx, 1], c=c, label=iris.target_names[i],
                    cmap=cmap)

     plot_idx += 1  # move on to the next plot in sequence

-pl.suptitle("Classifiers on feature subsets of the Iris dataset")
-pl.axis("tight")
+plt.suptitle("Classifiers on feature subsets of the Iris dataset")
+plt.axis("tight")

-pl.show()
+plt.show()

examples/ensemble/plot_gradient_boosting_oob.py

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@
 # License: BSD 3 clause

 import numpy as np
-from matplotlib import pyplot as plt
+import matplotlib.pyplot as plt

 from sklearn import ensemble
 from sklearn.cross_validation import KFold

examples/ensemble/plot_gradient_boosting_quantile.py

Lines changed: 14 additions & 14 deletions
@@ -8,9 +8,9 @@
 """

 import numpy as np
-import pylab as pl
-from sklearn.ensemble import GradientBoostingRegressor
+import matplotlib.pyplot as plt

+from sklearn.ensemble import GradientBoostingRegressor

 np.random.seed(1)

@@ -63,17 +63,17 @@ def f(x):

 # Plot the function, the prediction and the 95% confidence interval based on
 # the MSE
-fig = pl.figure()
-pl.plot(xx, f(xx), 'g:', label=u'$f(x) = x\,\sin(x)$')
-pl.plot(X, y, 'b.', markersize=10, label=u'Observations')
-pl.plot(xx, y_pred, 'r-', label=u'Prediction')
-pl.plot(xx, y_upper, 'k-')
-pl.plot(xx, y_lower, 'k-')
-pl.fill(np.concatenate([xx, xx[::-1]]),
+fig = plt.figure()
+plt.plot(xx, f(xx), 'g:', label=u'$f(x) = x\,\sin(x)$')
+plt.plot(X, y, 'b.', markersize=10, label=u'Observations')
+plt.plot(xx, y_pred, 'r-', label=u'Prediction')
+plt.plot(xx, y_upper, 'k-')
+plt.plot(xx, y_lower, 'k-')
+plt.fill(np.concatenate([xx, xx[::-1]]),
         np.concatenate([y_upper, y_lower[::-1]]),
         alpha=.5, fc='b', ec='None', label='95% prediction interval')
-pl.xlabel('$x$')
-pl.ylabel('$f(x)$')
-pl.ylim(-10, 20)
-pl.legend(loc='upper left')
-pl.show()
+plt.xlabel('$x$')
+plt.ylabel('$f(x)$')
+plt.ylim(-10, 20)
+plt.legend(loc='upper left')
+plt.show()
