
Commit b33c413

Pushing the docs to 0.19/ for branch: 0.19.X, commit b661a9c81930429cba4a56af291ce2bf8c59f8c9
1 parent be46fe1 commit b33c413


2,022 files changed, +10828 -10266 lines changed


0.19/.buildinfo

+1 -1
@@ -1,4 +1,4 @@
 # Sphinx build info version 1
 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: feae17352b9a1e879fecaeccb69c70be
+config: 9b34422868d711fa4d5d6dcf1d5646c1
 tags: 645f666f9bcd5a90fca523b33c5a78b7
Binary file not shown (1.86 KB).
Binary file not shown (1.83 KB).

0.19/_downloads/document_classification_20newsgroups.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/document_clustering.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/grid_search_text_feature_extraction.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/hashing_vs_dict_vectorizer.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/hetero_feature_union.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_adaboost_hastie_10_2.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_adaboost_multiclass.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_adaboost_regression.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_adaboost_twoclass.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_adjusted_for_chance_measures.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_affinity_propagation.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_agglomerative_clustering.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_agglomerative_clustering_metrics.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_all_scaling.ipynb

+1 -1
@@ -208,7 +208,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_ard.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_bayesian_ridge.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_beta_divergence.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_bias_variance.ipynb

+2 -2
@@ -26,7 +26,7 @@
    },
    "outputs": [],
    "source": [
-
"print(__doc__)\n\n# Author: Gilles Louppe <[email protected]>\n# License: BSD 3 clause\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn.ensemble import BaggingRegressor\nfrom sklearn.tree import DecisionTreeRegressor\n\n# Settings\nn_repeat = 50 # Number of iterations for computing expectations\nn_train = 50 # Size of the training set\nn_test = 1000 # Size of the test set\nnoise = 0.1 # Standard deviation of the noise\nnp.random.seed(0)\n\n# Change this for exploring the bias-variance decomposition of other\n# estimators. This should work well for estimators with high variance (e.g.,\n# decision trees or KNN), but poorly for estimators with low variance (e.g.,\n# linear models).\nestimators = [(\"Tree\", DecisionTreeRegressor()),\n (\"Bagging(Tree)\", BaggingRegressor(DecisionTreeRegressor()))]\n\nn_estimators = len(estimators)\n\n# Generate data\ndef f(x):\n x = x.ravel()\n\n return np.exp(-x ** 2) + 1.5 * np.exp(-(x - 2) ** 2)\n\ndef generate(n_samples, noise, n_repeat=1):\n X = np.random.rand(n_samples) * 10 - 5\n X = np.sort(X)\n\n if n_repeat == 1:\n y = f(X) + np.random.normal(0.0, noise, n_samples)\n else:\n y = np.zeros((n_samples, n_repeat))\n\n for i in range(n_repeat):\n y[:, i] = f(X) + np.random.normal(0.0, noise, n_samples)\n\n X = X.reshape((n_samples, 1))\n\n return X, y\n\nX_train = []\ny_train = []\n\nfor i in range(n_repeat):\n X, y = generate(n_samples=n_train, noise=noise)\n X_train.append(X)\n y_train.append(y)\n\nX_test, y_test = generate(n_samples=n_test, noise=noise, n_repeat=n_repeat)\n\n# Loop over estimators to compare\nfor n, (name, estimator) in enumerate(estimators):\n # Compute predictions\n y_predict = np.zeros((n_test, n_repeat))\n\n for i in range(n_repeat):\n estimator.fit(X_train[i], y_train[i])\n y_predict[:, i] = estimator.predict(X_test)\n\n # Bias^2 + Variance + Noise decomposition of the mean squared error\n y_error = np.zeros(n_test)\n\n for i in range(n_repeat):\n for j in range(n_repeat):\n y_error += (y_test[:, j] - y_predict[:, i]) ** 2\n\n y_error /= (n_repeat * n_repeat)\n\n y_noise = np.var(y_test, axis=1)\n y_bias = (f(X_test) - np.mean(y_predict, axis=1)) ** 2\n y_var = np.var(y_predict, axis=1)\n\n print(\"{0}: {1:.4f} (error) = {2:.4f} (bias^2) \"\n \" + {3:.4f} (var) + {4:.4f} (noise)\".format(name,\n np.mean(y_error),\n np.mean(y_bias),\n np.mean(y_var),\n np.mean(y_noise)))\n\n # Plot figures\n plt.subplot(2, n_estimators, n + 1)\n plt.plot(X_test, f(X_test), \"b\", label=\"$f(x)$\")\n plt.plot(X_train[0], y_train[0], \".b\", label=\"LS ~ $y = f(x)+noise$\")\n\n for i in range(n_repeat):\n if i == 0:\n plt.plot(X_test, y_predict[:, i], \"r\", label=\"$\\^y(x)$\")\n else:\n plt.plot(X_test, y_predict[:, i], \"r\", alpha=0.05)\n\n plt.plot(X_test, np.mean(y_predict, axis=1), \"c\",\n label=\"$\\mathbb{E}_{LS} \\^y(x)$\")\n\n plt.xlim([-5, 5])\n plt.title(name)\n\n if n == 0:\n plt.legend(loc=\"upper left\", prop={\"size\": 11})\n\n plt.subplot(2, n_estimators, n_estimators + n + 1)\n plt.plot(X_test, y_error, \"r\", label=\"$error(x)$\")\n plt.plot(X_test, y_bias, \"b\", label=\"$bias^2(x)$\"),\n plt.plot(X_test, y_var, \"g\", label=\"$variance(x)$\"),\n plt.plot(X_test, y_noise, \"c\", label=\"$noise(x)$\")\n\n plt.xlim([-5, 5])\n plt.ylim([0, 0.1])\n\n if n == 0:\n plt.legend(loc=\"upper left\", prop={\"size\": 11})\n\nplt.show()"
+
"print(__doc__)\n\n# Author: Gilles Louppe <[email protected]>\n# License: BSD 3 clause\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn.ensemble import BaggingRegressor\nfrom sklearn.tree import DecisionTreeRegressor\n\n# Settings\nn_repeat = 50 # Number of iterations for computing expectations\nn_train = 50 # Size of the training set\nn_test = 1000 # Size of the test set\nnoise = 0.1 # Standard deviation of the noise\nnp.random.seed(0)\n\n# Change this for exploring the bias-variance decomposition of other\n# estimators. This should work well for estimators with high variance (e.g.,\n# decision trees or KNN), but poorly for estimators with low variance (e.g.,\n# linear models).\nestimators = [(\"Tree\", DecisionTreeRegressor()),\n (\"Bagging(Tree)\", BaggingRegressor(DecisionTreeRegressor()))]\n\nn_estimators = len(estimators)\n\n\n# Generate data\ndef f(x):\n x = x.ravel()\n\n return np.exp(-x ** 2) + 1.5 * np.exp(-(x - 2) ** 2)\n\n\ndef generate(n_samples, noise, n_repeat=1):\n X = np.random.rand(n_samples) * 10 - 5\n X = np.sort(X)\n\n if n_repeat == 1:\n y = f(X) + np.random.normal(0.0, noise, n_samples)\n else:\n y = np.zeros((n_samples, n_repeat))\n\n for i in range(n_repeat):\n y[:, i] = f(X) + np.random.normal(0.0, noise, n_samples)\n\n X = X.reshape((n_samples, 1))\n\n return X, y\n\n\nX_train = []\ny_train = []\n\nfor i in range(n_repeat):\n X, y = generate(n_samples=n_train, noise=noise)\n X_train.append(X)\n y_train.append(y)\n\nX_test, y_test = generate(n_samples=n_test, noise=noise, n_repeat=n_repeat)\n\nplt.figure(figsize=(10, 8))\n\n# Loop over estimators to compare\nfor n, (name, estimator) in enumerate(estimators):\n # Compute predictions\n y_predict = np.zeros((n_test, n_repeat))\n\n for i in range(n_repeat):\n estimator.fit(X_train[i], y_train[i])\n y_predict[:, i] = estimator.predict(X_test)\n\n # Bias^2 + Variance + Noise decomposition of the mean squared error\n y_error = np.zeros(n_test)\n\n for i in range(n_repeat):\n for j in range(n_repeat):\n y_error += (y_test[:, j] - y_predict[:, i]) ** 2\n\n y_error /= (n_repeat * n_repeat)\n\n y_noise = np.var(y_test, axis=1)\n y_bias = (f(X_test) - np.mean(y_predict, axis=1)) ** 2\n y_var = np.var(y_predict, axis=1)\n\n print(\"{0}: {1:.4f} (error) = {2:.4f} (bias^2) \"\n \" + {3:.4f} (var) + {4:.4f} (noise)\".format(name,\n np.mean(y_error),\n np.mean(y_bias),\n np.mean(y_var),\n np.mean(y_noise)))\n\n # Plot figures\n plt.subplot(2, n_estimators, n + 1)\n plt.plot(X_test, f(X_test), \"b\", label=\"$f(x)$\")\n plt.plot(X_train[0], y_train[0], \".b\", label=\"LS ~ $y = f(x)+noise$\")\n\n for i in range(n_repeat):\n if i == 0:\n plt.plot(X_test, y_predict[:, i], \"r\", label=\"$\\^y(x)$\")\n else:\n plt.plot(X_test, y_predict[:, i], \"r\", alpha=0.05)\n\n plt.plot(X_test, np.mean(y_predict, axis=1), \"c\",\n label=\"$\\mathbb{E}_{LS} \\^y(x)$\")\n\n plt.xlim([-5, 5])\n plt.title(name)\n\n if n == n_estimators - 1:\n plt.legend(loc=(1.1, .5))\n\n plt.subplot(2, n_estimators, n_estimators + n + 1)\n plt.plot(X_test, y_error, \"r\", label=\"$error(x)$\")\n plt.plot(X_test, y_bias, \"b\", label=\"$bias^2(x)$\"),\n plt.plot(X_test, y_var, \"g\", label=\"$variance(x)$\"),\n plt.plot(X_test, y_noise, \"c\", label=\"$noise(x)$\")\n\n plt.xlim([-5, 5])\n plt.ylim([0, 0.1])\n\n if n == n_estimators - 1:\n\n plt.legend(loc=(1.1, .5))\n\nplt.subplots_adjust(right=.75)\nplt.show()"
    ]
   }
  ],
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_bias_variance.py

+11 -4
@@ -88,12 +88,14 @@
 
 n_estimators = len(estimators)
 
+
 # Generate data
 def f(x):
     x = x.ravel()
 
     return np.exp(-x ** 2) + 1.5 * np.exp(-(x - 2) ** 2)
 
+
 def generate(n_samples, noise, n_repeat=1):
     X = np.random.rand(n_samples) * 10 - 5
     X = np.sort(X)
@@ -110,6 +112,7 @@ def generate(n_samples, noise, n_repeat=1):
 
     return X, y
 
+
 X_train = []
 y_train = []
 
@@ -120,6 +123,8 @@ def generate(n_samples, noise, n_repeat=1):
 
 X_test, y_test = generate(n_samples=n_test, noise=noise, n_repeat=n_repeat)
 
+plt.figure(figsize=(10, 8))
+
 # Loop over estimators to compare
 for n, (name, estimator) in enumerate(estimators):
     # Compute predictions
@@ -166,8 +171,8 @@ def generate(n_samples, noise, n_repeat=1):
     plt.xlim([-5, 5])
     plt.title(name)
 
-    if n == 0:
-        plt.legend(loc="upper left", prop={"size": 11})
+    if n == n_estimators - 1:
+        plt.legend(loc=(1.1, .5))
 
     plt.subplot(2, n_estimators, n_estimators + n + 1)
     plt.plot(X_test, y_error, "r", label="$error(x)$")
@@ -178,7 +183,9 @@ def generate(n_samples, noise, n_repeat=1):
     plt.xlim([-5, 5])
     plt.ylim([0, 0.1])
 
-    if n == 0:
-        plt.legend(loc="upper left", prop={"size": 11})
+    if n == n_estimators - 1:
+
+        plt.legend(loc=(1.1, .5))
 
+plt.subplots_adjust(right=.75)
 plt.show()
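
The plot_bias_variance.py change above reworks the example's figure layout: the canvas is enlarged with plt.figure(figsize=(10, 8)), the legend is drawn only for the last column of panels and anchored outside the axes at loc=(1.1, .5), and plt.subplots_adjust(right=.75) reserves room for it. A minimal sketch of that matplotlib pattern follows; it is illustrative only (made-up data, a simplified two-panel loop) and is not part of the commit.

# Illustrative sketch, not part of the commit: draw the legend outside the axes
# of the last panel and shrink the plotting area so it fits.
import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(-5, 5, 200)            # made-up data for illustration
plt.figure(figsize=(10, 8))            # larger canvas, as in the updated example

for n in range(2):                     # two columns of panels
    plt.subplot(1, 2, n + 1)
    plt.plot(x, np.exp(-x ** 2), "b", label="$f(x)$")
    plt.plot(x, 1.5 * np.exp(-(x - 2) ** 2), "r", label="estimate")
    if n == 1:                         # legend only on the last column,
        plt.legend(loc=(1.1, .5))      # anchored just outside the axes

plt.subplots_adjust(right=.75)         # leave horizontal space for the legend
plt.show()

Anchoring the legend with an (x, y) tuple in axes coordinates keeps it clear of the curves, which an in-axes "upper left" placement cannot guarantee.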

0.19/_downloads/plot_bicluster_newsgroups.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_birch_vs_minibatchkmeans.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_calibration.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_calibration_curve.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_calibration_multiclass.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_classification.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_classification_probability.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,

0.19/_downloads/plot_classifier_chain_yeast.ipynb

+1 -1
@@ -46,7 +46,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.2"
+   "version": "3.6.3"
   }
  },
  "nbformat": 4,
