
Commit b253016

Pushing the docs to dev/ for branch: master, commit c84ad60c8cfb007d8ce158d24177355bda8dffa1
1 parent 6487fcb · commit b253016

File tree: 1,213 files changed (+3667 / −3667 lines)


dev/_downloads/1fdea43ae2c6f41584a8f0032d7ed247/plot_gradient_boosting_quantile.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
26  26        },
27  27        "outputs": [],
28  28        "source": [
29      -     (removed:)
"import numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn.ensemble import GradientBoostingRegressor\n\nnp.random.seed(1)\n\n\ndef f(x):\n \"\"\"The function to predict.\"\"\"\n return x * np.sin(x)\n\n#----------------------------------------------------------------------\n# First the noiseless case\nX = np.atleast_2d(np.random.uniform(0, 10.0, size=100)).T\nX = X.astype(np.float32)\n\n# Observations\ny = f(X).ravel()\n\ndy = 1.5 + 1.0 * np.random.random(y.shape)\nnoise = np.random.normal(0, dy)\ny += noise\ny = y.astype(np.float32)\n\n# Mesh the input space for evaluations of the real function, the prediction and\n# its MSE\nxx = np.atleast_2d(np.linspace(0, 10, 1000)).T\nxx = xx.astype(np.float32)\n\nalpha = 0.95\n\nclf = GradientBoostingRegressor(loss='quantile', alpha=alpha,\n n_estimators=250, max_depth=3,\n learning_rate=.1, min_samples_leaf=9,\n min_samples_split=9)\n\nclf.fit(X, y)\n\n# Make the prediction on the meshed x-axis\ny_upper = clf.predict(xx)\n\nclf.set_params(alpha=1.0 - alpha)\nclf.fit(X, y)\n\n# Make the prediction on the meshed x-axis\ny_lower = clf.predict(xx)\n\nclf.set_params(loss='ls')\nclf.fit(X, y)\n\n# Make the prediction on the meshed x-axis\ny_pred = clf.predict(xx)\n\n# Plot the function, the prediction and the 90% confidence interval based on\n# the MSE\nfig = plt.figure()\nplt.plot(xx, f(xx), 'g:', label=r'$f(x) = x\\,\\sin(x)$')\nplt.plot(X, y, 'b.', markersize=10, label=u'Observations')\nplt.plot(xx, y_pred, 'r-', label=u'Prediction')\nplt.plot(xx, y_upper, 'k-')\nplt.plot(xx, y_lower, 'k-')\nplt.fill(np.concatenate([xx, xx[::-1]]),\n np.concatenate([y_upper, y_lower[::-1]]),\n alpha=.5, fc='b', ec='None', label='90% prediction interval')\nplt.xlabel('$x$')\nplt.ylabel('$f(x)$')\nplt.ylim(-10, 20)\nplt.legend(loc='upper left')\nplt.show()"
    29  +     (added:)
"import numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn.ensemble import GradientBoostingRegressor\n\nnp.random.seed(1)\n\n\ndef f(x):\n \"\"\"The function to predict.\"\"\"\n return x * np.sin(x)\n\n#----------------------------------------------------------------------\n# First the noiseless case\nX = np.atleast_2d(np.random.uniform(0, 10.0, size=100)).T\nX = X.astype(np.float32)\n\n# Observations\ny = f(X).ravel()\n\ndy = 1.5 + 1.0 * np.random.random(y.shape)\nnoise = np.random.normal(0, dy)\ny += noise\ny = y.astype(np.float32)\n\n# Mesh the input space for evaluations of the real function, the prediction and\n# its MSE\nxx = np.atleast_2d(np.linspace(0, 10, 1000)).T\nxx = xx.astype(np.float32)\n\nalpha = 0.95\n\nclf = GradientBoostingRegressor(loss='quantile', alpha=alpha,\n n_estimators=250, max_depth=3,\n learning_rate=.1, min_samples_leaf=9,\n min_samples_split=9)\n\nclf.fit(X, y)\n\n# Make the prediction on the meshed x-axis\ny_upper = clf.predict(xx)\n\nclf.set_params(alpha=1.0 - alpha)\nclf.fit(X, y)\n\n# Make the prediction on the meshed x-axis\ny_lower = clf.predict(xx)\n\nclf.set_params(loss='ls')\nclf.fit(X, y)\n\n# Make the prediction on the meshed x-axis\ny_pred = clf.predict(xx)\n\n# Plot the function, the prediction and the 95% confidence interval based on\n# the MSE\nfig = plt.figure()\nplt.plot(xx, f(xx), 'g:', label=r'$f(x) = x\\,\\sin(x)$')\nplt.plot(X, y, 'b.', markersize=10, label=u'Observations')\nplt.plot(xx, y_pred, 'r-', label=u'Prediction')\nplt.plot(xx, y_upper, 'k-')\nplt.plot(xx, y_lower, 'k-')\nplt.fill(np.concatenate([xx, xx[::-1]]),\n np.concatenate([y_upper, y_lower[::-1]]),\n alpha=.5, fc='b', ec='None', label='95% prediction interval')\nplt.xlabel('$x$')\nplt.ylabel('$f(x)$')\nplt.ylim(-10, 20)\nplt.legend(loc='upper left')\nplt.show()"
30  30        ]
31  31        }
32  32    ],

dev/_downloads/4a0433ff6081bdaf198052ab22360ec8/plot_gradient_boosting_quantile.py

Lines changed: 2 additions & 2 deletions

@@ -61,7 +61,7 @@ def f(x):
61  61  # Make the prediction on the meshed x-axis
62  62  y_pred = clf.predict(xx)
63  63
64      -   # Plot the function, the prediction and the 90% confidence interval based on
    64  +   # Plot the function, the prediction and the 95% confidence interval based on
65  65  # the MSE
66  66  fig = plt.figure()
67  67  plt.plot(xx, f(xx), 'g:', label=r'$f(x) = x\,\sin(x)$')
@@ -71,7 +71,7 @@ def f(x):
71  71  plt.plot(xx, y_lower, 'k-')
72  72  plt.fill(np.concatenate([xx, xx[::-1]]),
73  73           np.concatenate([y_upper, y_lower[::-1]]),
74      -            alpha=.5, fc='b', ec='None', label='90% prediction interval')
    74  +            alpha=.5, fc='b', ec='None', label='95% prediction interval')
75  75  plt.xlabel('$x$')
76  76  plt.ylabel('$f(x)$')
77  77  plt.ylim(-10, 20)
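For context on the label changed in both files: the shaded band in this example is built from two separately fitted quantile models, one at quantile alpha and one at 1 − alpha. Below is a minimal sketch of that construction, not part of this commit, using made-up data and the same estimator settings as the example.

import numpy as np
from sklearn.ensemble import GradientBoostingRegressor

# Illustrative data (made up here); the example itself uses x*sin(x) plus noise.
rng = np.random.RandomState(0)
X = rng.uniform(0, 10, size=(200, 1))
y = X.ravel() * np.sin(X.ravel()) + rng.normal(scale=1.5, size=200)

alpha = 0.95  # upper bound = alpha quantile, lower bound = (1 - alpha) quantile
params = dict(n_estimators=250, max_depth=3, learning_rate=.1,
              min_samples_leaf=9, min_samples_split=9)

# One quantile regressor per bound of the band.
upper = GradientBoostingRegressor(loss='quantile', alpha=alpha, **params).fit(X, y)
lower = GradientBoostingRegressor(loss='quantile', alpha=1.0 - alpha, **params).fit(X, y)

xx = np.linspace(0, 10, 1000).reshape(-1, 1)
y_upper = upper.predict(xx)  # pointwise upper bound of the shaded band
y_lower = lower.predict(xx)  # pointwise lower bound of the shaded band

The plt.fill call in the example shades the region between y_lower and y_upper; that filled region is the band whose label is being changed here.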

dev/_downloads/scikit-learn-docs.pdf — binary file changed (−1.93 KB); not shown

dev/_images/iris.png — binary image changed (size deltas: 0, −93, and −198 bytes); not shown
