
Commit 1bc1536

Pushing the docs to dev/ for branch: master, commit 9358a94aa7c8bb887bd64ae156e0d0a7ee637d1c
1 parent 2de5fb4 commit 1bc1536

1,293 files changed: +4945 −4750 lines changed

Binary file not shown.

dev/_downloads/215c560d29193ab9b0a495609bc74802/plot_monotonic_constraints.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-"from sklearn.experimental import enable_hist_gradient_boosting # noqa\nfrom sklearn.ensemble import HistGradientBoostingRegressor\nfrom sklearn.inspection import plot_partial_dependence\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\nprint(__doc__)\n\nrng = np.random.RandomState(0)\n\nn_samples = 5000\nf_0 = rng.rand(n_samples) # positive correlation with y\nf_1 = rng.rand(n_samples) # negative correlation with y\nX = np.c_[f_0, f_1]\nnoise = rng.normal(loc=0.0, scale=0.01, size=n_samples)\ny = (5 * f_0 + np.sin(10 * np.pi * f_0) -\n 5 * f_1 - np.cos(10 * np.pi * f_1) +\n noise)\n\nfig, ax = plt.subplots()\n\n\n# Without any constraint\ngbdt = HistGradientBoostingRegressor()\ngbdt.fit(X, y)\ndisp = plot_partial_dependence(\n gbdt, X, features=[0, 1],\n line_kw={'linewidth': 4, 'label': 'unconstrained'},\n ax=ax)\n\n# With positive and negative constraints\ngbdt = HistGradientBoostingRegressor(monotonic_cst=[1, -1])\ngbdt.fit(X, y)\n\nplot_partial_dependence(\n gbdt, X, features=[0, 1],\n feature_names=('First feature\\nPositive constraint',\n 'Second feature\\nNegtive constraint'),\n line_kw={'linewidth': 4, 'label': 'constrained'},\n ax=disp.axes_)\n\nfor f_idx in (0, 1):\n disp.axes_[0, f_idx].plot(X[:, f_idx], y, 'o', alpha=.3, zorder=-1)\n disp.axes_[0, f_idx].set_ylim(-6, 6)\n\nplt.legend()\nfig.suptitle(\"Monotonic constraints illustration\")\n\nplt.show()"
+"from sklearn.experimental import enable_hist_gradient_boosting # noqa\nfrom sklearn.ensemble import HistGradientBoostingRegressor\nfrom sklearn.inspection import plot_partial_dependence\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\nprint(__doc__)\n\nrng = np.random.RandomState(0)\n\nn_samples = 5000\nf_0 = rng.rand(n_samples) # positive correlation with y\nf_1 = rng.rand(n_samples) # negative correlation with y\nX = np.c_[f_0, f_1]\nnoise = rng.normal(loc=0.0, scale=0.01, size=n_samples)\ny = (5 * f_0 + np.sin(10 * np.pi * f_0) -\n 5 * f_1 - np.cos(10 * np.pi * f_1) +\n noise)\n\nfig, ax = plt.subplots()\n\n\n# Without any constraint\ngbdt = HistGradientBoostingRegressor()\ngbdt.fit(X, y)\ndisp = plot_partial_dependence(\n gbdt,\n X,\n features=[0, 1],\n line_kw={\"linewidth\": 4, \"label\": \"unconstrained\", \"color\": \"tab:blue\"},\n ax=ax,\n)\n\n# With positive and negative constraints\ngbdt = HistGradientBoostingRegressor(monotonic_cst=[1, -1])\ngbdt.fit(X, y)\n\nplot_partial_dependence(\n gbdt,\n X,\n features=[0, 1],\n feature_names=(\n \"First feature\\nPositive constraint\",\n \"Second feature\\nNegtive constraint\",\n ),\n line_kw={\"linewidth\": 4, \"label\": \"constrained\", \"color\": \"tab:orange\"},\n ax=disp.axes_,\n)\n\nfor f_idx in (0, 1):\n disp.axes_[0, f_idx].plot(\n X[:, f_idx], y, \"o\", alpha=0.3, zorder=-1, color=\"tab:green\"\n )\n disp.axes_[0, f_idx].set_ylim(-6, 6)\n\nplt.legend()\nfig.suptitle(\"Monotonic constraints illustration\")\n\nplt.show()"
 ]
 }
 ],
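The substantive change in this hunk is the explicit "color" entries in line_kw (the rest is reformatting). Because the second plot_partial_dependence call overlays the constrained model onto the first display's axes, each call otherwise starts from Matplotlib's default color cycle and the two curves can come out indistinguishable. A minimal sketch of the overlay pattern, with placeholder model_a/model_b estimators that are not part of this commit:

# Sketch only: model_a and model_b stand for any two fitted regressors
# and X for the training data. The first call returns a
# PartialDependenceDisplay; passing its axes_ to the second call draws
# both curves on the same subplots, with colors pinned explicitly so
# the legend stays unambiguous.
from sklearn.inspection import plot_partial_dependence

disp = plot_partial_dependence(
    model_a, X, features=[0, 1],
    line_kw={"label": "model A", "color": "tab:blue"},
)
plot_partial_dependence(
    model_b, X, features=[0, 1],
    line_kw={"label": "model B", "color": "tab:orange"},
    ax=disp.axes_,
)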

dev/_downloads/21b82d82985712b5de6347f382c77c86/plot_partial_dependence.ipynb

Lines changed: 2 additions & 2 deletions
@@ -80,7 +80,7 @@
 },
 "outputs": [],
 "source": [
-"import matplotlib.pyplot as plt\nfrom sklearn.inspection import partial_dependence\nfrom sklearn.inspection import plot_partial_dependence\n\nprint('Computing partial dependence plots...')\ntic = time()\nfeatures = ['MedInc', 'AveOccup', 'HouseAge', 'AveRooms']\ndisplay = plot_partial_dependence(\n est, X_train, features, kind=\"both\", subsample=50,\n n_jobs=3, grid_resolution=20\n)\nprint(f\"done in {time() - tic:.3f}s\")\ndisplay.figure_.suptitle(\n 'Partial dependence of house value on non-___location features\\n'\n 'for the California housing dataset, with MLPRegressor'\n)\ndisplay.figure_.subplots_adjust(hspace=0.3)"
+"import matplotlib.pyplot as plt\nfrom sklearn.inspection import partial_dependence\nfrom sklearn.inspection import plot_partial_dependence\n\nprint('Computing partial dependence plots...')\ntic = time()\nfeatures = ['MedInc', 'AveOccup', 'HouseAge', 'AveRooms']\ndisplay = plot_partial_dependence(\n est, X_train, features, kind=\"both\", subsample=50,\n n_jobs=3, grid_resolution=20, random_state=0\n)\nprint(f\"done in {time() - tic:.3f}s\")\ndisplay.figure_.suptitle(\n 'Partial dependence of house value on non-___location features\\n'\n 'for the California housing dataset, with MLPRegressor'\n)\ndisplay.figure_.subplots_adjust(hspace=0.3)"
 ]
 },
 {
@@ -116,7 +116,7 @@
 },
 "outputs": [],
 "source": [
-"print('Computing partial dependence plots...')\ntic = time()\ndisplay = plot_partial_dependence(\n est, X_train, features, kind=\"both\", subsample=50,\n n_jobs=3, grid_resolution=20\n)\nprint(f\"done in {time() - tic:.3f}s\")\ndisplay.figure_.suptitle(\n 'Partial dependence of house value on non-___location features\\n'\n 'for the California housing dataset, with Gradient Boosting'\n)\ndisplay.figure_.subplots_adjust(wspace=0.4, hspace=0.3)"
+"print('Computing partial dependence plots...')\ntic = time()\ndisplay = plot_partial_dependence(\n est, X_train, features, kind=\"both\", subsample=50,\n n_jobs=3, grid_resolution=20, random_state=0\n)\nprint(f\"done in {time() - tic:.3f}s\")\ndisplay.figure_.suptitle(\n 'Partial dependence of house value on non-___location features\\n'\n 'for the California housing dataset, with Gradient Boosting'\n)\ndisplay.figure_.subplots_adjust(wspace=0.4, hspace=0.3)"
 ]
 },
 {
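Both hunks in this file add random_state=0. With kind="both" and subsample=50, plot_partial_dependence overlays the average partial dependence on ICE curves computed for a randomly chosen subset of 50 rows, so pinning the seed makes the rendered figures reproducible across doc builds. A small self-contained sketch of the same idea, on toy data and an estimator that are not from this commit:

# Sketch: with subsample, the ICE lines are drawn for a random subset
# of rows; fixing random_state pins which rows get shown.
from sklearn.datasets import make_regression
from sklearn.linear_model import LinearRegression
from sklearn.inspection import plot_partial_dependence

X, y = make_regression(n_samples=200, n_features=4, random_state=0)
est = LinearRegression().fit(X, y)

# Repeated calls with the same random_state select the same 50 rows.
disp = plot_partial_dependence(
    est, X, features=[0], kind="both", subsample=50, random_state=0
)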

dev/_downloads/4f07b03421908788913e044918d8ed1e/plot_release_highlights_0_23_0.py

Lines changed: 6 additions & 3 deletions
@@ -138,10 +138,13 @@
 
 disp = plot_partial_dependence(
     gbdt_no_cst, X, features=[0], feature_names=['feature 0'],
-    line_kw={'linewidth': 4, 'label': 'unconstrained'})
+    line_kw={'linewidth': 4, 'label': 'unconstrained', "color": "tab:blue"})
 plot_partial_dependence(gbdt_cst, X, features=[0],
-    line_kw={'linewidth': 4, 'label': 'constrained'}, ax=disp.axes_)
-disp.axes_[0, 0].plot(X[:, 0], y, 'o', alpha=.5, zorder=-1, label='samples')
+    line_kw={'linewidth': 4, 'label': 'constrained', "color": "tab:orange"},
+    ax=disp.axes_)
+disp.axes_[0, 0].plot(
+    X[:, 0], y, 'o', alpha=.5, zorder=-1, label='samples', color="tab:green"
+)
 disp.axes_[0, 0].set_ylim(-3, 3); disp.axes_[0, 0].set_xlim(-1, 1)
 plt.legend()
 plt.show()
Binary file not shown.

dev/_downloads/923fcad5e07de1ce7dc8dcbd7327c178/plot_release_highlights_0_23_0.ipynb

Lines changed: 1 addition & 1 deletion
@@ -87,7 +87,7 @@
 },
 "outputs": [],
 "source": [
-"import numpy as np\nfrom matplotlib import pyplot as plt\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.inspection import plot_partial_dependence\nfrom sklearn.experimental import enable_hist_gradient_boosting # noqa\nfrom sklearn.ensemble import HistGradientBoostingRegressor\n\nn_samples = 500\nrng = np.random.RandomState(0)\nX = rng.randn(n_samples, 2)\nnoise = rng.normal(loc=0.0, scale=0.01, size=n_samples)\ny = (5 * X[:, 0] + np.sin(10 * np.pi * X[:, 0]) - noise)\n\ngbdt_no_cst = HistGradientBoostingRegressor().fit(X, y)\ngbdt_cst = HistGradientBoostingRegressor(monotonic_cst=[1, 0]).fit(X, y)\n\ndisp = plot_partial_dependence(\n gbdt_no_cst, X, features=[0], feature_names=['feature 0'],\n line_kw={'linewidth': 4, 'label': 'unconstrained'})\nplot_partial_dependence(gbdt_cst, X, features=[0],\n line_kw={'linewidth': 4, 'label': 'constrained'}, ax=disp.axes_)\ndisp.axes_[0, 0].plot(X[:, 0], y, 'o', alpha=.5, zorder=-1, label='samples')\ndisp.axes_[0, 0].set_ylim(-3, 3); disp.axes_[0, 0].set_xlim(-1, 1)\nplt.legend()\nplt.show()"
+"import numpy as np\nfrom matplotlib import pyplot as plt\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.inspection import plot_partial_dependence\nfrom sklearn.experimental import enable_hist_gradient_boosting # noqa\nfrom sklearn.ensemble import HistGradientBoostingRegressor\n\nn_samples = 500\nrng = np.random.RandomState(0)\nX = rng.randn(n_samples, 2)\nnoise = rng.normal(loc=0.0, scale=0.01, size=n_samples)\ny = (5 * X[:, 0] + np.sin(10 * np.pi * X[:, 0]) - noise)\n\ngbdt_no_cst = HistGradientBoostingRegressor().fit(X, y)\ngbdt_cst = HistGradientBoostingRegressor(monotonic_cst=[1, 0]).fit(X, y)\n\ndisp = plot_partial_dependence(\n gbdt_no_cst, X, features=[0], feature_names=['feature 0'],\n line_kw={'linewidth': 4, 'label': 'unconstrained', \"color\": \"tab:blue\"})\nplot_partial_dependence(gbdt_cst, X, features=[0],\n line_kw={'linewidth': 4, 'label': 'constrained', \"color\": \"tab:orange\"},\n ax=disp.axes_)\ndisp.axes_[0, 0].plot(\n X[:, 0], y, 'o', alpha=.5, zorder=-1, label='samples', color=\"tab:green\"\n)\ndisp.axes_[0, 0].set_ylim(-3, 3); disp.axes_[0, 0].set_xlim(-1, 1)\nplt.legend()\nplt.show()"
 ]
 },
 {

dev/_downloads/9e22207e9bd6485b95f32783b59d9a80/plot_monotonic_constraints.py

Lines changed: 19 additions & 9 deletions
@@ -45,23 +45,33 @@
 gbdt = HistGradientBoostingRegressor()
 gbdt.fit(X, y)
 disp = plot_partial_dependence(
-    gbdt, X, features=[0, 1],
-    line_kw={'linewidth': 4, 'label': 'unconstrained'},
-    ax=ax)
+    gbdt,
+    X,
+    features=[0, 1],
+    line_kw={"linewidth": 4, "label": "unconstrained", "color": "tab:blue"},
+    ax=ax,
+)
 
 # With positive and negative constraints
 gbdt = HistGradientBoostingRegressor(monotonic_cst=[1, -1])
 gbdt.fit(X, y)
 
 plot_partial_dependence(
-    gbdt, X, features=[0, 1],
-    feature_names=('First feature\nPositive constraint',
-                   'Second feature\nNegtive constraint'),
-    line_kw={'linewidth': 4, 'label': 'constrained'},
-    ax=disp.axes_)
+    gbdt,
+    X,
+    features=[0, 1],
+    feature_names=(
+        "First feature\nPositive constraint",
+        "Second feature\nNegtive constraint",
+    ),
+    line_kw={"linewidth": 4, "label": "constrained", "color": "tab:orange"},
+    ax=disp.axes_,
+)
 
 for f_idx in (0, 1):
-    disp.axes_[0, f_idx].plot(X[:, f_idx], y, 'o', alpha=.3, zorder=-1)
+    disp.axes_[0, f_idx].plot(
+        X[:, f_idx], y, "o", alpha=0.3, zorder=-1, color="tab:green"
+    )
     disp.axes_[0, f_idx].set_ylim(-6, 6)
 
 plt.legend()
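Not part of this commit, but a quick way to convince yourself the constraints hold: with monotonic_cst=[1, -1], the constrained model's predictions are non-decreasing in the first feature and non-increasing in the second whenever the other feature is held fixed. A hypothetical check reusing the script's gbdt (the constrained model), f_1, and np:

# Hypothetical sanity check, assuming the constrained gbdt from above:
# sweep f_0 over a grid with f_1 held at its median; predictions must
# be non-decreasing because of the positive constraint on feature 0.
grid = np.linspace(0, 1, 100)
X_sweep = np.c_[grid, np.full_like(grid, np.median(f_1))]
assert np.all(np.diff(gbdt.predict(X_sweep)) >= 0)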
