
Commit 319476e

Pushing the docs to _pst_preview/ for branch: new_web_theme, commit 6c97e77d561b9324fa2543d19d07d3b5751b3b28

1 parent 1bbee86 · commit 319476e

1,935 files changed: +10,782 −6,694 lines


_pst_preview/.buildinfo

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 # Sphinx build info version 1
 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 4713bc29eed92547457eff6ed9191854
+config: ea616b3889ddb7cceaa91d830b0a1b20
 tags: 645f666f9bcd5a90fca523b33c5a78b7

_pst_preview/_downloads/0785ea6d45bde062e5beedda88131215/plot_release_highlights_1_3_0.ipynb

Lines changed: 1 addition & 1 deletion
@@ -54,7 +54,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"## Missing values support in decision trees\nThe classes :class:`tree.DecisionTreeClassifier` and\n:class:`tree.DecisionTreeRegressor` now support missing values. For each potential\nthreshold on the non-missing data, the splitter will evaluate the split with all the\nmissing values going to the left node or the right node.\nMore details in the `User Guide <tree_missing_value_support>`.\n\n"
+"## Missing values support in decision trees\nThe classes :class:`tree.DecisionTreeClassifier` and\n:class:`tree.DecisionTreeRegressor` now support missing values. For each potential\nthreshold on the non-missing data, the splitter will evaluate the split with all the\nmissing values going to the left node or the right node.\nSee more details in the `User Guide <tree_missing_value_support>` or see\n`sphx_glr_auto_examples_ensemble_plot_hgbt_regression.py` for a usecase\nexample of this feature in :class:`~ensemble.HistGradientBoostingRegressor`.\n\n"
 ]
 },
 {
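The missing-values support this cell describes can be exercised in a few lines. A minimal sketch, assuming scikit-learn >= 1.3; the data values are illustrative:

import numpy as np
from sklearn.tree import DecisionTreeClassifier

# The splitter evaluates each candidate threshold twice, sending the np.nan
# samples first to the left child and then to the right child, and keeps the
# better of the two splits.
X = np.array([0, 1, 6, np.nan]).reshape(-1, 1)
y = [0, 0, 1, 1]

tree = DecisionTreeClassifier(random_state=0).fit(X, y)
print(tree.predict(X))  # NaN rows are also routed down the fitted tree at predict time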
Binary file not shown.

_pst_preview/_downloads/1e0968da80ca868bbdf21c1d0547f68c/plot_lle_digits.ipynb

Lines changed: 1 addition & 1 deletion
@@ -87,7 +87,7 @@
 },
 "outputs": [],
 "source": [
-"from sklearn.decomposition import TruncatedSVD\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis\nfrom sklearn.ensemble import RandomTreesEmbedding\nfrom sklearn.manifold import (\n MDS,\n TSNE,\n Isomap,\n LocallyLinearEmbedding,\n SpectralEmbedding,\n)\nfrom sklearn.neighbors import NeighborhoodComponentsAnalysis\nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.random_projection import SparseRandomProjection\n\nembeddings = {\n \"Random projection embedding\": SparseRandomProjection(\n n_components=2, random_state=42\n ),\n \"Truncated SVD embedding\": TruncatedSVD(n_components=2),\n \"Linear Discriminant Analysis embedding\": LinearDiscriminantAnalysis(\n n_components=2\n ),\n \"Isomap embedding\": Isomap(n_neighbors=n_neighbors, n_components=2),\n \"Standard LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"standard\"\n ),\n \"Modified LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"modified\"\n ),\n \"Hessian LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"hessian\"\n ),\n \"LTSA LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"ltsa\"\n ),\n \"MDS embedding\": MDS(n_components=2, n_init=1, max_iter=120, n_jobs=2),\n \"Random Trees embedding\": make_pipeline(\n RandomTreesEmbedding(n_estimators=200, max_depth=5, random_state=0),\n TruncatedSVD(n_components=2),\n ),\n \"Spectral embedding\": SpectralEmbedding(\n n_components=2, random_state=0, eigen_solver=\"arpack\"\n ),\n \"t-SNE embedding\": TSNE(\n n_components=2,\n n_iter=500,\n n_iter_without_progress=150,\n n_jobs=2,\n random_state=0,\n ),\n \"NCA embedding\": NeighborhoodComponentsAnalysis(\n n_components=2, init=\"pca\", random_state=0\n ),\n}"
+"from sklearn.decomposition import TruncatedSVD\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis\nfrom sklearn.ensemble import RandomTreesEmbedding\nfrom sklearn.manifold import (\n MDS,\n TSNE,\n Isomap,\n LocallyLinearEmbedding,\n SpectralEmbedding,\n)\nfrom sklearn.neighbors import NeighborhoodComponentsAnalysis\nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.random_projection import SparseRandomProjection\n\nembeddings = {\n \"Random projection embedding\": SparseRandomProjection(\n n_components=2, random_state=42\n ),\n \"Truncated SVD embedding\": TruncatedSVD(n_components=2),\n \"Linear Discriminant Analysis embedding\": LinearDiscriminantAnalysis(\n n_components=2\n ),\n \"Isomap embedding\": Isomap(n_neighbors=n_neighbors, n_components=2),\n \"Standard LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"standard\"\n ),\n \"Modified LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"modified\"\n ),\n \"Hessian LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"hessian\"\n ),\n \"LTSA LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"ltsa\"\n ),\n \"MDS embedding\": MDS(n_components=2, n_init=1, max_iter=120, n_jobs=2),\n \"Random Trees embedding\": make_pipeline(\n RandomTreesEmbedding(n_estimators=200, max_depth=5, random_state=0),\n TruncatedSVD(n_components=2),\n ),\n \"Spectral embedding\": SpectralEmbedding(\n n_components=2, random_state=0, eigen_solver=\"arpack\"\n ),\n \"t-SNE embedding\": TSNE(\n n_components=2,\n max_iter=500,\n n_iter_without_progress=150,\n n_jobs=2,\n random_state=0,\n ),\n \"NCA embedding\": NeighborhoodComponentsAnalysis(\n n_components=2, init=\"pca\", random_state=0\n ),\n}"
 ]
 },
 {
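The only difference between the two long strings above is the TSNE keyword: `n_iter=500` becomes `max_iter=500`. scikit-learn renamed the parameter (to my recollection the `n_iter` deprecation landed around release 1.5; treat the version number as an assumption). The updated call in isolation:

from sklearn.manifold import TSNE

# max_iter replaces the deprecated n_iter keyword; other values as in the example.
tsne = TSNE(
    n_components=2,
    max_iter=500,
    n_iter_without_progress=150,
    n_jobs=2,
    random_state=0,
)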

_pst_preview/_downloads/2da78c80da33b4e0d313b0a90b923ec8/plot_adaboost_regression.py

Lines changed: 4 additions & 0 deletions
@@ -9,6 +9,10 @@
 regressor. As the number of boosts is increased the regressor can fit more
 detail.
 
+See :ref:`sphx_glr_auto_examples_ensemble_plot_hgbt_regression.py` for an
+example showcasing the benefits of using more efficient regression models such
+as :class:`~ensemble.HistGradientBoostingRegressor`.
+
 .. [1] `H. Drucker, "Improving Regressors using Boosting Techniques", 1997.
    <https://citeseerx.ist.psu.edu/doc_view/pid/8d49e2dedb817f2c3330e74b63c5fc86d2399ce3>`_
 
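The added cross-reference points at :class:`~ensemble.HistGradientBoostingRegressor`. A minimal sketch of that model on a noisy 1D sinusoid; the data construction and hyperparameters are illustrative, not the example's exact setup:

import numpy as np
from sklearn.ensemble import HistGradientBoostingRegressor

rng = np.random.RandomState(1)
X = np.sort(5 * rng.rand(200, 1), axis=0)
y = np.sin(X).ravel() + rng.normal(0, 0.1, X.shape[0])

# Histogram-based gradient boosting bins the features, which makes it much
# faster than AdaBoost-on-trees on medium and large datasets.
hgbt = HistGradientBoostingRegressor(max_iter=100, random_state=0).fit(X, y)
print(hgbt.score(X, y))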

_pst_preview/_downloads/2f3ef774a6d7e52e1e6b7ccbb75d25f0/plot_gradient_boosting_quantile.py

Lines changed: 3 additions & 1 deletion
@@ -4,7 +4,9 @@
 =====================================================
 
 This example shows how quantile regression can be used to create prediction
-intervals.
+intervals. See :ref:`sphx_glr_auto_examples_ensemble_plot_hgbt_regression.py`
+for an example showcasing some other features of
+:class:`~ensemble.HistGradientBoostingRegressor`.
 
 """
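The docstring's prediction-interval idea in a few lines. The example itself uses :class:`~ensemble.GradientBoostingRegressor` with a quantile loss; this sketch instead uses the histogram-based variant the new cross-reference highlights, with illustrative data and quantile levels:

import numpy as np
from sklearn.ensemble import HistGradientBoostingRegressor

rng = np.random.RandomState(0)
X = rng.uniform(0, 10, size=(500, 1))
y = np.sin(X).ravel() + rng.normal(scale=0.3, size=500)

# One model per quantile: the 5% and 95% predictions bound a 90% interval,
# and the 50% prediction is a median regression line.
models = {
    q: HistGradientBoostingRegressor(loss="quantile", quantile=q, random_state=0).fit(X, y)
    for q in (0.05, 0.5, 0.95)
}
lower, upper = models[0.05].predict(X), models[0.95].predict(X)
print(f"empirical coverage: {np.mean((y >= lower) & (y <= upper)):.2f}")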

_pst_preview/_downloads/3316f301d7c7651c033565a5ae51c295/plot_release_highlights_1_3_0.py

Lines changed: 3 additions & 1 deletion
@@ -88,7 +88,9 @@
 # :class:`tree.DecisionTreeRegressor` now support missing values. For each potential
 # threshold on the non-missing data, the splitter will evaluate the split with all the
 # missing values going to the left node or the right node.
-# More details in the :ref:`User Guide <tree_missing_value_support>`.
+# See more details in the :ref:`User Guide <tree_missing_value_support>` or see
+# :ref:`sphx_glr_auto_examples_ensemble_plot_hgbt_regression.py` for a usecase
+# example of this feature in :class:`~ensemble.HistGradientBoostingRegressor`.
 import numpy as np
 from sklearn.tree import DecisionTreeClassifier
 

_pst_preview/_downloads/38e826c9e3778d7de78b2fc671fd7903/plot_adaboost_regression.ipynb

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"\n# Decision Tree Regression with AdaBoost\n\nA decision tree is boosted using the AdaBoost.R2 [1]_ algorithm on a 1D\nsinusoidal dataset with a small amount of Gaussian noise.\n299 boosts (300 decision trees) is compared with a single decision tree\nregressor. As the number of boosts is increased the regressor can fit more\ndetail.\n\n.. [1] [H. Drucker, \"Improving Regressors using Boosting Techniques\", 1997.](https://citeseerx.ist.psu.edu/doc_view/pid/8d49e2dedb817f2c3330e74b63c5fc86d2399ce3)\n"
+"\n# Decision Tree Regression with AdaBoost\n\nA decision tree is boosted using the AdaBoost.R2 [1]_ algorithm on a 1D\nsinusoidal dataset with a small amount of Gaussian noise.\n299 boosts (300 decision trees) is compared with a single decision tree\nregressor. As the number of boosts is increased the regressor can fit more\ndetail.\n\nSee `sphx_glr_auto_examples_ensemble_plot_hgbt_regression.py` for an\nexample showcasing the benefits of using more efficient regression models such\nas :class:`~ensemble.HistGradientBoostingRegressor`.\n\n.. [1] [H. Drucker, \"Improving Regressors using Boosting Techniques\", 1997.](https://citeseerx.ist.psu.edu/doc_view/pid/8d49e2dedb817f2c3330e74b63c5fc86d2399ce3)\n"
 ]
 },
 {

_pst_preview/_downloads/4cf0456267ced0f869a458ef4776d4c5/plot_release_highlights_1_1_0.py

Lines changed: 5 additions & 0 deletions
@@ -22,6 +22,8 @@
 """
 
 # %%
+# .. _quantile_support_hgbdt:
+#
 # Quantile loss in :class:`ensemble.HistGradientBoostingRegressor`
 # ----------------------------------------------------------------
 # :class:`~ensemble.HistGradientBoostingRegressor` can model quantiles with

@@ -51,6 +53,9 @@
     ax.plot(X_1d, hist.predict(X), label=quantile)
 _ = ax.legend(loc="lower left")
 
+# %%
+# For a usecase example, see
+# :ref:`sphx_glr_auto_examples_ensemble_plot_hgbt_regression.py`
 
 # %%
 # `get_feature_names_out` Available in all Transformers

_pst_preview/_downloads/4f07b03421908788913e044918d8ed1e/plot_release_highlights_0_23_0.py

Lines changed: 2 additions & 1 deletion
@@ -122,7 +122,8 @@
 # specific features. In the following example, we construct a target that is
 # generally positively correlated with the first feature, with some noise.
 # Applying monotoinc constraints allows the prediction to capture the global
-# effect of the first feature, instead of fitting the noise.
+# effect of the first feature, instead of fitting the noise. For a usecase
+# example, see :ref:`sphx_glr_auto_examples_ensemble_plot_hgbt_regression.py`.
 import numpy as np
 from matplotlib import pyplot as plt
 from sklearn.model_selection import train_test_split
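The monotonic-constraints feature this hunk annotates, in a minimal sketch; the data, constants, and the omission of a train/test split are illustrative simplifications:

import numpy as np
from sklearn.ensemble import HistGradientBoostingRegressor

rng = np.random.RandomState(0)
X = rng.randn(1000, 2)
# Target rises with the first feature; the second feature carries no signal.
y = 5 * X[:, 0] + rng.randn(1000)

# monotonic_cst is per-feature: 1 forces an increasing effect, 0 leaves the
# feature unconstrained, and -1 would force a decreasing effect.
model = HistGradientBoostingRegressor(monotonic_cst=[1, 0]).fit(X, y)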
