Commit a725102

Pushing the docs to 1.1/ for branch: 1.1.X, commit 6cb2c52375a812ff509c00f4eed1da232e7a8932

1 parent 61834c9 · commit a725102
File tree: 1,509 files changed (+27,523 / -24,759 lines)


1.1/.buildinfo

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 # Sphinx build info version 1
 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 0eaf86448214f705bb4019609544b7b3
+config: 8c6ff21e847d280e934fd16d253894de
 tags: 645f666f9bcd5a90fca523b33c5a78b7

1.1/_downloads/02a1306a494b46cc56c930ceec6e8c4a/plot_species_kde.py

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@
 The two species are:
 
  - `"Bradypus variegatus"
-   <http://www.iucnredlist.org/apps/redlist/details/3038/0>`_ ,
+   <https://www.iucnredlist.org/species/3038/47437046>`_ ,
    the Brown-throated Sloth.
 
  - `"Microryzomys minutus"

1.1/_downloads/06cfc926acb27652fb2aa5bfc583e7cb/plot_hashing_vs_dict_vectorizer.ipynb

Lines changed: 290 additions & 2 deletions
Large diffs are not rendered by default.
33.8 KB
Binary file not shown.

1.1/_downloads/08fc4f471ae40388eb535678346dc9d1/plot_gpc_xor.py

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@
 
 # fit the model
 plt.figure(figsize=(10, 5))
-kernels = [1.0 * RBF(length_scale=1.0), 1.0 * DotProduct(sigma_0=1.0) ** 2]
+kernels = [1.0 * RBF(length_scale=1.15), 1.0 * DotProduct(sigma_0=1.0) ** 2]
 for i, kernel in enumerate(kernels):
     clf = GaussianProcessClassifier(kernel=kernel, warm_start=True).fit(X, Y)
 

1.1/_downloads/14f620cd922ca2c9a39ae5784034dd0d/plot_lda.py

Lines changed: 3 additions & 0 deletions
@@ -71,20 +71,23 @@ def generate_data(n_samples, n_features):
     linewidth=2,
     label="Linear Discriminant Analysis with Ledoit Wolf",
     color="navy",
+    linestyle="dashed",
 )
 plt.plot(
     features_samples_ratio,
     acc_clf2,
     linewidth=2,
     label="Linear Discriminant Analysis",
     color="gold",
+    linestyle="solid",
 )
 plt.plot(
     features_samples_ratio,
     acc_clf3,
     linewidth=2,
     label="Linear Discriminant Analysis with OAS",
     color="red",
+    linestyle="dotted",
 )
 
 plt.xlabel("n_features / n_samples")

1.1/_downloads/1bcb2039afa126da41f1cea42b4a5866/plot_gpr_prior_posterior.py

Lines changed: 6 additions & 6 deletions
@@ -158,7 +158,7 @@ def plot_gpr_samples(gpr_model, n_samples, ax):
 )
 
 # %%
-# Periodic kernel
+# Exp-Sine-Squared kernel
 # ...............
 from sklearn.gaussian_process.kernels import ExpSineSquared
 
@@ -183,7 +183,7 @@ def plot_gpr_samples(gpr_model, n_samples, ax):
 axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left")
 axs[1].set_title("Samples from posterior distribution")
 
-fig.suptitle("Periodic kernel", fontsize=18)
+fig.suptitle("Exp-Sine-Squared kernel", fontsize=18)
 plt.tight_layout()
 
 # %%
@@ -194,7 +194,7 @@ def plot_gpr_samples(gpr_model, n_samples, ax):
 )
 
 # %%
-# Dot product kernel
+# Dot-product kernel
 # ..................
 from sklearn.gaussian_process.kernels import ConstantKernel, DotProduct
 
@@ -216,7 +216,7 @@ def plot_gpr_samples(gpr_model, n_samples, ax):
 axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left")
 axs[1].set_title("Samples from posterior distribution")
 
-fig.suptitle("Dot product kernel", fontsize=18)
+fig.suptitle("Dot-product kernel", fontsize=18)
 plt.tight_layout()
 
 # %%
@@ -227,7 +227,7 @@ def plot_gpr_samples(gpr_model, n_samples, ax):
 )
 
 # %%
-# Mattern kernel
+# Matérn kernel
 # ..............
 from sklearn.gaussian_process.kernels import Matern
 
@@ -247,7 +247,7 @@ def plot_gpr_samples(gpr_model, n_samples, ax):
 axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left")
 axs[1].set_title("Samples from posterior distribution")
 
-fig.suptitle("Mattern kernel", fontsize=18)
+fig.suptitle("Matérn kernel", fontsize=18)
 plt.tight_layout()
 
 # %%
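
For reference, a minimal sketch (not part of this commit) of how the kernels renamed above are constructed and sampled from a GP prior; the input grid `X` and the hyperparameter values are illustrative assumptions, not taken from the example file:

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import ConstantKernel, DotProduct, ExpSineSquared, Matern

X = np.linspace(0, 5, 100).reshape(-1, 1)  # illustrative 1-D input grid
kernels = {
    "Exp-Sine-Squared kernel": ExpSineSquared(length_scale=1.0, periodicity=3.0),
    "Dot-product kernel": ConstantKernel(0.1) * DotProduct(sigma_0=1.0) ** 2,
    "Matérn kernel": Matern(length_scale=1.0, nu=1.5),
}
for title, kernel in kernels.items():
    gpr = GaussianProcessRegressor(kernel=kernel, random_state=0)
    # sample_y draws functions from the (unfitted) GP prior
    y_samples = gpr.sample_y(X, n_samples=3, random_state=0)
    print(title, y_samples.shape)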

1.1/_downloads/1c4a422dfa5bd721501d19a2b7e2499b/plot_species_kde.ipynb

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-   "\n# Kernel Density Estimate of Species Distributions\nThis shows an example of a neighbors-based query (in particular a kernel\ndensity estimate) on geospatial data, using a Ball Tree built upon the\nHaversine distance metric -- i.e. distances over points in latitude/longitude.\nThe dataset is provided by Phillips et. al. (2006).\nIf available, the example uses\n`basemap <https://matplotlib.org/basemap/>`_\nto plot the coast lines and national boundaries of South America.\n\nThis example does not perform any learning over the data\n(see `sphx_glr_auto_examples_applications_plot_species_distribution_modeling.py` for\nan example of classification based on the attributes in this dataset). It\nsimply shows the kernel density estimate of observed data points in\ngeospatial coordinates.\n\nThe two species are:\n\n - `\"Bradypus variegatus\"\n <http://www.iucnredlist.org/apps/redlist/details/3038/0>`_ ,\n the Brown-throated Sloth.\n\n - `\"Microryzomys minutus\"\n <http://www.iucnredlist.org/details/13408/0>`_ ,\n also known as the Forest Small Rice Rat, a rodent that lives in Peru,\n Colombia, Ecuador, Peru, and Venezuela.\n\n## References\n\n * `\"Maximum entropy modeling of species geographic distributions\"\n <http://rob.schapire.net/papers/ecolmod.pdf>`_\n S. J. Phillips, R. P. Anderson, R. E. Schapire - Ecological Modelling,\n 190:231-259, 2006.\n"
+   "\n# Kernel Density Estimate of Species Distributions\nThis shows an example of a neighbors-based query (in particular a kernel\ndensity estimate) on geospatial data, using a Ball Tree built upon the\nHaversine distance metric -- i.e. distances over points in latitude/longitude.\nThe dataset is provided by Phillips et. al. (2006).\nIf available, the example uses\n`basemap <https://matplotlib.org/basemap/>`_\nto plot the coast lines and national boundaries of South America.\n\nThis example does not perform any learning over the data\n(see `sphx_glr_auto_examples_applications_plot_species_distribution_modeling.py` for\nan example of classification based on the attributes in this dataset). It\nsimply shows the kernel density estimate of observed data points in\ngeospatial coordinates.\n\nThe two species are:\n\n - `\"Bradypus variegatus\"\n <https://www.iucnredlist.org/species/3038/47437046>`_ ,\n the Brown-throated Sloth.\n\n - `\"Microryzomys minutus\"\n <http://www.iucnredlist.org/details/13408/0>`_ ,\n also known as the Forest Small Rice Rat, a rodent that lives in Peru,\n Colombia, Ecuador, Peru, and Venezuela.\n\n## References\n\n * `\"Maximum entropy modeling of species geographic distributions\"\n <http://rob.schapire.net/papers/ecolmod.pdf>`_\n S. J. Phillips, R. P. Anderson, R. E. Schapire - Ecological Modelling,\n 190:231-259, 2006.\n"
   ]
  },
  {
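
The notebook text above describes a kernel density estimate over latitude/longitude using a Ball Tree with the haversine metric. A self-contained sketch of that idea (the coordinates are made up and the bandwidth is only indicative, not the values used in the example):

import numpy as np
from sklearn.neighbors import KernelDensity

# made-up latitude/longitude observations in degrees, converted to radians
# because the haversine metric expects radians
latlon_deg = np.array([[-3.0, -60.0], [-4.5, -63.2], [-2.1, -59.8]])
X_train = np.radians(latlon_deg)

kde = KernelDensity(
    bandwidth=0.04, metric="haversine", kernel="gaussian", algorithm="ball_tree"
)
kde.fit(X_train)
log_density = kde.score_samples(np.radians([[-3.2, -61.0]]))  # log-density at a query point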

1.1/_downloads/24475810034a0d0d190a9de0f87d72b5/plot_all_scaling.py

Lines changed: 1 addition & 1 deletion
@@ -324,7 +324,7 @@ def make_plot(item_idx):
 #
 # Unlike the previous scalers, the centering and scaling statistics of
 # :class:`~sklearn.preprocessing.RobustScaler`
-# is based on percentiles and are therefore not influenced by a few
+# are based on percentiles and are therefore not influenced by a small
 # number of very large marginal outliers. Consequently, the resulting range of
 # the transformed feature values is larger than for the previous scalers and,
 # more importantly, are approximately similar: for both features most of the
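
The corrected sentence concerns RobustScaler's percentile-based statistics. A quick sketch (values invented for illustration) showing that a single large outlier barely moves the median and IQR, unlike the mean and standard deviation used by StandardScaler:

import numpy as np
from sklearn.preprocessing import RobustScaler, StandardScaler

X = np.array([[1.0], [2.0], [3.0], [4.0], [1000.0]])  # one very large marginal outlier

robust = RobustScaler().fit(X)
standard = StandardScaler().fit(X)

# median and IQR are almost unaffected by the outlier
print("RobustScaler  center_:", robust.center_, "scale_:", robust.scale_)
# mean and standard deviation are pulled strongly towards it
print("StandardScaler mean_:", standard.mean_, "scale_:", standard.scale_)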

1.1/_downloads/2f3ef774a6d7e52e1e6b7ccbb75d25f0/plot_gradient_boosting_quantile.py

Lines changed: 5 additions & 0 deletions
@@ -72,6 +72,11 @@ def f(x):
     all_models["q %1.2f" % alpha] = gbr.fit(X_train, y_train)
 
 # %%
+# Notice that :class:`~sklearn.ensemble.HistGradientBoostingRegressor` is much
+# faster than :class:`~sklearn.ensemble.GradientBoostingRegressor` starting with
+# intermediate datasets (`n_samples >= 10_000`), which is not the case of the
+# present example.
+#
 # For the sake of comparison, we also fit a baseline model trained with the
 # usual (mean) squared error (MSE).
 gbr_ls = GradientBoostingRegressor(loss="squared_error", **common_params)
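
To illustrate the note added above, a hedged sketch of swapping in the faster estimator on a larger dataset; the synthetic data and the 0.95 quantile are assumptions for illustration, and the quantile loss shown requires scikit-learn 1.1 or later:

import numpy as np
from sklearn.ensemble import HistGradientBoostingRegressor

rng = np.random.RandomState(0)
X_train = rng.uniform(0, 10, size=(20_000, 1))  # "intermediate" sized dataset
y_train = np.sin(X_train).ravel() + rng.normal(scale=0.5, size=20_000)

# histogram-based gradient boosting with a pinball (quantile) loss
hgbr = HistGradientBoostingRegressor(loss="quantile", quantile=0.95, random_state=0)
hgbr.fit(X_train, y_train)
y_upper = hgbr.predict(X_train[:5])  # predictions of the 0.95 conditional quantile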
