Commit d2d26a3

Pushing the docs to dev/ for branch: main, commit 34f9dbf54164e3c62d68765fe45f27f067a45562
1 parent a3b2983 commit d2d26a3

1,249 files changed: +4623 additions, -4781 deletions


dev/_downloads/006fc185672e58b056a5c134db26935c/plot_coin_segmentation.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-"# Author: Gael Varoquaux <[email protected]>\n# Brian Cheung\n# Andrew Knyazev <[email protected]>\n# License: BSD 3 clause\n\nimport time\n\nimport numpy as np\nfrom scipy.ndimage.filters import gaussian_filter\nimport matplotlib.pyplot as plt\nimport skimage\nfrom skimage.data import coins\nfrom skimage.transform import rescale\n\nfrom sklearn.feature_extraction import image\nfrom sklearn.cluster import spectral_clustering\nfrom sklearn.utils.fixes import parse_version\n\n# these were introduced in skimage-0.14\nif parse_version(skimage.__version__) >= parse_version(\"0.14\"):\n rescale_params = {\"anti_aliasing\": False, \"multichannel\": False}\nelse:\n rescale_params = {}\n\n# load the coins as a numpy array\norig_coins = coins()\n\n# Resize it to 20% of the original size to speed up the processing\n# Applying a Gaussian filter for smoothing prior to down-scaling\n# reduces aliasing artifacts.\nsmoothened_coins = gaussian_filter(orig_coins, sigma=2)\nrescaled_coins = rescale(smoothened_coins, 0.2, mode=\"reflect\", **rescale_params)\n\n# Convert the image into a graph with the value of the gradient on the\n# edges.\ngraph = image.img_to_graph(rescaled_coins)\n\n# Take a decreasing function of the gradient: an exponential\n# The smaller beta is, the more independent the segmentation is of the\n# actual image. For beta=1, the segmentation is close to a voronoi\nbeta = 10\neps = 1e-6\ngraph.data = np.exp(-beta * graph.data / graph.data.std()) + eps\n\n# The number of segmented regions to display needs to be chosen manually.\n# The current version of 'spectral_clustering' does not support determining\n# the number of good quality clusters automatically.\nn_regions = 26"
+"# Author: Gael Varoquaux <[email protected]>\n# Brian Cheung\n# Andrew Knyazev <[email protected]>\n# License: BSD 3 clause\n\nimport time\n\nimport numpy as np\nfrom scipy.ndimage.filters import gaussian_filter\nimport matplotlib.pyplot as plt\nfrom skimage.data import coins\nfrom skimage.transform import rescale\n\nfrom sklearn.feature_extraction import image\nfrom sklearn.cluster import spectral_clustering\n\n\n# load the coins as a numpy array\norig_coins = coins()\n\n# Resize it to 20% of the original size to speed up the processing\n# Applying a Gaussian filter for smoothing prior to down-scaling\n# reduces aliasing artifacts.\nsmoothened_coins = gaussian_filter(orig_coins, sigma=2)\nrescaled_coins = rescale(\n smoothened_coins, 0.2, mode=\"reflect\", anti_aliasing=False, multichannel=False\n)\n\n# Convert the image into a graph with the value of the gradient on the\n# edges.\ngraph = image.img_to_graph(rescaled_coins)\n\n# Take a decreasing function of the gradient: an exponential\n# The smaller beta is, the more independent the segmentation is of the\n# actual image. For beta=1, the segmentation is close to a voronoi\nbeta = 10\neps = 1e-6\ngraph.data = np.exp(-beta * graph.data / graph.data.std()) + eps\n\n# The number of segmented regions to display needs to be chosen manually.\n# The current version of 'spectral_clustering' does not support determining\n# the number of good quality clusters automatically.\nn_regions = 26"
 ]
 },
 {
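
Note: the change in this hunk drops the skimage import and the version-gated rescale_params dict in favor of passing the keyword arguments directly. A minimal sketch of the resulting call, assuming a scikit-image release that accepts the multichannel keyword (introduced in 0.14, later renamed to channel_axis):

from scipy.ndimage import gaussian_filter
from skimage.data import coins
from skimage.transform import rescale

# Load the sample image and smooth before down-scaling to limit aliasing.
orig_coins = coins()
smoothened_coins = gaussian_filter(orig_coins, sigma=2)
# The keywords are now passed unconditionally instead of via **rescale_params.
rescaled_coins = rescale(
    smoothened_coins, 0.2, mode="reflect", anti_aliasing=False, multichannel=False
)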

dev/_downloads/285b194a4740110cb23e241031123972/plot_johnson_lindenstrauss_bound.ipynb

Lines changed: 2 additions & 2 deletions
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-"import sys\nfrom time import time\nimport numpy as np\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom sklearn.random_projection import johnson_lindenstrauss_min_dim\nfrom sklearn.random_projection import SparseRandomProjection\nfrom sklearn.datasets import fetch_20newsgroups_vectorized\nfrom sklearn.datasets import load_digits\nfrom sklearn.metrics.pairwise import euclidean_distances\nfrom sklearn.utils.fixes import parse_version\n\n# `normed` is being deprecated in favor of `density` in histograms\nif parse_version(matplotlib.__version__) >= parse_version(\"2.1\"):\n density_param = {\"density\": True}\nelse:\n density_param = {\"normed\": True}"
+"import sys\nfrom time import time\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.random_projection import johnson_lindenstrauss_min_dim\nfrom sklearn.random_projection import SparseRandomProjection\nfrom sklearn.datasets import fetch_20newsgroups_vectorized\nfrom sklearn.datasets import load_digits\nfrom sklearn.metrics.pairwise import euclidean_distances"
 ]
 },
 {
@@ -98,7 +98,7 @@
 },
 "outputs": [],
 "source": [
-"n_samples, n_features = data.shape\nprint(\n \"Embedding %d samples with dim %d using various random projections\"\n % (n_samples, n_features)\n)\n\nn_components_range = np.array([300, 1000, 10000])\ndists = euclidean_distances(data, squared=True).ravel()\n\n# select only non-identical samples pairs\nnonzero = dists != 0\ndists = dists[nonzero]\n\nfor n_components in n_components_range:\n t0 = time()\n rp = SparseRandomProjection(n_components=n_components)\n projected_data = rp.fit_transform(data)\n print(\n \"Projected %d samples from %d to %d in %0.3fs\"\n % (n_samples, n_features, n_components, time() - t0)\n )\n if hasattr(rp, \"components_\"):\n n_bytes = rp.components_.data.nbytes\n n_bytes += rp.components_.indices.nbytes\n print(\"Random matrix with size: %0.3fMB\" % (n_bytes / 1e6))\n\n projected_dists = euclidean_distances(projected_data, squared=True).ravel()[nonzero]\n\n plt.figure()\n min_dist = min(projected_dists.min(), dists.min())\n max_dist = max(projected_dists.max(), dists.max())\n plt.hexbin(\n dists,\n projected_dists,\n gridsize=100,\n cmap=plt.cm.PuBu,\n extent=[min_dist, max_dist, min_dist, max_dist],\n )\n plt.xlabel(\"Pairwise squared distances in original space\")\n plt.ylabel(\"Pairwise squared distances in projected space\")\n plt.title(\"Pairwise distances distribution for n_components=%d\" % n_components)\n cb = plt.colorbar()\n cb.set_label(\"Sample pairs counts\")\n\n rates = projected_dists / dists\n print(\"Mean distances rate: %0.2f (%0.2f)\" % (np.mean(rates), np.std(rates)))\n\n plt.figure()\n plt.hist(rates, bins=50, range=(0.0, 2.0), edgecolor=\"k\", **density_param)\n plt.xlabel(\"Squared distances rate: projected / original\")\n plt.ylabel(\"Distribution of samples pairs\")\n plt.title(\"Histogram of pairwise distance rates for n_components=%d\" % n_components)\n\n # TODO: compute the expected value of eps and add them to the previous plot\n # as vertical lines / region\n\nplt.show()"
+"n_samples, n_features = data.shape\nprint(\n \"Embedding %d samples with dim %d using various random projections\"\n % (n_samples, n_features)\n)\n\nn_components_range = np.array([300, 1000, 10000])\ndists = euclidean_distances(data, squared=True).ravel()\n\n# select only non-identical samples pairs\nnonzero = dists != 0\ndists = dists[nonzero]\n\nfor n_components in n_components_range:\n t0 = time()\n rp = SparseRandomProjection(n_components=n_components)\n projected_data = rp.fit_transform(data)\n print(\n \"Projected %d samples from %d to %d in %0.3fs\"\n % (n_samples, n_features, n_components, time() - t0)\n )\n if hasattr(rp, \"components_\"):\n n_bytes = rp.components_.data.nbytes\n n_bytes += rp.components_.indices.nbytes\n print(\"Random matrix with size: %0.3fMB\" % (n_bytes / 1e6))\n\n projected_dists = euclidean_distances(projected_data, squared=True).ravel()[nonzero]\n\n plt.figure()\n min_dist = min(projected_dists.min(), dists.min())\n max_dist = max(projected_dists.max(), dists.max())\n plt.hexbin(\n dists,\n projected_dists,\n gridsize=100,\n cmap=plt.cm.PuBu,\n extent=[min_dist, max_dist, min_dist, max_dist],\n )\n plt.xlabel(\"Pairwise squared distances in original space\")\n plt.ylabel(\"Pairwise squared distances in projected space\")\n plt.title(\"Pairwise distances distribution for n_components=%d\" % n_components)\n cb = plt.colorbar()\n cb.set_label(\"Sample pairs counts\")\n\n rates = projected_dists / dists\n print(\"Mean distances rate: %0.2f (%0.2f)\" % (np.mean(rates), np.std(rates)))\n\n plt.figure()\n plt.hist(rates, bins=50, range=(0.0, 2.0), edgecolor=\"k\", density=True)\n plt.xlabel(\"Squared distances rate: projected / original\")\n plt.ylabel(\"Distribution of samples pairs\")\n plt.title(\"Histogram of pairwise distance rates for n_components=%d\" % n_components)\n\n # TODO: compute the expected value of eps and add them to the previous plot\n # as vertical lines / region\n\nplt.show()"
 ]
 },
 {
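
Both hunks remove the matplotlib-version shim: with matplotlib >= 2.1 assumed, plt.hist takes density=True directly instead of the normed/density compatibility dict. A small sketch of the simplified call, on synthetic stand-in data rather than the example's actual distance ratios:

import numpy as np
import matplotlib.pyplot as plt

# Stand-in for the projected/original squared-distance ratios of the example.
rates = np.random.RandomState(42).uniform(0.5, 1.5, size=1000)
# density=True normalizes the histogram; no version check is needed anymore.
plt.hist(rates, bins=50, range=(0.0, 2.0), edgecolor="k", density=True)
plt.xlabel("Squared distances rate: projected / original")
plt.show()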

dev/_downloads/2a14e362a70d246e83fa6a89ca069cee/plot_sparse_coding.py

Lines changed: 1 addition & 4 deletions
@@ -20,7 +20,6 @@
 import matplotlib.pyplot as plt

 from sklearn.decomposition import SparseCoder
-from sklearn.utils.fixes import np_version, parse_version


 def ricker_function(resolution, center, width):
@@ -72,8 +71,6 @@ def ricker_matrix(width, resolution, n_components):
     ("Lasso", "lasso_lars", 2, None, "turquoise"),
 ]
 lw = 2
-# Avoid FutureWarning about default value change when numpy >= 1.14
-lstsq_rcond = None if np_version >= parse_version("1.14") else -1

 plt.figure(figsize=(13, 6))
 for subplot, (D, title) in enumerate(
@@ -107,7 +104,7 @@ def ricker_matrix(width, resolution, n_components):
     )
     x = coder.transform(y.reshape(1, -1))
     _, idx = np.where(x != 0)
-    x[0, idx], _, _, _ = np.linalg.lstsq(D[idx, :].T, y, rcond=lstsq_rcond)
+    x[0, idx], _, _, _ = np.linalg.lstsq(D[idx, :].T, y, rcond=None)
     x = np.ravel(np.dot(x, D))
     squared_error = np.sum((y - x) ** 2)
     plt.plot(
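
With numpy >= 1.14 now assumed, the lstsq_rcond shim goes away and rcond=None is passed directly; None selects the newer machine-precision-based cutoff for small singular values, whereas -1 kept the legacy pre-1.14 behaviour. A tiny sketch on hypothetical data:

import numpy as np

rng = np.random.RandomState(0)
A = rng.randn(10, 3)  # hypothetical overdetermined system
b = rng.randn(10)
# rcond=None raises no FutureWarning on numpy >= 1.14, which this
# commit now takes as the minimum supported version.
coef, residuals, rank, singular_values = np.linalg.lstsq(A, b, rcond=None)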

dev/_downloads/2e86a4838807f09bbbb529d9643d45ab/plot_coin_segmentation.py

Lines changed: 3 additions & 8 deletions
@@ -30,19 +30,12 @@
 import numpy as np
 from scipy.ndimage.filters import gaussian_filter
 import matplotlib.pyplot as plt
-import skimage
 from skimage.data import coins
 from skimage.transform import rescale

 from sklearn.feature_extraction import image
 from sklearn.cluster import spectral_clustering
-from sklearn.utils.fixes import parse_version

-# these were introduced in skimage-0.14
-if parse_version(skimage.__version__) >= parse_version("0.14"):
-    rescale_params = {"anti_aliasing": False, "multichannel": False}
-else:
-    rescale_params = {}

 # load the coins as a numpy array
 orig_coins = coins()
@@ -51,7 +44,9 @@
 # Applying a Gaussian filter for smoothing prior to down-scaling
 # reduces aliasing artifacts.
 smoothened_coins = gaussian_filter(orig_coins, sigma=2)
-rescaled_coins = rescale(smoothened_coins, 0.2, mode="reflect", **rescale_params)
+rescaled_coins = rescale(
+    smoothened_coins, 0.2, mode="reflect", anti_aliasing=False, multichannel=False
+)

 # Convert the image into a graph with the value of the gradient on the
 # edges.
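
The clustering done downstream of this rescale call sits outside the hunk but is visible in the notebook source above: the example builds a gradient-weighted pixel graph and partitions it into n_regions = 26 segments with spectral clustering. A self-contained sketch; eigen_solver="arpack" is an illustrative choice here, not taken from this diff:

import numpy as np
from scipy.ndimage import gaussian_filter
from skimage.data import coins
from skimage.transform import rescale
from sklearn.feature_extraction import image
from sklearn.cluster import spectral_clustering

rescaled_coins = rescale(
    gaussian_filter(coins(), sigma=2),
    0.2, mode="reflect", anti_aliasing=False, multichannel=False,
)
# Graph whose edge weights decay exponentially with the image gradient.
graph = image.img_to_graph(rescaled_coins)
beta, eps = 10, 1e-6
graph.data = np.exp(-beta * graph.data / graph.data.std()) + eps
# 26 regions, matching n_regions hard-coded in the example.
labels = spectral_clustering(graph, n_clusters=26, eigen_solver="arpack")
segmented = labels.reshape(rescaled_coins.shape)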

dev/_downloads/5eeecece5c41d6edcf4555b5e7c34350/plot_coin_ward_segmentation.py

Lines changed: 3 additions & 8 deletions
@@ -20,19 +20,12 @@

 import matplotlib.pyplot as plt

-import skimage
 from skimage.data import coins
 from skimage.transform import rescale

 from sklearn.feature_extraction.image import grid_to_graph
 from sklearn.cluster import AgglomerativeClustering
-from sklearn.utils.fixes import parse_version

-# these were introduced in skimage-0.14
-if parse_version(skimage.__version__) >= parse_version("0.14"):
-    rescale_params = {"anti_aliasing": False, "multichannel": False}
-else:
-    rescale_params = {}

 # #############################################################################
 # Generate data
@@ -42,7 +35,9 @@
 # Applying a Gaussian filter for smoothing prior to down-scaling
 # reduces aliasing artifacts.
 smoothened_coins = gaussian_filter(orig_coins, sigma=2)
-rescaled_coins = rescale(smoothened_coins, 0.2, mode="reflect", **rescale_params)
+rescaled_coins = rescale(
+    smoothened_coins, 0.2, mode="reflect", anti_aliasing=False, multichannel=False
+)

 X = np.reshape(rescaled_coins, (-1, 1))
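
What this example does with grid_to_graph and AgglomerativeClustering lies outside the hunks shown; a sketch of the structured Ward clustering it performs on the rescaled image, with n_clusters chosen here purely for illustration:

import numpy as np
from scipy.ndimage import gaussian_filter
from skimage.data import coins
from skimage.transform import rescale
from sklearn.feature_extraction.image import grid_to_graph
from sklearn.cluster import AgglomerativeClustering

rescaled_coins = rescale(
    gaussian_filter(coins(), sigma=2),
    0.2, mode="reflect", anti_aliasing=False, multichannel=False,
)
X = np.reshape(rescaled_coins, (-1, 1))  # one grey-level feature per pixel
# Connectivity restricts merges to neighbouring pixels on the image grid.
connectivity = grid_to_graph(*rescaled_coins.shape)
ward = AgglomerativeClustering(
    n_clusters=27, linkage="ward", connectivity=connectivity  # 27 is illustrative
).fit(X)
label = np.reshape(ward.labels_, rescaled_coins.shape)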


dev/_downloads/7c2f454ae53819802ecec0f2cacd6d51/plot_kde_1d.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-"# Author: Jake Vanderplas <[email protected]>\n#\nimport numpy as np\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom scipy.stats import norm\nfrom sklearn.neighbors import KernelDensity\nfrom sklearn.utils.fixes import parse_version\n\n# `normed` is being deprecated in favor of `density` in histograms\nif parse_version(matplotlib.__version__) >= parse_version(\"2.1\"):\n density_param = {\"density\": True}\nelse:\n density_param = {\"normed\": True}\n\n# ----------------------------------------------------------------------\n# Plot the progression of histograms to kernels\nnp.random.seed(1)\nN = 20\nX = np.concatenate(\n (np.random.normal(0, 1, int(0.3 * N)), np.random.normal(5, 1, int(0.7 * N)))\n)[:, np.newaxis]\nX_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]\nbins = np.linspace(-5, 10, 10)\n\nfig, ax = plt.subplots(2, 2, sharex=True, sharey=True)\nfig.subplots_adjust(hspace=0.05, wspace=0.05)\n\n# histogram 1\nax[0, 0].hist(X[:, 0], bins=bins, fc=\"#AAAAFF\", **density_param)\nax[0, 0].text(-3.5, 0.31, \"Histogram\")\n\n# histogram 2\nax[0, 1].hist(X[:, 0], bins=bins + 0.75, fc=\"#AAAAFF\", **density_param)\nax[0, 1].text(-3.5, 0.31, \"Histogram, bins shifted\")\n\n# tophat KDE\nkde = KernelDensity(kernel=\"tophat\", bandwidth=0.75).fit(X)\nlog_dens = kde.score_samples(X_plot)\nax[1, 0].fill(X_plot[:, 0], np.exp(log_dens), fc=\"#AAAAFF\")\nax[1, 0].text(-3.5, 0.31, \"Tophat Kernel Density\")\n\n# Gaussian KDE\nkde = KernelDensity(kernel=\"gaussian\", bandwidth=0.75).fit(X)\nlog_dens = kde.score_samples(X_plot)\nax[1, 1].fill(X_plot[:, 0], np.exp(log_dens), fc=\"#AAAAFF\")\nax[1, 1].text(-3.5, 0.31, \"Gaussian Kernel Density\")\n\nfor axi in ax.ravel():\n axi.plot(X[:, 0], np.full(X.shape[0], -0.01), \"+k\")\n axi.set_xlim(-4, 9)\n axi.set_ylim(-0.02, 0.34)\n\nfor axi in ax[:, 0]:\n axi.set_ylabel(\"Normalized Density\")\n\nfor axi in ax[1, :]:\n axi.set_xlabel(\"x\")\n\n# ----------------------------------------------------------------------\n# Plot all available kernels\nX_plot = np.linspace(-6, 6, 1000)[:, None]\nX_src = np.zeros((1, 1))\n\nfig, ax = plt.subplots(2, 3, sharex=True, sharey=True)\nfig.subplots_adjust(left=0.05, right=0.95, hspace=0.05, wspace=0.05)\n\n\ndef format_func(x, loc):\n if x == 0:\n return \"0\"\n elif x == 1:\n return \"h\"\n elif x == -1:\n return \"-h\"\n else:\n return \"%ih\" % x\n\n\nfor i, kernel in enumerate(\n [\"gaussian\", \"tophat\", \"epanechnikov\", \"exponential\", \"linear\", \"cosine\"]\n):\n axi = ax.ravel()[i]\n log_dens = KernelDensity(kernel=kernel).fit(X_src).score_samples(X_plot)\n axi.fill(X_plot[:, 0], np.exp(log_dens), \"-k\", fc=\"#AAAAFF\")\n axi.text(-2.6, 0.95, kernel)\n\n axi.xaxis.set_major_formatter(plt.FuncFormatter(format_func))\n axi.xaxis.set_major_locator(plt.MultipleLocator(1))\n axi.yaxis.set_major_locator(plt.NullLocator())\n\n axi.set_ylim(0, 1.05)\n axi.set_xlim(-2.9, 2.9)\n\nax[0, 1].set_title(\"Available Kernels\")\n\n# ----------------------------------------------------------------------\n# Plot a 1D density example\nN = 100\nnp.random.seed(1)\nX = np.concatenate(\n (np.random.normal(0, 1, int(0.3 * N)), np.random.normal(5, 1, int(0.7 * N)))\n)[:, np.newaxis]\n\nX_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]\n\ntrue_dens = 0.3 * norm(0, 1).pdf(X_plot[:, 0]) + 0.7 * norm(5, 1).pdf(X_plot[:, 0])\n\nfig, ax = plt.subplots()\nax.fill(X_plot[:, 0], true_dens, fc=\"black\", alpha=0.2, label=\"input distribution\")\ncolors = [\"navy\", \"cornflowerblue\", \"darkorange\"]\nkernels = [\"gaussian\", \"tophat\", \"epanechnikov\"]\nlw = 2\n\nfor color, kernel in zip(colors, kernels):\n kde = KernelDensity(kernel=kernel, bandwidth=0.5).fit(X)\n log_dens = kde.score_samples(X_plot)\n ax.plot(\n X_plot[:, 0],\n np.exp(log_dens),\n color=color,\n lw=lw,\n linestyle=\"-\",\n label=\"kernel = '{0}'\".format(kernel),\n )\n\nax.text(6, 0.38, \"N={0} points\".format(N))\n\nax.legend(loc=\"upper left\")\nax.plot(X[:, 0], -0.005 - 0.01 * np.random.random(X.shape[0]), \"+k\")\n\nax.set_xlim(-4, 9)\nax.set_ylim(-0.02, 0.4)\nplt.show()"
+"# Author: Jake Vanderplas <[email protected]>\n#\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom scipy.stats import norm\nfrom sklearn.neighbors import KernelDensity\n\n# ----------------------------------------------------------------------\n# Plot the progression of histograms to kernels\nnp.random.seed(1)\nN = 20\nX = np.concatenate(\n (np.random.normal(0, 1, int(0.3 * N)), np.random.normal(5, 1, int(0.7 * N)))\n)[:, np.newaxis]\nX_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]\nbins = np.linspace(-5, 10, 10)\n\nfig, ax = plt.subplots(2, 2, sharex=True, sharey=True)\nfig.subplots_adjust(hspace=0.05, wspace=0.05)\n\n# histogram 1\nax[0, 0].hist(X[:, 0], bins=bins, fc=\"#AAAAFF\", density=True)\nax[0, 0].text(-3.5, 0.31, \"Histogram\")\n\n# histogram 2\nax[0, 1].hist(X[:, 0], bins=bins + 0.75, fc=\"#AAAAFF\", density=True)\nax[0, 1].text(-3.5, 0.31, \"Histogram, bins shifted\")\n\n# tophat KDE\nkde = KernelDensity(kernel=\"tophat\", bandwidth=0.75).fit(X)\nlog_dens = kde.score_samples(X_plot)\nax[1, 0].fill(X_plot[:, 0], np.exp(log_dens), fc=\"#AAAAFF\")\nax[1, 0].text(-3.5, 0.31, \"Tophat Kernel Density\")\n\n# Gaussian KDE\nkde = KernelDensity(kernel=\"gaussian\", bandwidth=0.75).fit(X)\nlog_dens = kde.score_samples(X_plot)\nax[1, 1].fill(X_plot[:, 0], np.exp(log_dens), fc=\"#AAAAFF\")\nax[1, 1].text(-3.5, 0.31, \"Gaussian Kernel Density\")\n\nfor axi in ax.ravel():\n axi.plot(X[:, 0], np.full(X.shape[0], -0.01), \"+k\")\n axi.set_xlim(-4, 9)\n axi.set_ylim(-0.02, 0.34)\n\nfor axi in ax[:, 0]:\n axi.set_ylabel(\"Normalized Density\")\n\nfor axi in ax[1, :]:\n axi.set_xlabel(\"x\")\n\n# ----------------------------------------------------------------------\n# Plot all available kernels\nX_plot = np.linspace(-6, 6, 1000)[:, None]\nX_src = np.zeros((1, 1))\n\nfig, ax = plt.subplots(2, 3, sharex=True, sharey=True)\nfig.subplots_adjust(left=0.05, right=0.95, hspace=0.05, wspace=0.05)\n\n\ndef format_func(x, loc):\n if x == 0:\n return \"0\"\n elif x == 1:\n return \"h\"\n elif x == -1:\n return \"-h\"\n else:\n return \"%ih\" % x\n\n\nfor i, kernel in enumerate(\n [\"gaussian\", \"tophat\", \"epanechnikov\", \"exponential\", \"linear\", \"cosine\"]\n):\n axi = ax.ravel()[i]\n log_dens = KernelDensity(kernel=kernel).fit(X_src).score_samples(X_plot)\n axi.fill(X_plot[:, 0], np.exp(log_dens), \"-k\", fc=\"#AAAAFF\")\n axi.text(-2.6, 0.95, kernel)\n\n axi.xaxis.set_major_formatter(plt.FuncFormatter(format_func))\n axi.xaxis.set_major_locator(plt.MultipleLocator(1))\n axi.yaxis.set_major_locator(plt.NullLocator())\n\n axi.set_ylim(0, 1.05)\n axi.set_xlim(-2.9, 2.9)\n\nax[0, 1].set_title(\"Available Kernels\")\n\n# ----------------------------------------------------------------------\n# Plot a 1D density example\nN = 100\nnp.random.seed(1)\nX = np.concatenate(\n (np.random.normal(0, 1, int(0.3 * N)), np.random.normal(5, 1, int(0.7 * N)))\n)[:, np.newaxis]\n\nX_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]\n\ntrue_dens = 0.3 * norm(0, 1).pdf(X_plot[:, 0]) + 0.7 * norm(5, 1).pdf(X_plot[:, 0])\n\nfig, ax = plt.subplots()\nax.fill(X_plot[:, 0], true_dens, fc=\"black\", alpha=0.2, label=\"input distribution\")\ncolors = [\"navy\", \"cornflowerblue\", \"darkorange\"]\nkernels = [\"gaussian\", \"tophat\", \"epanechnikov\"]\nlw = 2\n\nfor color, kernel in zip(colors, kernels):\n kde = KernelDensity(kernel=kernel, bandwidth=0.5).fit(X)\n log_dens = kde.score_samples(X_plot)\n ax.plot(\n X_plot[:, 0],\n np.exp(log_dens),\n color=color,\n lw=lw,\n linestyle=\"-\",\n label=\"kernel = '{0}'\".format(kernel),\n )\n\nax.text(6, 0.38, \"N={0} points\".format(N))\n\nax.legend(loc=\"upper left\")\nax.plot(X[:, 0], -0.005 - 0.01 * np.random.random(X.shape[0]), \"+k\")\n\nax.set_xlim(-4, 9)\nax.set_ylim(-0.02, 0.4)\nplt.show()"
 ]
 },
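
The change here mirrors the Johnson-Lindenstrauss notebook: density=True replaces the normed/density shim, and the bare matplotlib import drops out. For the estimator pattern this example is built around, a minimal KernelDensity sketch on synthetic data:

import numpy as np
from sklearn.neighbors import KernelDensity

rng = np.random.RandomState(1)
X = rng.normal(0, 1, 100)[:, np.newaxis]         # synthetic 1-D samples
X_plot = np.linspace(-5, 5, 200)[:, np.newaxis]  # evaluation grid

kde = KernelDensity(kernel="gaussian", bandwidth=0.5).fit(X)
log_dens = kde.score_samples(X_plot)  # log-density at the grid points
dens = np.exp(log_dens)               # what the example fills/plots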
