
Commit e4547fc

Pushing the docs to dev/ for branch: main, commit 5273e6eed5f56a1ef85b09a1652c69f450b4159c
1 parent 40861ac commit e4547fc


1,326 files changed: +7277 / -7249 lines changed


dev/.buildinfo

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 # Sphinx build info version 1
 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: e4973f4ec0b2eee118f5608cb49f1d67
+config: 6cf84a4398407de34ef4dc1f83c797ff
 tags: 645f666f9bcd5a90fca523b33c5a78b7

dev/_downloads/1e0968da80ca868bbdf21c1d0547f68c/plot_lle_digits.ipynb

Lines changed: 1 addition & 1 deletion
@@ -87,7 +87,7 @@
 },
 "outputs": [],
 "source": [
- removed (line 90):
"from sklearn.decomposition import TruncatedSVD\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis\nfrom sklearn.ensemble import RandomTreesEmbedding\nfrom sklearn.manifold import (\n MDS,\n TSNE,\n Isomap,\n LocallyLinearEmbedding,\n SpectralEmbedding,\n)\nfrom sklearn.neighbors import NeighborhoodComponentsAnalysis\nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.random_projection import SparseRandomProjection\n\nembeddings = {\n \"Random projection embedding\": SparseRandomProjection(\n n_components=2, random_state=42\n ),\n \"Truncated SVD embedding\": TruncatedSVD(n_components=2),\n \"Linear Discriminant Analysis embedding\": LinearDiscriminantAnalysis(\n n_components=2\n ),\n \"Isomap embedding\": Isomap(n_neighbors=n_neighbors, n_components=2),\n \"Standard LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"standard\"\n ),\n \"Modified LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"modified\"\n ),\n \"Hessian LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"hessian\"\n ),\n \"LTSA LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"ltsa\"\n ),\n \"MDS embedding\": MDS(n_components=2, n_init=1, max_iter=120, n_jobs=2),\n \"Random Trees embedding\": make_pipeline(\n RandomTreesEmbedding(n_estimators=200, max_depth=5, random_state=0),\n TruncatedSVD(n_components=2),\n ),\n \"Spectral embedding\": SpectralEmbedding(\n n_components=2, random_state=0, eigen_solver=\"arpack\"\n ),\n \"t-SNE embedding\": TSNE(\n n_components=2,\n n_iter=500,\n n_iter_without_progress=150,\n n_jobs=2,\n random_state=0,\n ),\n \"NCA embedding\": NeighborhoodComponentsAnalysis(\n n_components=2, init=\"pca\", random_state=0\n ),\n}"
+ added (line 90):
"from sklearn.decomposition import TruncatedSVD\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis\nfrom sklearn.ensemble import RandomTreesEmbedding\nfrom sklearn.manifold import (\n MDS,\n TSNE,\n Isomap,\n LocallyLinearEmbedding,\n SpectralEmbedding,\n)\nfrom sklearn.neighbors import NeighborhoodComponentsAnalysis\nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.random_projection import SparseRandomProjection\n\nembeddings = {\n \"Random projection embedding\": SparseRandomProjection(\n n_components=2, random_state=42\n ),\n \"Truncated SVD embedding\": TruncatedSVD(n_components=2),\n \"Linear Discriminant Analysis embedding\": LinearDiscriminantAnalysis(\n n_components=2\n ),\n \"Isomap embedding\": Isomap(n_neighbors=n_neighbors, n_components=2),\n \"Standard LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"standard\"\n ),\n \"Modified LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"modified\"\n ),\n \"Hessian LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"hessian\"\n ),\n \"LTSA LLE embedding\": LocallyLinearEmbedding(\n n_neighbors=n_neighbors, n_components=2, method=\"ltsa\"\n ),\n \"MDS embedding\": MDS(n_components=2, n_init=1, max_iter=120, n_jobs=2),\n \"Random Trees embedding\": make_pipeline(\n RandomTreesEmbedding(n_estimators=200, max_depth=5, random_state=0),\n TruncatedSVD(n_components=2),\n ),\n \"Spectral embedding\": SpectralEmbedding(\n n_components=2, random_state=0, eigen_solver=\"arpack\"\n ),\n \"t-SNE embedding\": TSNE(\n n_components=2,\n max_iter=500,\n n_iter_without_progress=150,\n n_jobs=2,\n random_state=0,\n ),\n \"NCA embedding\": NeighborhoodComponentsAnalysis(\n n_components=2, init=\"pca\", random_state=0\n ),\n}"
 ]
 },
 {
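The only functional change buried in this notebook cell is the t-SNE keyword: the iteration cap is now passed as max_iter instead of n_iter. A minimal sketch of the updated dictionary entry, assuming a scikit-learn release that accepts max_iter for TSNE (newer releases deprecate n_iter); the variable name is illustrative and the other embeddings are unchanged.

from sklearn.manifold import TSNE

# t-SNE entry of the embeddings dict after this commit: only the name of the
# iteration-cap keyword changes, not its value.
tsne_embedding = TSNE(
    n_components=2,
    max_iter=500,  # previously n_iter=500
    n_iter_without_progress=150,
    n_jobs=2,
    random_state=0,
)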

dev/_downloads/8b98cea0e0ec1ca3cc503c13ddac0537/plot_t_sne_perplexity.ipynb

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@
 },
 "outputs": [],
 "source": [
- removed (line 18):
"# Author: Narine Kokhlikyan <[email protected]>\n# License: BSD\n\nfrom time import time\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom matplotlib.ticker import NullFormatter\n\nfrom sklearn import datasets, manifold\n\nn_samples = 150\nn_components = 2\n(fig, subplots) = plt.subplots(3, 5, figsize=(15, 8))\nperplexities = [5, 30, 50, 100]\n\nX, y = datasets.make_circles(\n n_samples=n_samples, factor=0.5, noise=0.05, random_state=0\n)\n\nred = y == 0\ngreen = y == 1\n\nax = subplots[0][0]\nax.scatter(X[red, 0], X[red, 1], c=\"r\")\nax.scatter(X[green, 0], X[green, 1], c=\"g\")\nax.xaxis.set_major_formatter(NullFormatter())\nax.yaxis.set_major_formatter(NullFormatter())\nplt.axis(\"tight\")\n\nfor i, perplexity in enumerate(perplexities):\n ax = subplots[0][i + 1]\n\n t0 = time()\n tsne = manifold.TSNE(\n n_components=n_components,\n init=\"random\",\n random_state=0,\n perplexity=perplexity,\n n_iter=300,\n )\n Y = tsne.fit_transform(X)\n t1 = time()\n print(\"circles, perplexity=%d in %.2g sec\" % (perplexity, t1 - t0))\n ax.set_title(\"Perplexity=%d\" % perplexity)\n ax.scatter(Y[red, 0], Y[red, 1], c=\"r\")\n ax.scatter(Y[green, 0], Y[green, 1], c=\"g\")\n ax.xaxis.set_major_formatter(NullFormatter())\n ax.yaxis.set_major_formatter(NullFormatter())\n ax.axis(\"tight\")\n\n# Another example using s-curve\nX, color = datasets.make_s_curve(n_samples, random_state=0)\n\nax = subplots[1][0]\nax.scatter(X[:, 0], X[:, 2], c=color)\nax.xaxis.set_major_formatter(NullFormatter())\nax.yaxis.set_major_formatter(NullFormatter())\n\nfor i, perplexity in enumerate(perplexities):\n ax = subplots[1][i + 1]\n\n t0 = time()\n tsne = manifold.TSNE(\n n_components=n_components,\n init=\"random\",\n random_state=0,\n perplexity=perplexity,\n learning_rate=\"auto\",\n n_iter=300,\n )\n Y = tsne.fit_transform(X)\n t1 = time()\n print(\"S-curve, perplexity=%d in %.2g sec\" % (perplexity, t1 - t0))\n\n ax.set_title(\"Perplexity=%d\" % perplexity)\n ax.scatter(Y[:, 0], Y[:, 1], c=color)\n ax.xaxis.set_major_formatter(NullFormatter())\n ax.yaxis.set_major_formatter(NullFormatter())\n ax.axis(\"tight\")\n\n\n# Another example using a 2D uniform grid\nx = np.linspace(0, 1, int(np.sqrt(n_samples)))\nxx, yy = np.meshgrid(x, x)\nX = np.hstack(\n [\n xx.ravel().reshape(-1, 1),\n yy.ravel().reshape(-1, 1),\n ]\n)\ncolor = xx.ravel()\nax = subplots[2][0]\nax.scatter(X[:, 0], X[:, 1], c=color)\nax.xaxis.set_major_formatter(NullFormatter())\nax.yaxis.set_major_formatter(NullFormatter())\n\nfor i, perplexity in enumerate(perplexities):\n ax = subplots[2][i + 1]\n\n t0 = time()\n tsne = manifold.TSNE(\n n_components=n_components,\n init=\"random\",\n random_state=0,\n perplexity=perplexity,\n n_iter=400,\n )\n Y = tsne.fit_transform(X)\n t1 = time()\n print(\"uniform grid, perplexity=%d in %.2g sec\" % (perplexity, t1 - t0))\n\n ax.set_title(\"Perplexity=%d\" % perplexity)\n ax.scatter(Y[:, 0], Y[:, 1], c=color)\n ax.xaxis.set_major_formatter(NullFormatter())\n ax.yaxis.set_major_formatter(NullFormatter())\n ax.axis(\"tight\")\n\n\nplt.show()"
+ added (line 18):
"# Author: Narine Kokhlikyan <[email protected]>\n# License: BSD\n\nfrom time import time\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom matplotlib.ticker import NullFormatter\n\nfrom sklearn import datasets, manifold\n\nn_samples = 150\nn_components = 2\n(fig, subplots) = plt.subplots(3, 5, figsize=(15, 8))\nperplexities = [5, 30, 50, 100]\n\nX, y = datasets.make_circles(\n n_samples=n_samples, factor=0.5, noise=0.05, random_state=0\n)\n\nred = y == 0\ngreen = y == 1\n\nax = subplots[0][0]\nax.scatter(X[red, 0], X[red, 1], c=\"r\")\nax.scatter(X[green, 0], X[green, 1], c=\"g\")\nax.xaxis.set_major_formatter(NullFormatter())\nax.yaxis.set_major_formatter(NullFormatter())\nplt.axis(\"tight\")\n\nfor i, perplexity in enumerate(perplexities):\n ax = subplots[0][i + 1]\n\n t0 = time()\n tsne = manifold.TSNE(\n n_components=n_components,\n init=\"random\",\n random_state=0,\n perplexity=perplexity,\n max_iter=300,\n )\n Y = tsne.fit_transform(X)\n t1 = time()\n print(\"circles, perplexity=%d in %.2g sec\" % (perplexity, t1 - t0))\n ax.set_title(\"Perplexity=%d\" % perplexity)\n ax.scatter(Y[red, 0], Y[red, 1], c=\"r\")\n ax.scatter(Y[green, 0], Y[green, 1], c=\"g\")\n ax.xaxis.set_major_formatter(NullFormatter())\n ax.yaxis.set_major_formatter(NullFormatter())\n ax.axis(\"tight\")\n\n# Another example using s-curve\nX, color = datasets.make_s_curve(n_samples, random_state=0)\n\nax = subplots[1][0]\nax.scatter(X[:, 0], X[:, 2], c=color)\nax.xaxis.set_major_formatter(NullFormatter())\nax.yaxis.set_major_formatter(NullFormatter())\n\nfor i, perplexity in enumerate(perplexities):\n ax = subplots[1][i + 1]\n\n t0 = time()\n tsne = manifold.TSNE(\n n_components=n_components,\n init=\"random\",\n random_state=0,\n perplexity=perplexity,\n learning_rate=\"auto\",\n max_iter=300,\n )\n Y = tsne.fit_transform(X)\n t1 = time()\n print(\"S-curve, perplexity=%d in %.2g sec\" % (perplexity, t1 - t0))\n\n ax.set_title(\"Perplexity=%d\" % perplexity)\n ax.scatter(Y[:, 0], Y[:, 1], c=color)\n ax.xaxis.set_major_formatter(NullFormatter())\n ax.yaxis.set_major_formatter(NullFormatter())\n ax.axis(\"tight\")\n\n\n# Another example using a 2D uniform grid\nx = np.linspace(0, 1, int(np.sqrt(n_samples)))\nxx, yy = np.meshgrid(x, x)\nX = np.hstack(\n [\n xx.ravel().reshape(-1, 1),\n yy.ravel().reshape(-1, 1),\n ]\n)\ncolor = xx.ravel()\nax = subplots[2][0]\nax.scatter(X[:, 0], X[:, 1], c=color)\nax.xaxis.set_major_formatter(NullFormatter())\nax.yaxis.set_major_formatter(NullFormatter())\n\nfor i, perplexity in enumerate(perplexities):\n ax = subplots[2][i + 1]\n\n t0 = time()\n tsne = manifold.TSNE(\n n_components=n_components,\n init=\"random\",\n random_state=0,\n perplexity=perplexity,\n max_iter=400,\n )\n Y = tsne.fit_transform(X)\n t1 = time()\n print(\"uniform grid, perplexity=%d in %.2g sec\" % (perplexity, t1 - t0))\n\n ax.set_title(\"Perplexity=%d\" % perplexity)\n ax.scatter(Y[:, 0], Y[:, 1], c=color)\n ax.xaxis.set_major_formatter(NullFormatter())\n ax.yaxis.set_major_formatter(NullFormatter())\n ax.axis(\"tight\")\n\n\nplt.show()"
 ]
 }
 ],
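Decoded from the cell above, the change again only renames the iteration keyword. A minimal runnable sketch of one panel of the example (the circles dataset at a single perplexity), assuming a scikit-learn release that accepts max_iter; the fixed perplexity of 30 here stands in for the example's loop over several values.

from time import time

from sklearn import datasets, manifold

# Two concentric circles, as in the first row of the example figure.
X, y = datasets.make_circles(n_samples=150, factor=0.5, noise=0.05, random_state=0)

t0 = time()
Y = manifold.TSNE(
    n_components=2,
    init="random",
    random_state=0,
    perplexity=30,
    max_iter=300,  # previously n_iter=300
).fit_transform(X)
print("circles, perplexity=30 in %.2g sec" % (time() - t0))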

dev/_downloads/9d97cc4ed755b7f2c7f9311bccc89a00/plot_lle_digits.py

Lines changed: 1 addition & 1 deletion
@@ -145,7 +145,7 @@ def plot_embedding(X, title):
     ),
     "t-SNE embedding": TSNE(
         n_components=2,
-        n_iter=500,
+        max_iter=500,
         n_iter_without_progress=150,
         n_jobs=2,
         random_state=0,

dev/_downloads/c8db473878b6afea8e75e36dc828f109/plot_compare_methods.ipynb

Lines changed: 1 addition & 1 deletion
@@ -170,7 +170,7 @@
 },
 "outputs": [],
 "source": [
- removed (line 173):
"t_sne = manifold.TSNE(\n n_components=n_components,\n perplexity=30,\n init=\"random\",\n n_iter=250,\n random_state=0,\n)\nS_t_sne = t_sne.fit_transform(S_points)\n\nplot_2d(S_t_sne, S_color, \"T-distributed Stochastic \\n Neighbor Embedding\")"
+ added (line 173):
"t_sne = manifold.TSNE(\n n_components=n_components,\n perplexity=30,\n init=\"random\",\n max_iter=250,\n random_state=0,\n)\nS_t_sne = t_sne.fit_transform(S_points)\n\nplot_2d(S_t_sne, S_color, \"T-distributed Stochastic \\n Neighbor Embedding\")"
 ]
 }
 ],

dev/_downloads/cda53b33015268619bc212d32b7000b9/plot_compare_methods.py

Lines changed: 1 addition & 1 deletion
@@ -202,7 +202,7 @@ def add_2d_scatter(ax, points, points_color, title=None):
     n_components=n_components,
     perplexity=30,
     init="random",
-    n_iter=250,
+    max_iter=250,
     random_state=0,
 )
 S_t_sne = t_sne.fit_transform(S_points)
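For context, this part of plot_compare_methods embeds an S-curve point cloud with t-SNE. A short sketch with the renamed keyword; the make_s_curve call and n_samples=1500 are assumptions about the example's earlier setup rather than part of this diff.

from sklearn import datasets, manifold

# S-curve point cloud; 1500 samples is assumed to match the example's setup.
S_points, S_color = datasets.make_s_curve(1500, random_state=0)

t_sne = manifold.TSNE(
    n_components=2,
    perplexity=30,
    init="random",
    max_iter=250,  # previously n_iter=250
    random_state=0,
)
S_t_sne = t_sne.fit_transform(S_points)  # 2D embedding, shape (1500, 2)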

dev/_downloads/dec20a8d5f622301132b632f5e0bd532/plot_t_sne_perplexity.py

Lines changed: 3 additions & 3 deletions
@@ -63,7 +63,7 @@
         init="random",
         random_state=0,
         perplexity=perplexity,
-        n_iter=300,
+        max_iter=300,
     )
     Y = tsne.fit_transform(X)
     t1 = time()
@@ -93,7 +93,7 @@
         random_state=0,
         perplexity=perplexity,
         learning_rate="auto",
-        n_iter=300,
+        max_iter=300,
     )
     Y = tsne.fit_transform(X)
     t1 = time()
@@ -130,7 +130,7 @@
         init="random",
         random_state=0,
         perplexity=perplexity,
-        n_iter=400,
+        max_iter=400,
     )
     Y = tsne.fit_transform(X)
     t1 = time()
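Since the regenerated examples now use the newer keyword throughout, code that must also run against older scikit-learn releases cannot hard-code either spelling. A hypothetical compatibility shim (make_tsne and the 1.5 version cutoff are assumptions, not part of this commit; check the changelog of the installed release):

import sklearn
from sklearn.manifold import TSNE


def make_tsne(iters, **kwargs):
    """Build TSNE, passing the iteration cap under whichever keyword
    the installed scikit-learn accepts (max_iter vs. the older n_iter)."""
    major, minor = (int(p) for p in sklearn.__version__.split(".")[:2])
    key = "max_iter" if (major, minor) >= (1, 5) else "n_iter"  # assumed cutoff
    return TSNE(**{key: iters}, **kwargs)


tsne = make_tsne(300, n_components=2, init="random", random_state=0)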

dev/_downloads/scikit-learn-docs.zip

7.88 KB
Binary file not shown.
