Commit 0335386

Pushing the docs to dev/ for branch: main, commit 51ca717db5065e912cdd6cd838bb08d4d08c2770
1 parent: c2297a8

File tree: 1,346 files changed (+5,869 additions, −5,845 deletions)


dev/.buildinfo

Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
 # Sphinx build info version 1
 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 448b255600df67fa24c43ea45b3cee0d
+config: 0196bdeb140758540138776c646854be
 tags: 645f666f9bcd5a90fca523b33c5a78b7
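
The comment kept in this file states the mechanism: the build fingerprints its configuration, and a missing or mismatched hash forces a full rebuild. A minimal sketch of that general idea in Python, not Sphinx's actual implementation (the config dict and the md5-of-sorted-repr scheme are illustrative assumptions):

import hashlib

# Hypothetical configuration values; Sphinx hashes its real config object.
config = {"project": "scikit-learn", "html_theme": "pydata_sphinx_theme"}

# Hash a sorted, stable representation so the fingerprint is deterministic
# across runs and changes only when a configuration value changes.
fingerprint = hashlib.md5(repr(sorted(config.items())).encode("utf-8")).hexdigest()
print(fingerprint)  # a 32-character hex digest like the config value above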

dev/_downloads/083d8568c199bebbc1a847fc6c917e9e/plot_kernel_approximation.ipynb

Lines changed: 2 additions & 2 deletions

The two changed lines are the JSON-encoded sources of two notebook cells; decoded to Python, the edits pin random_state=42 on the estimators and reflow the pipeline literals. The rest of each cell is unchanged.

@@ -40,7 +40,7 @@
 # Create a classifier: a support vector classifier
 kernel_svm = svm.SVC(gamma=0.2)
-linear_svm = svm.LinearSVC(dual="auto")
+linear_svm = svm.LinearSVC(dual="auto", random_state=42)

 # create pipeline from kernel approximation
 # and linear svm
 feature_map_fourier = RBFSampler(gamma=0.2, random_state=1)
 feature_map_nystroem = Nystroem(gamma=0.2, random_state=1)
-fourier_approx_svm = pipeline.Pipeline(
-    [("feature_map", feature_map_fourier), ("svm", svm.LinearSVC(dual="auto"))]
-)
+fourier_approx_svm = pipeline.Pipeline(
+    [
+        ("feature_map", feature_map_fourier),
+        ("svm", svm.LinearSVC(dual="auto", random_state=42)),
+    ]
+)

-nystroem_approx_svm = pipeline.Pipeline(
-    [("feature_map", feature_map_nystroem), ("svm", svm.LinearSVC(dual="auto"))]
-)
+nystroem_approx_svm = pipeline.Pipeline(
+    [
+        ("feature_map", feature_map_nystroem),
+        ("svm", svm.LinearSVC(dual="auto", random_state=42)),
+    ]
+)

@@ -58,7 +58,7 @@
 # visualize the decision surface, projected down to the first
 # two principal components of the dataset
-pca = PCA(n_components=8).fit(data_train)
+pca = PCA(n_components=8, random_state=42).fit(data_train)
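
Pinning random_state on LinearSVC matters because its solver can be stochastic, so accuracy figures regenerated by the doc build could drift between runs. A minimal sketch of the now-seeded pipeline, assuming a scikit-learn version that accepts dual="auto" (the digits preprocessing and gamma=0.2 are taken from the example itself):

from sklearn import pipeline, svm
from sklearn.datasets import load_digits
from sklearn.kernel_approximation import RBFSampler

digits = load_digits()
data = digits.data / 16.0
data -= data.mean(axis=0)
n = len(data) // 2  # train on the first half, test on the second

# Seed both the random Fourier feature map and the linear SVM so that
# repeated builds of this example produce identical scores.
clf = pipeline.Pipeline(
    [
        ("feature_map", RBFSampler(gamma=0.2, random_state=1)),
        ("svm", svm.LinearSVC(dual="auto", random_state=42)),
    ]
)
clf.fit(data[:n], digits.target[:n])
print(clf.score(data[n:], digits.target[n:]))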

dev/_downloads/604c0a9de0e1b80dae9e6754fdb27014/plot_manifold_sphere.ipynb

Lines changed: 1 addition & 1 deletion

The changed line is the JSON-encoded source of one notebook cell; decoded to Python, the edit pins random_state=42 on each stochastic manifold learner. The rest of the cell is unchanged.

@@ -15,7 +15,7 @@
 for i, method in enumerate(methods):
     t0 = time()
     trans_data = (
         manifold.LocallyLinearEmbedding(
-            n_neighbors=n_neighbors, n_components=2, method=method
+            n_neighbors=n_neighbors, n_components=2, method=method, random_state=42
         )
         .fit_transform(sphere_data)
         .T
     )

 # Perform Multi-dimensional scaling.
 t0 = time()
-mds = manifold.MDS(2, max_iter=100, n_init=1, normalized_stress="auto")
+mds = manifold.MDS(2, max_iter=100, n_init=1, normalized_stress="auto", random_state=42)

 # Perform Spectral Embedding.
 t0 = time()
-se = manifold.SpectralEmbedding(n_components=2, n_neighbors=n_neighbors)
+se = manifold.SpectralEmbedding(
+    n_components=2, n_neighbors=n_neighbors, random_state=42
+)
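
The same reproducibility concern applies here: LocallyLinearEmbedding, MDS, and SpectralEmbedding all involve randomized initialization, so their 2-D layouts can differ between builds unless seeded. A minimal sketch on stand-in data (a random blob replaces the example's sphere sampling for brevity):

import numpy as np
from sklearn import manifold

rng = np.random.RandomState(0)
sphere_data = rng.rand(200, 3)  # stand-in for the example's sphere points

# With random_state pinned, repeated runs return identical embeddings.
mds = manifold.MDS(
    2, max_iter=100, n_init=1, normalized_stress="auto", random_state=42
)
coords = mds.fit_transform(sphere_data)
print(coords.shape)  # (200, 2)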
