
Commit 438d34f

Pushing the docs to dev/ for branch: master, commit 36b688eb04de0172dace761ca63616f18d615542
1 parent: f8f9faf

1,109 files changed (+3,411 / -3,372 lines)

2 binary files changed (153 Bytes and 150 Bytes); binary contents not shown.

dev/_downloads/plot_classification.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
- removed:
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nfrom sklearn import neighbors, datasets\n\nn_neighbors = 15\n\n# import some data to play with\niris = datasets.load_iris()\n\n# we only take the first two features. We could avoid this ugly\n# slicing by using a two-dim dataset\nX = iris.data[:, :2]\ny = iris.target\n\nh = .02 # step size in the mesh\n\n# Create color maps\ncmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])\ncmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])\n\nfor weights in ['uniform', 'distance']:\n # we create an instance of Neighbours Classifier and fit the data.\n clf = neighbors.KNeighborsClassifier(n_neighbors, weights=weights)\n clf.fit(X, y)\n\n # Plot the decision boundary. For that, we will assign a color to each\n # point in the mesh [x_min, x_max]x[y_min, y_max].\n x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n\n # Put the result into a color plot\n Z = Z.reshape(xx.shape)\n plt.figure()\n plt.pcolormesh(xx, yy, Z, cmap=cmap_light)\n\n # Plot also the training points\n plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,\n edgecolor='k', s=20)\n plt.xlim(xx.min(), xx.max())\n plt.ylim(yy.min(), yy.max())\n plt.title(\"3-Class classification (k = %i, weights = '%s')\"\n % (n_neighbors, weights))\n\nplt.show()"
+ added:
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nfrom sklearn import neighbors, datasets\n\nn_neighbors = 15\n\n# import some data to play with\niris = datasets.load_iris()\n\n# we only take the first two features. We could avoid this ugly\n# slicing by using a two-dim dataset\nX = iris.data[:, :2]\ny = iris.target\n\nh = .02 # step size in the mesh\n\n# Create color maps\ncmap_light = ListedColormap(['orange', 'cyan', 'cornflowerblue'])\ncmap_bold = ListedColormap(['darkorange', 'c', 'darkblue'])\n\nfor weights in ['uniform', 'distance']:\n # we create an instance of Neighbours Classifier and fit the data.\n clf = neighbors.KNeighborsClassifier(n_neighbors, weights=weights)\n clf.fit(X, y)\n\n # Plot the decision boundary. For that, we will assign a color to each\n # point in the mesh [x_min, x_max]x[y_min, y_max].\n x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n\n # Put the result into a color plot\n Z = Z.reshape(xx.shape)\n plt.figure()\n plt.pcolormesh(xx, yy, Z, cmap=cmap_light)\n\n # Plot also the training points\n plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,\n edgecolor='k', s=20)\n plt.xlim(xx.min(), xx.max())\n plt.ylim(yy.min(), yy.max())\n plt.title(\"3-Class classification (k = %i, weights = '%s')\"\n % (n_neighbors, weights))\n\nplt.show()"
 ]
 }
 ],

dev/_downloads/plot_classification.py

Lines changed: 2 additions & 2 deletions
@@ -26,8 +26,8 @@
 h = .02  # step size in the mesh
 
 # Create color maps
-cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
-cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])
+cmap_light = ListedColormap(['orange', 'cyan', 'cornflowerblue'])
+cmap_bold = ListedColormap(['darkorange', 'c', 'darkblue'])
 
 for weights in ['uniform', 'distance']:
     # we create an instance of Neighbours Classifier and fit the data.
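
Both plot_classification files carry the same change: the light and bold colormaps move from hex codes to named matplotlib colors. For orientation, a condensed sketch of where those colormaps are consumed in the example, assuming scikit-learn and matplotlib are installed (only one weighting is shown here; the shipped script loops over 'uniform' and 'distance'):

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn import neighbors, datasets

# first two iris features only, as in the example
iris = datasets.load_iris()
X, y = iris.data[:, :2], iris.target
h = .02  # step size in the mesh

# the colormaps renamed by this commit
cmap_light = ListedColormap(['orange', 'cyan', 'cornflowerblue'])
cmap_bold = ListedColormap(['darkorange', 'c', 'darkblue'])

clf = neighbors.KNeighborsClassifier(n_neighbors=15, weights='uniform')
clf.fit(X, y)

# evaluate the classifier on a mesh covering the feature range
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

plt.pcolormesh(xx, yy, Z, cmap=cmap_light)           # decision regions
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,   # training points
            edgecolor='k', s=20)
plt.show()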

dev/_downloads/plot_kde_1d.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
- removed:
"# Author: Jake Vanderplas <[email protected]>\n#\nimport numpy as np\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom distutils.version import LooseVersion\nfrom scipy.stats import norm\nfrom sklearn.neighbors import KernelDensity\n\n# `normed` is being deprecated in favor of `density` in histograms\nif LooseVersion(matplotlib.__version__) >= '2.1':\n density_param = {'density': True}\nelse:\n density_param = {'normed': True}\n\n#----------------------------------------------------------------------\n# Plot the progression of histograms to kernels\nnp.random.seed(1)\nN = 20\nX = np.concatenate((np.random.normal(0, 1, int(0.3 * N)),\n np.random.normal(5, 1, int(0.7 * N))))[:, np.newaxis]\nX_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]\nbins = np.linspace(-5, 10, 10)\n\nfig, ax = plt.subplots(2, 2, sharex=True, sharey=True)\nfig.subplots_adjust(hspace=0.05, wspace=0.05)\n\n# histogram 1\nax[0, 0].hist(X[:, 0], bins=bins, fc='#AAAAFF', **density_param)\nax[0, 0].text(-3.5, 0.31, \"Histogram\")\n\n# histogram 2\nax[0, 1].hist(X[:, 0], bins=bins + 0.75, fc='#AAAAFF', **density_param)\nax[0, 1].text(-3.5, 0.31, \"Histogram, bins shifted\")\n\n# tophat KDE\nkde = KernelDensity(kernel='tophat', bandwidth=0.75).fit(X)\nlog_dens = kde.score_samples(X_plot)\nax[1, 0].fill(X_plot[:, 0], np.exp(log_dens), fc='#AAAAFF')\nax[1, 0].text(-3.5, 0.31, \"Tophat Kernel Density\")\n\n# Gaussian KDE\nkde = KernelDensity(kernel='gaussian', bandwidth=0.75).fit(X)\nlog_dens = kde.score_samples(X_plot)\nax[1, 1].fill(X_plot[:, 0], np.exp(log_dens), fc='#AAAAFF')\nax[1, 1].text(-3.5, 0.31, \"Gaussian Kernel Density\")\n\nfor axi in ax.ravel():\n axi.plot(X[:, 0], np.full(X.shape[0], -0.01), '+k')\n axi.set_xlim(-4, 9)\n axi.set_ylim(-0.02, 0.34)\n\nfor axi in ax[:, 0]:\n axi.set_ylabel('Normalized Density')\n\nfor axi in ax[1, :]:\n axi.set_xlabel('x')\n\n#----------------------------------------------------------------------\n# Plot all available kernels\nX_plot = np.linspace(-6, 6, 1000)[:, None]\nX_src = np.zeros((1, 1))\n\nfig, ax = plt.subplots(2, 3, sharex=True, sharey=True)\nfig.subplots_adjust(left=0.05, right=0.95, hspace=0.05, wspace=0.05)\n\n\ndef format_func(x, loc):\n if x == 0:\n return '0'\n elif x == 1:\n return 'h'\n elif x == -1:\n return '-h'\n else:\n return '%ih' % x\n\nfor i, kernel in enumerate(['gaussian', 'tophat', 'epanechnikov',\n 'exponential', 'linear', 'cosine']):\n axi = ax.ravel()[i]\n log_dens = KernelDensity(kernel=kernel).fit(X_src).score_samples(X_plot)\n axi.fill(X_plot[:, 0], np.exp(log_dens), '-k', fc='#AAAAFF')\n axi.text(-2.6, 0.95, kernel)\n\n axi.xaxis.set_major_formatter(plt.FuncFormatter(format_func))\n axi.xaxis.set_major_locator(plt.MultipleLocator(1))\n axi.yaxis.set_major_locator(plt.NullLocator())\n\n axi.set_ylim(0, 1.05)\n axi.set_xlim(-2.9, 2.9)\n\nax[0, 1].set_title('Available Kernels')\n\n#----------------------------------------------------------------------\n# Plot a 1D density example\nN = 100\nnp.random.seed(1)\nX = np.concatenate((np.random.normal(0, 1, int(0.3 * N)),\n np.random.normal(5, 1, int(0.7 * N))))[:, np.newaxis]\n\nX_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]\n\ntrue_dens = (0.3 * norm(0, 1).pdf(X_plot[:, 0])\n + 0.7 * norm(5, 1).pdf(X_plot[:, 0]))\n\nfig, ax = plt.subplots()\nax.fill(X_plot[:, 0], true_dens, fc='black', alpha=0.2,\n label='input distribution')\n\nfor kernel in ['gaussian', 'tophat', 'epanechnikov']:\n kde = KernelDensity(kernel=kernel, bandwidth=0.5).fit(X)\n log_dens = kde.score_samples(X_plot)\n 
ax.plot(X_plot[:, 0], np.exp(log_dens), '-',\n label=\"kernel = '{0}'\".format(kernel))\n\nax.text(6, 0.38, \"N={0} points\".format(N))\n\nax.legend(loc='upper left')\nax.plot(X[:, 0], -0.005 - 0.01 * np.random.random(X.shape[0]), '+k')\n\nax.set_xlim(-4, 9)\nax.set_ylim(-0.02, 0.4)\nplt.show()"
+ added:
"# Author: Jake Vanderplas <[email protected]>\n#\nimport numpy as np\nimport matplotlib\nimport matplotlib.pyplot as plt\nfrom distutils.version import LooseVersion\nfrom scipy.stats import norm\nfrom sklearn.neighbors import KernelDensity\n\n# `normed` is being deprecated in favor of `density` in histograms\nif LooseVersion(matplotlib.__version__) >= '2.1':\n density_param = {'density': True}\nelse:\n density_param = {'normed': True}\n\n# ----------------------------------------------------------------------\n# Plot the progression of histograms to kernels\nnp.random.seed(1)\nN = 20\nX = np.concatenate((np.random.normal(0, 1, int(0.3 * N)),\n np.random.normal(5, 1, int(0.7 * N))))[:, np.newaxis]\nX_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]\nbins = np.linspace(-5, 10, 10)\n\nfig, ax = plt.subplots(2, 2, sharex=True, sharey=True)\nfig.subplots_adjust(hspace=0.05, wspace=0.05)\n\n# histogram 1\nax[0, 0].hist(X[:, 0], bins=bins, fc='#AAAAFF', **density_param)\nax[0, 0].text(-3.5, 0.31, \"Histogram\")\n\n# histogram 2\nax[0, 1].hist(X[:, 0], bins=bins + 0.75, fc='#AAAAFF', **density_param)\nax[0, 1].text(-3.5, 0.31, \"Histogram, bins shifted\")\n\n# tophat KDE\nkde = KernelDensity(kernel='tophat', bandwidth=0.75).fit(X)\nlog_dens = kde.score_samples(X_plot)\nax[1, 0].fill(X_plot[:, 0], np.exp(log_dens), fc='#AAAAFF')\nax[1, 0].text(-3.5, 0.31, \"Tophat Kernel Density\")\n\n# Gaussian KDE\nkde = KernelDensity(kernel='gaussian', bandwidth=0.75).fit(X)\nlog_dens = kde.score_samples(X_plot)\nax[1, 1].fill(X_plot[:, 0], np.exp(log_dens), fc='#AAAAFF')\nax[1, 1].text(-3.5, 0.31, \"Gaussian Kernel Density\")\n\nfor axi in ax.ravel():\n axi.plot(X[:, 0], np.full(X.shape[0], -0.01), '+k')\n axi.set_xlim(-4, 9)\n axi.set_ylim(-0.02, 0.34)\n\nfor axi in ax[:, 0]:\n axi.set_ylabel('Normalized Density')\n\nfor axi in ax[1, :]:\n axi.set_xlabel('x')\n\n# ----------------------------------------------------------------------\n# Plot all available kernels\nX_plot = np.linspace(-6, 6, 1000)[:, None]\nX_src = np.zeros((1, 1))\n\nfig, ax = plt.subplots(2, 3, sharex=True, sharey=True)\nfig.subplots_adjust(left=0.05, right=0.95, hspace=0.05, wspace=0.05)\n\n\ndef format_func(x, loc):\n if x == 0:\n return '0'\n elif x == 1:\n return 'h'\n elif x == -1:\n return '-h'\n else:\n return '%ih' % x\n\nfor i, kernel in enumerate(['gaussian', 'tophat', 'epanechnikov',\n 'exponential', 'linear', 'cosine']):\n axi = ax.ravel()[i]\n log_dens = KernelDensity(kernel=kernel).fit(X_src).score_samples(X_plot)\n axi.fill(X_plot[:, 0], np.exp(log_dens), '-k', fc='#AAAAFF')\n axi.text(-2.6, 0.95, kernel)\n\n axi.xaxis.set_major_formatter(plt.FuncFormatter(format_func))\n axi.xaxis.set_major_locator(plt.MultipleLocator(1))\n axi.yaxis.set_major_locator(plt.NullLocator())\n\n axi.set_ylim(0, 1.05)\n axi.set_xlim(-2.9, 2.9)\n\nax[0, 1].set_title('Available Kernels')\n\n# ----------------------------------------------------------------------\n# Plot a 1D density example\nN = 100\nnp.random.seed(1)\nX = np.concatenate((np.random.normal(0, 1, int(0.3 * N)),\n np.random.normal(5, 1, int(0.7 * N))))[:, np.newaxis]\n\nX_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]\n\ntrue_dens = (0.3 * norm(0, 1).pdf(X_plot[:, 0])\n + 0.7 * norm(5, 1).pdf(X_plot[:, 0]))\n\nfig, ax = plt.subplots()\nax.fill(X_plot[:, 0], true_dens, fc='black', alpha=0.2,\n label='input distribution')\ncolors = ['navy', 'cornflowerblue', 'darkorange']\nkernels = ['gaussian', 'tophat', 'epanechnikov']\nlw = 2\n\nfor color, kernel in zip(colors, kernels):\n kde = 
KernelDensity(kernel=kernel, bandwidth=0.5).fit(X)\n log_dens = kde.score_samples(X_plot)\n ax.plot(X_plot[:, 0], np.exp(log_dens), color=color, lw=lw,\n linestyle='-', label=\"kernel = '{0}'\".format(kernel))\n\nax.text(6, 0.38, \"N={0} points\".format(N))\n\nax.legend(loc='upper left')\nax.plot(X[:, 0], -0.005 - 0.01 * np.random.random(X.shape[0]), '+k')\n\nax.set_xlim(-4, 9)\nax.set_ylim(-0.02, 0.4)\nplt.show()"
 ]
 }
 ],

dev/_downloads/plot_kde_1d.py

Lines changed: 9 additions & 6 deletions
@@ -41,7 +41,7 @@
 else:
     density_param = {'normed': True}
 
-#----------------------------------------------------------------------
+# ----------------------------------------------------------------------
 # Plot the progression of histograms to kernels
 np.random.seed(1)
 N = 20
@@ -84,7 +84,7 @@
 for axi in ax[1, :]:
     axi.set_xlabel('x')
 
-#----------------------------------------------------------------------
+# ----------------------------------------------------------------------
 # Plot all available kernels
 X_plot = np.linspace(-6, 6, 1000)[:, None]
 X_src = np.zeros((1, 1))
@@ -119,7 +119,7 @@ def format_func(x, loc):
 
 ax[0, 1].set_title('Available Kernels')
 
-#----------------------------------------------------------------------
+# ----------------------------------------------------------------------
 # Plot a 1D density example
 N = 100
 np.random.seed(1)
@@ -134,12 +134,15 @@ def format_func(x, loc):
 fig, ax = plt.subplots()
 ax.fill(X_plot[:, 0], true_dens, fc='black', alpha=0.2,
         label='input distribution')
+colors = ['navy', 'cornflowerblue', 'darkorange']
+kernels = ['gaussian', 'tophat', 'epanechnikov']
+lw = 2
 
-for kernel in ['gaussian', 'tophat', 'epanechnikov']:
+for color, kernel in zip(colors, kernels):
     kde = KernelDensity(kernel=kernel, bandwidth=0.5).fit(X)
     log_dens = kde.score_samples(X_plot)
-    ax.plot(X_plot[:, 0], np.exp(log_dens), '-',
-            label="kernel = '{0}'".format(kernel))
+    ax.plot(X_plot[:, 0], np.exp(log_dens), color=color, lw=lw,
+            linestyle='-', label="kernel = '{0}'".format(kernel))
 
 ax.text(6, 0.38, "N={0} points".format(N))
 
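
Beyond the comment-spacing tweaks, the substantive change in this file is the final 1-D density plot: each kernel now gets an explicit color and line width instead of the default style cycle. A minimal self-contained sketch of that loop, assuming scikit-learn, NumPy, and matplotlib are available:

import numpy as np
import matplotlib.pyplot as plt
from sklearn.neighbors import KernelDensity

# bimodal sample: 30% of points around 0, 70% around 5
np.random.seed(1)
N = 100
X = np.concatenate((np.random.normal(0, 1, int(0.3 * N)),
                    np.random.normal(5, 1, int(0.7 * N))))[:, np.newaxis]
X_plot = np.linspace(-5, 10, 1000)[:, np.newaxis]

colors = ['navy', 'cornflowerblue', 'darkorange']
kernels = ['gaussian', 'tophat', 'epanechnikov']
lw = 2

fig, ax = plt.subplots()
for color, kernel in zip(colors, kernels):
    kde = KernelDensity(kernel=kernel, bandwidth=0.5).fit(X)
    log_dens = kde.score_samples(X_plot)  # log-density on the plot grid
    ax.plot(X_plot[:, 0], np.exp(log_dens), color=color, lw=lw,
            linestyle='-', label="kernel = '{0}'".format(kernel))
ax.legend(loc='upper left')
plt.show()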

dev/_downloads/plot_nearest_centroid.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
- removed:
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nfrom sklearn import datasets\nfrom sklearn.neighbors import NearestCentroid\n\nn_neighbors = 15\n\n# import some data to play with\niris = datasets.load_iris()\n# we only take the first two features. We could avoid this ugly\n# slicing by using a two-dim dataset\nX = iris.data[:, :2]\ny = iris.target\n\nh = .02 # step size in the mesh\n\n# Create color maps\ncmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])\ncmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])\n\nfor shrinkage in [None, .2]:\n # we create an instance of Neighbours Classifier and fit the data.\n clf = NearestCentroid(shrink_threshold=shrinkage)\n clf.fit(X, y)\n y_pred = clf.predict(X)\n print(shrinkage, np.mean(y == y_pred))\n # Plot the decision boundary. For that, we will assign a color to each\n # point in the mesh [x_min, x_max]x[y_min, y_max].\n x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n\n # Put the result into a color plot\n Z = Z.reshape(xx.shape)\n plt.figure()\n plt.pcolormesh(xx, yy, Z, cmap=cmap_light)\n\n # Plot also the training points\n plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,\n edgecolor='k', s=20)\n plt.title(\"3-Class classification (shrink_threshold=%r)\"\n % shrinkage)\n plt.axis('tight')\n\nplt.show()"
+ added:
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nfrom sklearn import datasets\nfrom sklearn.neighbors import NearestCentroid\n\nn_neighbors = 15\n\n# import some data to play with\niris = datasets.load_iris()\n# we only take the first two features. We could avoid this ugly\n# slicing by using a two-dim dataset\nX = iris.data[:, :2]\ny = iris.target\n\nh = .02 # step size in the mesh\n\n# Create color maps\ncmap_light = ListedColormap(['orange', 'cyan', 'cornflowerblue'])\ncmap_bold = ListedColormap(['darkorange', 'c', 'darkblue'])\n\nfor shrinkage in [None, .2]:\n # we create an instance of Neighbours Classifier and fit the data.\n clf = NearestCentroid(shrink_threshold=shrinkage)\n clf.fit(X, y)\n y_pred = clf.predict(X)\n print(shrinkage, np.mean(y == y_pred))\n # Plot the decision boundary. For that, we will assign a color to each\n # point in the mesh [x_min, x_max]x[y_min, y_max].\n x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n\n # Put the result into a color plot\n Z = Z.reshape(xx.shape)\n plt.figure()\n plt.pcolormesh(xx, yy, Z, cmap=cmap_light)\n\n # Plot also the training points\n plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,\n edgecolor='k', s=20)\n plt.title(\"3-Class classification (shrink_threshold=%r)\"\n % shrinkage)\n plt.axis('tight')\n\nplt.show()"
 ]
 }
 ],

dev/_downloads/plot_nearest_centroid.py

Lines changed: 2 additions & 2 deletions
@@ -26,8 +26,8 @@
 h = .02  # step size in the mesh
 
 # Create color maps
-cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
-cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])
+cmap_light = ListedColormap(['orange', 'cyan', 'cornflowerblue'])
+cmap_bold = ListedColormap(['darkorange', 'c', 'darkblue'])
 
 for shrinkage in [None, .2]:
     # we create an instance of Neighbours Classifier and fit the data.
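
The nearest-centroid pair receives the same colormap rename; the classifier logic itself is untouched. For context, a stripped-down sketch of the core loop (assuming scikit-learn is installed), which reports training accuracy with and without centroid shrinkage before plotting:

import numpy as np
from sklearn import datasets
from sklearn.neighbors import NearestCentroid

# first two iris features only, as in the example
iris = datasets.load_iris()
X, y = iris.data[:, :2], iris.target

for shrinkage in [None, .2]:
    clf = NearestCentroid(shrink_threshold=shrinkage)
    clf.fit(X, y)
    y_pred = clf.predict(X)
    print(shrinkage, np.mean(y == y_pred))  # training accuracy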

dev/_downloads/plot_regression.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
- removed:
"print(__doc__)\n\n# Author: Alexandre Gramfort <[email protected]>\n# Fabian Pedregosa <[email protected]>\n#\n# License: BSD 3 clause (C) INRIA\n\n\n# #############################################################################\n# Generate sample data\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn import neighbors\n\nnp.random.seed(0)\nX = np.sort(5 * np.random.rand(40, 1), axis=0)\nT = np.linspace(0, 5, 500)[:, np.newaxis]\ny = np.sin(X).ravel()\n\n# Add noise to targets\ny[::5] += 1 * (0.5 - np.random.rand(8))\n\n# #############################################################################\n# Fit regression model\nn_neighbors = 5\n\nfor i, weights in enumerate(['uniform', 'distance']):\n knn = neighbors.KNeighborsRegressor(n_neighbors, weights=weights)\n y_ = knn.fit(X, y).predict(T)\n\n plt.subplot(2, 1, i + 1)\n plt.scatter(X, y, c='k', label='data')\n plt.plot(T, y_, c='g', label='prediction')\n plt.axis('tight')\n plt.legend()\n plt.title(\"KNeighborsRegressor (k = %i, weights = '%s')\" % (n_neighbors,\n weights))\n\nplt.tight_layout()\nplt.show()"
+ added:
"print(__doc__)\n\n# Author: Alexandre Gramfort <[email protected]>\n# Fabian Pedregosa <[email protected]>\n#\n# License: BSD 3 clause (C) INRIA\n\n\n# #############################################################################\n# Generate sample data\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn import neighbors\n\nnp.random.seed(0)\nX = np.sort(5 * np.random.rand(40, 1), axis=0)\nT = np.linspace(0, 5, 500)[:, np.newaxis]\ny = np.sin(X).ravel()\n\n# Add noise to targets\ny[::5] += 1 * (0.5 - np.random.rand(8))\n\n# #############################################################################\n# Fit regression model\nn_neighbors = 5\n\nfor i, weights in enumerate(['uniform', 'distance']):\n knn = neighbors.KNeighborsRegressor(n_neighbors, weights=weights)\n y_ = knn.fit(X, y).predict(T)\n\n plt.subplot(2, 1, i + 1)\n plt.scatter(X, y, color='darkorange', label='data')\n plt.plot(T, y_, color='navy', label='prediction')\n plt.axis('tight')\n plt.legend()\n plt.title(\"KNeighborsRegressor (k = %i, weights = '%s')\" % (n_neighbors,\n weights))\n\nplt.tight_layout()\nplt.show()"
 ]
 }
 ],
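
Here only the plot colors change: the scatter of noisy samples becomes 'darkorange' and the prediction curve 'navy', replacing the single-letter 'k'/'g' codes. A condensed sketch of the updated regression example, assuming scikit-learn and matplotlib are installed:

import numpy as np
import matplotlib.pyplot as plt
from sklearn import neighbors

# noisy sine-wave training data, dense grid for prediction
np.random.seed(0)
X = np.sort(5 * np.random.rand(40, 1), axis=0)
T = np.linspace(0, 5, 500)[:, np.newaxis]
y = np.sin(X).ravel()
y[::5] += 1 * (0.5 - np.random.rand(8))

n_neighbors = 5
for i, weights in enumerate(['uniform', 'distance']):
    knn = neighbors.KNeighborsRegressor(n_neighbors, weights=weights)
    y_ = knn.fit(X, y).predict(T)

    plt.subplot(2, 1, i + 1)
    plt.scatter(X, y, color='darkorange', label='data')  # was c='k'
    plt.plot(T, y_, color='navy', label='prediction')    # was c='g'
    plt.legend()
    plt.title("KNeighborsRegressor (k = %i, weights = '%s')"
              % (n_neighbors, weights))

plt.tight_layout()
plt.show()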
