Skip to content

Commit 1b311a7

Browse files
committed
Pushing the docs to dev/ for branch: master, commit ef3937bb98a9f3a66801c0471e446767a405c193
1 parent 4600efa commit 1b311a7

File tree

1,209 files changed

+3637
-3621
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

1,209 files changed

+3637
-3621
lines changed
Binary file not shown.

dev/_downloads/47f024d726d245e034c7690b4664721f/plot_classification.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
},
2727
"outputs": [],
2828
"source": [
29-
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nfrom sklearn import neighbors, datasets\n\nn_neighbors = 15\n\n# import some data to play with\niris = datasets.load_iris()\n\n# we only take the first two features. We could avoid this ugly\n# slicing by using a two-dim dataset\nX = iris.data[:, :2]\ny = iris.target\n\nh = .02 # step size in the mesh\n\n# Create color maps\ncmap_light = ListedColormap(['orange', 'cyan', 'cornflowerblue'])\ncmap_bold = ListedColormap(['darkorange', 'c', 'darkblue'])\n\nfor weights in ['uniform', 'distance']:\n # we create an instance of Neighbours Classifier and fit the data.\n clf = neighbors.KNeighborsClassifier(n_neighbors, weights=weights)\n clf.fit(X, y)\n\n # Plot the decision boundary. For that, we will assign a color to each\n # point in the mesh [x_min, x_max]x[y_min, y_max].\n x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n\n # Put the result into a color plot\n Z = Z.reshape(xx.shape)\n plt.figure()\n plt.contourf(xx, yy, Z, cmap=cmap_light)\n\n # Plot also the training points\n plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,\n edgecolor='k', s=20)\n plt.xlim(xx.min(), xx.max())\n plt.ylim(yy.min(), yy.max())\n plt.title(\"3-Class classification (k = %i, weights = '%s')\"\n % (n_neighbors, weights))\n\nplt.show()"
29+
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom matplotlib.colors import ListedColormap\nfrom sklearn import neighbors, datasets\n\nn_neighbors = 15\n\n# import some data to play with\niris = datasets.load_iris()\n\n# we only take the first two features. We could avoid this ugly\n# slicing by using a two-dim dataset\nX = iris.data[:, :2]\ny = iris.target\n\nh = .02 # step size in the mesh\n\n# Create color maps\ncmap_light = ListedColormap(['orange', 'cyan', 'cornflowerblue'])\ncmap_bold = ['darkorange', 'c', 'darkblue']\n\nfor weights in ['uniform', 'distance']:\n # we create an instance of Neighbours Classifier and fit the data.\n clf = neighbors.KNeighborsClassifier(n_neighbors, weights=weights)\n clf.fit(X, y)\n\n # Plot the decision boundary. For that, we will assign a color to each\n # point in the mesh [x_min, x_max]x[y_min, y_max].\n x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n\n # Put the result into a color plot\n Z = Z.reshape(xx.shape)\n plt.figure(figsize=(8, 6))\n plt.contourf(xx, yy, Z, cmap=cmap_light)\n\n # Plot also the training points\n sns.scatterplot(x=X[:, 0], y=X[:, 1], hue=iris.target_names[y],\n palette=cmap_bold, alpha=1.0, edgecolor=\"black\")\n plt.xlim(xx.min(), xx.max())\n plt.ylim(yy.min(), yy.max())\n plt.title(\"3-Class classification (k = %i, weights = '%s')\"\n % (n_neighbors, weights))\n plt.xlabel(iris.feature_names[0])\n plt.ylabel(iris.feature_names[1])\n\nplt.show()"
3030
]
3131
}
3232
],

dev/_downloads/8d0cc737ca20800f70d8aa80d8b8fb7d/plot_classification.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010

1111
import numpy as np
1212
import matplotlib.pyplot as plt
13+
import seaborn as sns
1314
from matplotlib.colors import ListedColormap
1415
from sklearn import neighbors, datasets
1516

@@ -27,7 +28,7 @@
2728

2829
# Create color maps
2930
cmap_light = ListedColormap(['orange', 'cyan', 'cornflowerblue'])
30-
cmap_bold = ListedColormap(['darkorange', 'c', 'darkblue'])
31+
cmap_bold = ['darkorange', 'c', 'darkblue']
3132

3233
for weights in ['uniform', 'distance']:
3334
# we create an instance of Neighbours Classifier and fit the data.
@@ -44,15 +45,17 @@
4445

4546
# Put the result into a color plot
4647
Z = Z.reshape(xx.shape)
47-
plt.figure()
48+
plt.figure(figsize=(8, 6))
4849
plt.contourf(xx, yy, Z, cmap=cmap_light)
4950

5051
# Plot also the training points
51-
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,
52-
edgecolor='k', s=20)
52+
sns.scatterplot(x=X[:, 0], y=X[:, 1], hue=iris.target_names[y],
53+
palette=cmap_bold, alpha=1.0, edgecolor="black")
5354
plt.xlim(xx.min(), xx.max())
5455
plt.ylim(yy.min(), yy.max())
5556
plt.title("3-Class classification (k = %i, weights = '%s')"
5657
% (n_neighbors, weights))
58+
plt.xlabel(iris.feature_names[0])
59+
plt.ylabel(iris.feature_names[1])
5760

5861
plt.show()
Binary file not shown.

dev/_downloads/scikit-learn-docs.pdf

65.7 KB
Binary file not shown.

dev/_images/iris.png

0 Bytes

0 commit comments

Comments (0)