Skip to content

Commit 3ad7244

Browse files
committed
Pushing the docs to dev/ for branch: master, commit 9743579b5aba2933c207dbcd02f1b4fbf8129490
1 parent c40a74f commit 3ad7244

File tree

950 files changed

+3056
-2975
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

950 files changed

+3056
-2975
lines changed
525 Bytes
Binary file not shown.
486 Bytes
Binary file not shown.

dev/_downloads/plot_iris.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
"execution_count": null,
2525
"cell_type": "code",
2626
"source": [
27-
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn import svm, datasets\n\n# import some data to play with\niris = datasets.load_iris()\nX = iris.data[:, :2] # we only take the first two features. We could\n # avoid this ugly slicing by using a two-dim dataset\ny = iris.target\n\nh = .02 # step size in the mesh\n\n# we create an instance of SVM and fit out data. We do not scale our\n# data since we want to plot the support vectors\nC = 1.0 # SVM regularization parameter\nsvc = svm.SVC(kernel='linear', C=C).fit(X, y)\nrbf_svc = svm.SVC(kernel='rbf', gamma=0.7, C=C).fit(X, y)\npoly_svc = svm.SVC(kernel='poly', degree=3, C=C).fit(X, y)\nlin_svc = svm.LinearSVC(C=C).fit(X, y)\n\n# create a mesh to plot in\nx_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\ny_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\nxx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n\n# title for the plots\ntitles = ['SVC with linear kernel',\n 'LinearSVC (linear kernel)',\n 'SVC with RBF kernel',\n 'SVC with polynomial (degree 3) kernel']\n\n\nfor i, clf in enumerate((svc, lin_svc, rbf_svc, poly_svc)):\n # Plot the decision boundary. For that, we will assign a color to each\n # point in the mesh [x_min, x_max]x[y_min, y_max].\n plt.subplot(2, 2, i + 1)\n plt.subplots_adjust(wspace=0.4, hspace=0.4)\n\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n\n # Put the result into a color plot\n Z = Z.reshape(xx.shape)\n plt.contourf(xx, yy, Z, cmap=plt.cm.coolwarm, alpha=0.8)\n\n # Plot also the training points\n plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.coolwarm)\n plt.xlabel('Sepal length')\n plt.ylabel('Sepal width')\n plt.xlim(xx.min(), xx.max())\n plt.ylim(yy.min(), yy.max())\n plt.xticks(())\n plt.yticks(())\n plt.title(titles[i])\n\nplt.show()"
27+
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn import svm, datasets\n\n\ndef make_meshgrid(x, y, h=.02):\n    \"\"\"Create a mesh of points to plot in\n\n    Parameters\n    ----------\n    x: data to base x-axis meshgrid on\n    y: data to base y-axis meshgrid on\n    h: stepsize for meshgrid, optional\n\n    Returns\n    -------\n    xx, yy : ndarray\n    \"\"\"\n    x_min, x_max = x.min() - 1, x.max() + 1\n    y_min, y_max = y.min() - 1, y.max() + 1\n    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n                         np.arange(y_min, y_max, h))\n    return xx, yy\n\n\ndef plot_contours(ax, clf, xx, yy, **params):\n    \"\"\"Plot the decision boundaries for a classifier.\n\n    Parameters\n    ----------\n    ax: matplotlib axes object\n    clf: a classifier\n    xx: meshgrid ndarray\n    yy: meshgrid ndarray\n    params: dictionary of params to pass to contourf, optional\n    \"\"\"\n    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n    Z = Z.reshape(xx.shape)\n    out = ax.contourf(xx, yy, Z, **params)\n    return out\n\n\n# import some data to play with\niris = datasets.load_iris()\n# Take the first two features. We could avoid this by using a two-dim dataset\nX = iris.data[:, :2]\ny = iris.target\n\n# we create an instance of SVM and fit out data. We do not scale our\n# data since we want to plot the support vectors\nC = 1.0  # SVM regularization parameter\nmodels = (svm.SVC(kernel='linear', C=C),\n          svm.LinearSVC(C=C),\n          svm.SVC(kernel='rbf', gamma=0.7, C=C),\n          svm.SVC(kernel='poly', degree=3, C=C))\nmodels = (clf.fit(X, y) for clf in models)\n\n# title for the plots\ntitles = ('SVC with linear kernel',\n          'LinearSVC (linear kernel)',\n          'SVC with RBF kernel',\n          'SVC with polynomial (degree 3) kernel')\n\n# Set-up 2x2 grid for plotting.\nfig, sub = plt.subplots(2, 2)\nplt.subplots_adjust(wspace=0.4, hspace=0.4)\n\nX0, X1 = X[:, 0], X[:, 1]\nxx, yy = make_meshgrid(X0, X1)\n\nfor clf, title, ax in zip(models, titles, sub.flatten()):\n    plot_contours(ax, clf, xx, yy,\n                  cmap=plt.cm.coolwarm, alpha=0.8)\n    ax.scatter(X0, X1, c=y, cmap=plt.cm.coolwarm, s=20, edgecolors='k')\n    ax.set_xlim(xx.min(), xx.max())\n    ax.set_ylim(yy.min(), yy.max())\n    ax.set_xlabel('Sepal length')\n    ax.set_ylabel('Sepal width')\n    ax.set_xticks(())\n    ax.set_yticks(())\n    ax.set_title(title)\n\nplt.show()"
2828
],
2929
"outputs": [],
3030
"metadata": {

dev/_downloads/plot_iris.py

Lines changed: 66 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -39,55 +39,82 @@
3939
import matplotlib.pyplot as plt
4040
from sklearn import svm, datasets
4141

42+
43+
def make_meshgrid(x, y, h=.02):
44+
"""Create a mesh of points to plot in
45+
46+
Parameters
47+
----------
48+
x: data to base x-axis meshgrid on
49+
y: data to base y-axis meshgrid on
50+
h: stepsize for meshgrid, optional
51+
52+
Returns
53+
-------
54+
xx, yy : ndarray
55+
"""
56+
x_min, x_max = x.min() - 1, x.max() + 1
57+
y_min, y_max = y.min() - 1, y.max() + 1
58+
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
59+
np.arange(y_min, y_max, h))
60+
return xx, yy
61+
62+
63+
def plot_contours(ax, clf, xx, yy, **params):
64+
"""Plot the decision boundaries for a classifier.
65+
66+
Parameters
67+
----------
68+
ax: matplotlib axes object
69+
clf: a classifier
70+
xx: meshgrid ndarray
71+
yy: meshgrid ndarray
72+
params: dictionary of params to pass to contourf, optional
73+
"""
74+
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
75+
Z = Z.reshape(xx.shape)
76+
out = ax.contourf(xx, yy, Z, **params)
77+
return out
78+
79+
4280
# import some data to play with
4381
iris = datasets.load_iris()
44-
X = iris.data[:, :2] # we only take the first two features. We could
45-
# avoid this ugly slicing by using a two-dim dataset
82+
# Take the first two features. We could avoid this by using a two-dim dataset
83+
X = iris.data[:, :2]
4684
y = iris.target
4785

48-
h = .02 # step size in the mesh
49-
5086
# we create an instance of SVM and fit out data. We do not scale our
5187
# data since we want to plot the support vectors
5288
C = 1.0 # SVM regularization parameter
53-
svc = svm.SVC(kernel='linear', C=C).fit(X, y)
54-
rbf_svc = svm.SVC(kernel='rbf', gamma=0.7, C=C).fit(X, y)
55-
poly_svc = svm.SVC(kernel='poly', degree=3, C=C).fit(X, y)
56-
lin_svc = svm.LinearSVC(C=C).fit(X, y)
57-
58-
# create a mesh to plot in
59-
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
60-
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
61-
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
62-
np.arange(y_min, y_max, h))
89+
models = (svm.SVC(kernel='linear', C=C),
90+
svm.LinearSVC(C=C),
91+
svm.SVC(kernel='rbf', gamma=0.7, C=C),
92+
svm.SVC(kernel='poly', degree=3, C=C))
93+
models = (clf.fit(X, y) for clf in models)
6394

6495
# title for the plots
65-
titles = ['SVC with linear kernel',
96+
titles = ('SVC with linear kernel',
6697
'LinearSVC (linear kernel)',
6798
'SVC with RBF kernel',
68-
'SVC with polynomial (degree 3) kernel']
69-
70-
71-
for i, clf in enumerate((svc, lin_svc, rbf_svc, poly_svc)):
72-
# Plot the decision boundary. For that, we will assign a color to each
73-
# point in the mesh [x_min, x_max]x[y_min, y_max].
74-
plt.subplot(2, 2, i + 1)
75-
plt.subplots_adjust(wspace=0.4, hspace=0.4)
76-
77-
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
78-
79-
# Put the result into a color plot
80-
Z = Z.reshape(xx.shape)
81-
plt.contourf(xx, yy, Z, cmap=plt.cm.coolwarm, alpha=0.8)
82-
83-
# Plot also the training points
84-
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.coolwarm)
85-
plt.xlabel('Sepal length')
86-
plt.ylabel('Sepal width')
87-
plt.xlim(xx.min(), xx.max())
88-
plt.ylim(yy.min(), yy.max())
89-
plt.xticks(())
90-
plt.yticks(())
91-
plt.title(titles[i])
99+
'SVC with polynomial (degree 3) kernel')
100+
101+
# Set-up 2x2 grid for plotting.
102+
fig, sub = plt.subplots(2, 2)
103+
plt.subplots_adjust(wspace=0.4, hspace=0.4)
104+
105+
X0, X1 = X[:, 0], X[:, 1]
106+
xx, yy = make_meshgrid(X0, X1)
107+
108+
for clf, title, ax in zip(models, titles, sub.flatten()):
109+
plot_contours(ax, clf, xx, yy,
110+
cmap=plt.cm.coolwarm, alpha=0.8)
111+
ax.scatter(X0, X1, c=y, cmap=plt.cm.coolwarm, s=20, edgecolors='k')
112+
ax.set_xlim(xx.min(), xx.max())
113+
ax.set_ylim(yy.min(), yy.max())
114+
ax.set_xlabel('Sepal length')
115+
ax.set_ylabel('Sepal width')
116+
ax.set_xticks(())
117+
ax.set_yticks(())
118+
ax.set_title(title)
92119

93120
plt.show()

dev/_downloads/scikit-learn-docs.pdf

42.5 KB
Binary file not shown.
-170 Bytes
-170 Bytes
-163 Bytes
-163 Bytes
157 Bytes

0 commit comments

Comments (0)