
Commit 2a2ef53

Pushing the docs to dev/ for branch: master, commit df7942af77fbe98c5436035cb088893964a74026
1 parent b96b240 commit 2a2ef53

File tree: 1,110 files changed (+3543, -3021 lines)

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
"""
=====================================
Plot the support vectors in LinearSVC
=====================================

Unlike SVC (based on LIBSVM), LinearSVC (based on LIBLINEAR) does not provide
the support vectors. This example demonstrates how to obtain the support
vectors in LinearSVC.

"""

import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_blobs
from sklearn.svm import LinearSVC

X, y = make_blobs(n_samples=40, centers=2, random_state=0)

plt.figure(figsize=(10, 5))
for i, C in enumerate([1, 100]):
    # "hinge" is the standard SVM loss
    clf = LinearSVC(C=C, loss="hinge", random_state=42).fit(X, y)
    # obtain the support vectors through the decision function
    decision_function = clf.decision_function(X)
    # we can also calculate the decision function manually
    # decision_function = np.dot(X, clf.coef_[0]) + clf.intercept_[0]
    support_vector_indices = np.where((2 * y - 1) * decision_function <= 1)[0]
    support_vectors = X[support_vector_indices]

    plt.subplot(1, 2, i + 1)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=plt.cm.Paired)
    ax = plt.gca()
    xlim = ax.get_xlim()
    ylim = ax.get_ylim()
    xx, yy = np.meshgrid(np.linspace(xlim[0], xlim[1], 50),
                         np.linspace(ylim[0], ylim[1], 50))
    Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    plt.contour(xx, yy, Z, colors='k', levels=[-1, 0, 1], alpha=0.5,
                linestyles=['--', '-', '--'])
    plt.scatter(support_vectors[:, 0], support_vectors[:, 1], s=100,
                linewidth=1, facecolors='none', edgecolors='k')
    plt.title("C=" + str(C))
plt.tight_layout()
plt.show()
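
For context on the selection step in the example above: make_blobs returns labels in {0, 1}, so 2 * y - 1 maps them to the signed labels {-1, +1} used by the hinge loss, and a sample counts as a support vector when its signed margin (2 * y - 1) * decision_function is at most 1, i.e. it lies on or inside the margin. The sketch below isolates that check for a single value of C and verifies the manual coef_/intercept_ computation mentioned in the commented-out line; the assertion and the printed count are illustrative additions, not part of this commit.

import numpy as np
from sklearn.datasets import make_blobs
from sklearn.svm import LinearSVC

X, y = make_blobs(n_samples=40, centers=2, random_state=0)
clf = LinearSVC(C=1, loss="hinge", random_state=42).fit(X, y)

# the public decision_function and the manual coef_/intercept_ form agree
df_api = clf.decision_function(X)
df_manual = X @ clf.coef_[0] + clf.intercept_[0]
assert np.allclose(df_api, df_manual)

# map the {0, 1} labels to signed labels {-1, +1}, then keep the samples
# whose signed margin is <= 1 (on or inside the margin): the support vectors
signed_margin = (2 * y - 1) * df_api
support_vector_indices = np.where(signed_margin <= 1)[0]
print("support vectors:", len(support_vector_indices), "of", len(X))
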
Lines changed: 54 additions & 0 deletions
@@ -0,0 +1,54 @@
{
  "cells": [
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "collapsed": false
      },
      "outputs": [],
      "source": [
        "%matplotlib inline"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "\n# Plot the support vectors in LinearSVC\n\n\nUnlike SVC (based on LIBSVM), LinearSVC (based on LIBLINEAR) does not provide\nthe support vectors. This example demonstrates how to obtain the support\nvectors in LinearSVC.\n\n\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "collapsed": false
      },
      "outputs": [],
      "source": [
        "import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.datasets import make_blobs\nfrom sklearn.svm import LinearSVC\n\nX, y = make_blobs(n_samples=40, centers=2, random_state=0)\n\nplt.figure(figsize=(10, 5))\nfor i, C in enumerate([1, 100]):\n    # \"hinge\" is the standard SVM loss\n    clf = LinearSVC(C=C, loss=\"hinge\", random_state=42).fit(X, y)\n    # obtain the support vectors through the decision function\n    decision_function = clf.decision_function(X)\n    # we can also calculate the decision function manually\n    # decision_function = np.dot(X, clf.coef_[0]) + clf.intercept_[0]\n    support_vector_indices = np.where((2 * y - 1) * decision_function <= 1)[0]\n    support_vectors = X[support_vector_indices]\n\n    plt.subplot(1, 2, i + 1)\n    plt.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=plt.cm.Paired)\n    ax = plt.gca()\n    xlim = ax.get_xlim()\n    ylim = ax.get_ylim()\n    xx, yy = np.meshgrid(np.linspace(xlim[0], xlim[1], 50),\n                         np.linspace(ylim[0], ylim[1], 50))\n    Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])\n    Z = Z.reshape(xx.shape)\n    plt.contour(xx, yy, Z, colors='k', levels=[-1, 0, 1], alpha=0.5,\n                linestyles=['--', '-', '--'])\n    plt.scatter(support_vectors[:, 0], support_vectors[:, 1], s=100,\n                linewidth=1, facecolors='none', edgecolors='k')\n    plt.title(\"C=\" + str(C))\nplt.tight_layout()\nplt.show()"
      ]
    }
  ],
  "metadata": {
    "kernelspec": {
      "display_name": "Python 3",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.7.3"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}
dev/_downloads/scikit-learn-docs.pdf: 46 KB (binary file not shown)
dev/_images/iris.png: 0 Bytes
