Commit c972cf3

Pushing the docs to dev/ for branch: master, commit 8611b9a9af10b46cbf68bb7fc71a44a49ed6ec1f
1 parent 92f1839 commit c972cf3

File tree

1,199 files changed: +3,664 -3,658 lines


dev/_downloads/036b9372e2e7802453cbb994da7a6786/plot_linearsvc_support_vectors.py

Lines changed: 4 additions & 1 deletion
@@ -24,7 +24,10 @@
     decision_function = clf.decision_function(X)
     # we can also calculate the decision function manually
     # decision_function = np.dot(X, clf.coef_[0]) + clf.intercept_[0]
-    support_vector_indices = np.where((2 * y - 1) * decision_function <= 1)[0]
+    # The support vectors are the samples that lie within the margin
+    # boundaries, whose size is conventionally constrained to 1
+    support_vector_indices = np.where(
+        np.abs(decision_function) <= 1 + 1e-15)[0]
     support_vectors = X[support_vector_indices]
 
     plt.subplot(1, 2, i + 1)
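
As context for the change above (not part of this commit), the following standalone sketch contrasts the previous selection criterion with the new one on the same toy data the example uses: the old test (2 * y - 1) * decision_function <= 1 flags margin violations on each sample's own class side, while the new test keeps the samples whose decision value falls inside the margin band, |decision_function| <= 1, with a small tolerance for floating-point round-off.

# Illustrative sketch only; mirrors the example's data and settings (C=1 shown here).
import numpy as np
from sklearn.datasets import make_blobs
from sklearn.svm import LinearSVC

X, y = make_blobs(n_samples=40, centers=2, random_state=0)
clf = LinearSVC(C=1, loss="hinge", random_state=42).fit(X, y)

# Signed distance (up to a scaling factor) of each sample from the hyperplane.
decision_function = clf.decision_function(X)

# Old criterion: margin violations on the sample's own class side;
# (2 * y - 1) maps the {0, 1} labels to {-1, +1}.
old_indices = np.where((2 * y - 1) * decision_function <= 1)[0]

# New criterion: samples lying within the margin band |f(x)| <= 1,
# with a small tolerance for floating-point round-off.
new_indices = np.where(np.abs(decision_function) <= 1 + 1e-15)[0]

print("old selection:", old_indices)
print("new selection:", new_indices)

For well-separated blobs like these the two index sets largely coincide; they would differ for a sample misclassified beyond the opposite margin boundary, which only the old test keeps.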

dev/_downloads/12a392e818ac5fa47dd91461855f3f77/plot_linearsvc_support_vectors.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
    },
    "outputs": [],
    "source": [
-     "import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.datasets import make_blobs\nfrom sklearn.svm import LinearSVC\n\nX, y = make_blobs(n_samples=40, centers=2, random_state=0)\n\nplt.figure(figsize=(10, 5))\nfor i, C in enumerate([1, 100]):\n    # \"hinge\" is the standard SVM loss\n    clf = LinearSVC(C=C, loss=\"hinge\", random_state=42).fit(X, y)\n    # obtain the support vectors through the decision function\n    decision_function = clf.decision_function(X)\n    # we can also calculate the decision function manually\n    # decision_function = np.dot(X, clf.coef_[0]) + clf.intercept_[0]\n    support_vector_indices = np.where((2 * y - 1) * decision_function <= 1)[0]\n    support_vectors = X[support_vector_indices]\n\n    plt.subplot(1, 2, i + 1)\n    plt.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=plt.cm.Paired)\n    ax = plt.gca()\n    xlim = ax.get_xlim()\n    ylim = ax.get_ylim()\n    xx, yy = np.meshgrid(np.linspace(xlim[0], xlim[1], 50),\n                         np.linspace(ylim[0], ylim[1], 50))\n    Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])\n    Z = Z.reshape(xx.shape)\n    plt.contour(xx, yy, Z, colors='k', levels=[-1, 0, 1], alpha=0.5,\n                linestyles=['--', '-', '--'])\n    plt.scatter(support_vectors[:, 0], support_vectors[:, 1], s=100,\n                linewidth=1, facecolors='none', edgecolors='k')\n    plt.title(\"C=\" + str(C))\nplt.tight_layout()\nplt.show()"
+     "import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.datasets import make_blobs\nfrom sklearn.svm import LinearSVC\n\nX, y = make_blobs(n_samples=40, centers=2, random_state=0)\n\nplt.figure(figsize=(10, 5))\nfor i, C in enumerate([1, 100]):\n    # \"hinge\" is the standard SVM loss\n    clf = LinearSVC(C=C, loss=\"hinge\", random_state=42).fit(X, y)\n    # obtain the support vectors through the decision function\n    decision_function = clf.decision_function(X)\n    # we can also calculate the decision function manually\n    # decision_function = np.dot(X, clf.coef_[0]) + clf.intercept_[0]\n    # The support vectors are the samples that lie within the margin\n    # boundaries, whose size is conventionally constrained to 1\n    support_vector_indices = np.where(\n        np.abs(decision_function) <= 1 + 1e-15)[0]\n    support_vectors = X[support_vector_indices]\n\n    plt.subplot(1, 2, i + 1)\n    plt.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=plt.cm.Paired)\n    ax = plt.gca()\n    xlim = ax.get_xlim()\n    ylim = ax.get_ylim()\n    xx, yy = np.meshgrid(np.linspace(xlim[0], xlim[1], 50),\n                         np.linspace(ylim[0], ylim[1], 50))\n    Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])\n    Z = Z.reshape(xx.shape)\n    plt.contour(xx, yy, Z, colors='k', levels=[-1, 0, 1], alpha=0.5,\n                linestyles=['--', '-', '--'])\n    plt.scatter(support_vectors[:, 0], support_vectors[:, 1], s=100,\n                linewidth=1, facecolors='none', edgecolors='k')\n    plt.title(\"C=\" + str(C))\nplt.tight_layout()\nplt.show()"
    ]
   }
  ],
2 binary files changed (not shown).

dev/_downloads/scikit-learn-docs.pdf: -16.1 KB (binary file not shown)

dev/_images/iris.png: 0 Bytes

0 commit comments
