
Commit 512a7ce

Pushing the docs to dev/ for branch: main, commit d400723a2112f15c5d5b4d40dfac2ed8a19cca5c

1 parent: 9434091
1,308 files changed: +5494 additions, -5194 deletions


dev/_downloads/00ae629d652473137a3905a5e08ea815/plot_iris_dtc.py

Lines changed: 14 additions & 14 deletions
@@ -25,7 +25,11 @@
 # Display the decision functions of trees trained on all pairs of features.
 import numpy as np
 import matplotlib.pyplot as plt
+
+from sklearn.datasets import load_iris
 from sklearn.tree import DecisionTreeClassifier
+from sklearn.inspection import DecisionBoundaryDisplay
+
 
 # Parameters
 n_classes = 3
@@ -42,21 +46,17 @@
     clf = DecisionTreeClassifier().fit(X, y)
 
     # Plot the decision boundary
-    plt.subplot(2, 3, pairidx + 1)
-
-    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
-    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
-    xx, yy = np.meshgrid(
-        np.arange(x_min, x_max, plot_step), np.arange(y_min, y_max, plot_step)
-    )
+    ax = plt.subplot(2, 3, pairidx + 1)
     plt.tight_layout(h_pad=0.5, w_pad=0.5, pad=2.5)
-
-    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
-    Z = Z.reshape(xx.shape)
-    cs = plt.contourf(xx, yy, Z, cmap=plt.cm.RdYlBu)
-
-    plt.xlabel(iris.feature_names[pair[0]])
-    plt.ylabel(iris.feature_names[pair[1]])
+    DecisionBoundaryDisplay.from_estimator(
+        clf,
+        X,
+        cmap=plt.cm.RdYlBu,
+        response_method="predict",
+        ax=ax,
+        xlabel=iris.feature_names[pair[0]],
+        ylabel=iris.feature_names[pair[1]],
+    )
 
     # Plot the training points
     for i, color in zip(range(n_classes), plot_colors):
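
The pattern above repeats throughout this commit: the hand-rolled np.meshgrid / predict / reshape / contourf pipeline collapses into a single call to DecisionBoundaryDisplay.from_estimator (added to sklearn.inspection in scikit-learn 1.1). A minimal, self-contained sketch of the new idiom, not taken verbatim from the diff:

import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.inspection import DecisionBoundaryDisplay
from sklearn.tree import DecisionTreeClassifier

# Fit on two features so the decision surface is two-dimensional.
X, y = load_iris(return_X_y=True)
X = X[:, :2]
clf = DecisionTreeClassifier().fit(X, y)

# One call builds the evaluation grid, queries the estimator, and draws
# filled contours; axis labels are plain keyword arguments.
DecisionBoundaryDisplay.from_estimator(
    clf,
    X,
    response_method="predict",
    cmap=plt.cm.RdYlBu,
    xlabel="sepal length (cm)",
    ylabel="sepal width (cm)",
)
plt.scatter(X[:, 0], X[:, 1], c=y, edgecolor="k")
plt.show()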

dev/_downloads/036b9372e2e7802453cbb994da7a6786/plot_linearsvc_support_vectors.py

Lines changed: 7 additions & 11 deletions
@@ -13,6 +13,7 @@
 import matplotlib.pyplot as plt
 from sklearn.datasets import make_blobs
 from sklearn.svm import LinearSVC
+from sklearn.inspection import DecisionBoundaryDisplay
 
 X, y = make_blobs(n_samples=40, centers=2, random_state=0)
 
@@ -32,17 +33,12 @@
     plt.subplot(1, 2, i + 1)
     plt.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=plt.cm.Paired)
     ax = plt.gca()
-    xlim = ax.get_xlim()
-    ylim = ax.get_ylim()
-    xx, yy = np.meshgrid(
-        np.linspace(xlim[0], xlim[1], 50), np.linspace(ylim[0], ylim[1], 50)
-    )
-    Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
-    Z = Z.reshape(xx.shape)
-    plt.contour(
-        xx,
-        yy,
-        Z,
+    DecisionBoundaryDisplay.from_estimator(
+        clf,
+        X,
+        ax=ax,
+        grid_resolution=50,
+        plot_method="contour",
         colors="k",
         levels=[-1, 0, 1],
         alpha=0.5,
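
Note that colors, levels, alpha, and linestyles are not from_estimator parameters: keywords the display does not consume are forwarded to the matplotlib function named by plot_method, so the old plt.contour styling survives unchanged, and grid_resolution=50 stands in for the former 50-point linspace mesh. A standalone sketch (for this binary LinearSVC, the default response_method="auto" resolves to decision_function, which is what makes levels=[-1, 0, 1] meaningful):

import matplotlib.pyplot as plt
from sklearn.datasets import make_blobs
from sklearn.inspection import DecisionBoundaryDisplay
from sklearn.svm import LinearSVC

X, y = make_blobs(n_samples=40, centers=2, random_state=0)
clf = LinearSVC(C=1, loss="hinge", random_state=42).fit(X, y)

plt.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=plt.cm.Paired)
# levels=[-1, 0, 1] draws both margins and the separating line.
DecisionBoundaryDisplay.from_estimator(
    clf,
    X,
    ax=plt.gca(),
    grid_resolution=50,
    plot_method="contour",
    colors="k",
    levels=[-1, 0, 1],
    alpha=0.5,
    linestyles=["--", "-", "--"],
)
plt.show()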

dev/_downloads/06ffeb4f0ded6447302acd5a712f8490/plot_nearest_centroid.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-"import numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nfrom sklearn import datasets\nfrom sklearn.neighbors import NearestCentroid\n\nn_neighbors = 15\n\n# import some data to play with\niris = datasets.load_iris()\n# we only take the first two features. We could avoid this ugly\n# slicing by using a two-dim dataset\nX = iris.data[:, :2]\ny = iris.target\n\nh = 0.02  # step size in the mesh\n\n# Create color maps\ncmap_light = ListedColormap([\"orange\", \"cyan\", \"cornflowerblue\"])\ncmap_bold = ListedColormap([\"darkorange\", \"c\", \"darkblue\"])\n\nfor shrinkage in [None, 0.2]:\n    # we create an instance of Neighbours Classifier and fit the data.\n    clf = NearestCentroid(shrink_threshold=shrinkage)\n    clf.fit(X, y)\n    y_pred = clf.predict(X)\n    print(shrinkage, np.mean(y == y_pred))\n    # Plot the decision boundary. For that, we will assign a color to each\n    # point in the mesh [x_min, x_max]x[y_min, y_max].\n    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n\n    # Put the result into a color plot\n    Z = Z.reshape(xx.shape)\n    plt.figure()\n    plt.pcolormesh(xx, yy, Z, cmap=cmap_light)\n\n    # Plot also the training points\n    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold, edgecolor=\"k\", s=20)\n    plt.title(\"3-Class classification (shrink_threshold=%r)\" % shrinkage)\n    plt.axis(\"tight\")\n\nplt.show()"
+"import numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nfrom sklearn import datasets\nfrom sklearn.neighbors import NearestCentroid\nfrom sklearn.inspection import DecisionBoundaryDisplay\n\nn_neighbors = 15\n\n# import some data to play with\niris = datasets.load_iris()\n# we only take the first two features. We could avoid this ugly\n# slicing by using a two-dim dataset\nX = iris.data[:, :2]\ny = iris.target\n\n# Create color maps\ncmap_light = ListedColormap([\"orange\", \"cyan\", \"cornflowerblue\"])\ncmap_bold = ListedColormap([\"darkorange\", \"c\", \"darkblue\"])\n\nfor shrinkage in [None, 0.2]:\n    # we create an instance of Neighbours Classifier and fit the data.\n    clf = NearestCentroid(shrink_threshold=shrinkage)\n    clf.fit(X, y)\n    y_pred = clf.predict(X)\n    print(shrinkage, np.mean(y == y_pred))\n\n    _, ax = plt.subplots()\n    DecisionBoundaryDisplay.from_estimator(\n        clf, X, cmap=cmap_light, ax=ax, response_method=\"predict\"\n    )\n\n    # Plot also the training points\n    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold, edgecolor=\"k\", s=20)\n    plt.title(\"3-Class classification (shrink_threshold=%r)\" % shrinkage)\n    plt.axis(\"tight\")\n\nplt.show()"
 ]
 }
 ],

dev/_downloads/1160eee327e01cad702c4964e1c69f45/plot_custom_kernel.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-"import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn import svm, datasets\n\n# import some data to play with\niris = datasets.load_iris()\nX = iris.data[:, :2]  # we only take the first two features. We could\n# avoid this ugly slicing by using a two-dim dataset\nY = iris.target\n\n\ndef my_kernel(X, Y):\n    \"\"\"\n    We create a custom kernel:\n\n                 (2  0)\n    k(X, Y) = X  (    ) Y.T\n                 (0  1)\n    \"\"\"\n    M = np.array([[2, 0], [0, 1.0]])\n    return np.dot(np.dot(X, M), Y.T)\n\n\nh = 0.02  # step size in the mesh\n\n# we create an instance of SVM and fit out data.\nclf = svm.SVC(kernel=my_kernel)\nclf.fit(X, Y)\n\n# Plot the decision boundary. For that, we will assign a color to each\n# point in the mesh [x_min, x_max]x[y_min, y_max].\nx_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\ny_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\nxx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\nZ = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n\n# Put the result into a color plot\nZ = Z.reshape(xx.shape)\nplt.pcolormesh(xx, yy, Z, cmap=plt.cm.Paired)\n\n# Plot also the training points\nplt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired, edgecolors=\"k\")\nplt.title(\"3-Class classification using Support Vector Machine with custom kernel\")\nplt.axis(\"tight\")\nplt.show()"
+"import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn import svm, datasets\nfrom sklearn.inspection import DecisionBoundaryDisplay\n\n# import some data to play with\niris = datasets.load_iris()\nX = iris.data[:, :2]  # we only take the first two features. We could\n# avoid this ugly slicing by using a two-dim dataset\nY = iris.target\n\n\ndef my_kernel(X, Y):\n    \"\"\"\n    We create a custom kernel:\n\n                 (2  0)\n    k(X, Y) = X  (    ) Y.T\n                 (0  1)\n    \"\"\"\n    M = np.array([[2, 0], [0, 1.0]])\n    return np.dot(np.dot(X, M), Y.T)\n\n\nh = 0.02  # step size in the mesh\n\n# we create an instance of SVM and fit out data.\nclf = svm.SVC(kernel=my_kernel)\nclf.fit(X, Y)\n\nax = plt.gca()\nDecisionBoundaryDisplay.from_estimator(\n    clf,\n    X,\n    cmap=plt.cm.Paired,\n    ax=ax,\n    response_method=\"predict\",\n    plot_method=\"pcolormesh\",\n    shading=\"auto\",\n)\n\n# Plot also the training points\nplt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired, edgecolors=\"k\")\nplt.title(\"3-Class classification using Support Vector Machine with custom kernel\")\nplt.axis(\"tight\")\nplt.show()"
 ]
 }
 ],

dev/_downloads/12a392e818ac5fa47dd91461855f3f77/plot_linearsvc_support_vectors.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-"import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.datasets import make_blobs\nfrom sklearn.svm import LinearSVC\n\nX, y = make_blobs(n_samples=40, centers=2, random_state=0)\n\nplt.figure(figsize=(10, 5))\nfor i, C in enumerate([1, 100]):\n    # \"hinge\" is the standard SVM loss\n    clf = LinearSVC(C=C, loss=\"hinge\", random_state=42).fit(X, y)\n    # obtain the support vectors through the decision function\n    decision_function = clf.decision_function(X)\n    # we can also calculate the decision function manually\n    # decision_function = np.dot(X, clf.coef_[0]) + clf.intercept_[0]\n    # The support vectors are the samples that lie within the margin\n    # boundaries, whose size is conventionally constrained to 1\n    support_vector_indices = np.where(np.abs(decision_function) <= 1 + 1e-15)[0]\n    support_vectors = X[support_vector_indices]\n\n    plt.subplot(1, 2, i + 1)\n    plt.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=plt.cm.Paired)\n    ax = plt.gca()\n    xlim = ax.get_xlim()\n    ylim = ax.get_ylim()\n    xx, yy = np.meshgrid(\n        np.linspace(xlim[0], xlim[1], 50), np.linspace(ylim[0], ylim[1], 50)\n    )\n    Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])\n    Z = Z.reshape(xx.shape)\n    plt.contour(\n        xx,\n        yy,\n        Z,\n        colors=\"k\",\n        levels=[-1, 0, 1],\n        alpha=0.5,\n        linestyles=[\"--\", \"-\", \"--\"],\n    )\n    plt.scatter(\n        support_vectors[:, 0],\n        support_vectors[:, 1],\n        s=100,\n        linewidth=1,\n        facecolors=\"none\",\n        edgecolors=\"k\",\n    )\n    plt.title(\"C=\" + str(C))\nplt.tight_layout()\nplt.show()"
+"import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.datasets import make_blobs\nfrom sklearn.svm import LinearSVC\nfrom sklearn.inspection import DecisionBoundaryDisplay\n\nX, y = make_blobs(n_samples=40, centers=2, random_state=0)\n\nplt.figure(figsize=(10, 5))\nfor i, C in enumerate([1, 100]):\n    # \"hinge\" is the standard SVM loss\n    clf = LinearSVC(C=C, loss=\"hinge\", random_state=42).fit(X, y)\n    # obtain the support vectors through the decision function\n    decision_function = clf.decision_function(X)\n    # we can also calculate the decision function manually\n    # decision_function = np.dot(X, clf.coef_[0]) + clf.intercept_[0]\n    # The support vectors are the samples that lie within the margin\n    # boundaries, whose size is conventionally constrained to 1\n    support_vector_indices = np.where(np.abs(decision_function) <= 1 + 1e-15)[0]\n    support_vectors = X[support_vector_indices]\n\n    plt.subplot(1, 2, i + 1)\n    plt.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=plt.cm.Paired)\n    ax = plt.gca()\n    DecisionBoundaryDisplay.from_estimator(\n        clf,\n        X,\n        ax=ax,\n        grid_resolution=50,\n        plot_method=\"contour\",\n        colors=\"k\",\n        levels=[-1, 0, 1],\n        alpha=0.5,\n        linestyles=[\"--\", \"-\", \"--\"],\n    )\n    plt.scatter(\n        support_vectors[:, 0],\n        support_vectors[:, 1],\n        s=100,\n        linewidth=1,\n        facecolors=\"none\",\n        edgecolors=\"k\",\n    )\n    plt.title(\"C=\" + str(C))\nplt.tight_layout()\nplt.show()"
 ]
 }
 ],

dev/_downloads/12b6dbb270865986bd1c9bbf7ce24cb0/plot_voting_decision_regions.py

Lines changed: 4 additions & 11 deletions
@@ -25,14 +25,14 @@
 
 from itertools import product
 
-import numpy as np
 import matplotlib.pyplot as plt
 
 from sklearn import datasets
 from sklearn.tree import DecisionTreeClassifier
 from sklearn.neighbors import KNeighborsClassifier
 from sklearn.svm import SVC
 from sklearn.ensemble import VotingClassifier
+from sklearn.inspection import DecisionBoundaryDisplay
 
 # Loading some example data
 iris = datasets.load_iris()
@@ -55,22 +55,15 @@
 eclf.fit(X, y)
 
 # Plotting decision regions
-x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
-y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
-xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.1), np.arange(y_min, y_max, 0.1))
-
 f, axarr = plt.subplots(2, 2, sharex="col", sharey="row", figsize=(10, 8))
-
 for idx, clf, tt in zip(
     product([0, 1], [0, 1]),
     [clf1, clf2, clf3, eclf],
     ["Decision Tree (depth=4)", "KNN (k=7)", "Kernel SVM", "Soft Voting"],
 ):
-
-    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
-    Z = Z.reshape(xx.shape)
-
-    axarr[idx[0], idx[1]].contourf(xx, yy, Z, alpha=0.4)
+    DecisionBoundaryDisplay.from_estimator(
+        clf, X, alpha=0.4, ax=axarr[idx[0], idx[1]], response_method="predict"
+    )
     axarr[idx[0], idx[1]].scatter(X[:, 0], X[:, 1], c=y, s=20, edgecolor="k")
     axarr[idx[0], idx[1]].set_title(tt)
 
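Unlike the removed ax.contourf call, from_estimator also returns a DecisionBoundaryDisplay object, so the drawn artists stay reachable afterwards: ax_ holds the Axes that was drawn into and surface_ the contour set. A small illustrative sketch (the LogisticRegression stand-in is not part of this commit):

import matplotlib.pyplot as plt
from sklearn.datasets import make_blobs
from sklearn.inspection import DecisionBoundaryDisplay
from sklearn.linear_model import LogisticRegression

X, y = make_blobs(n_samples=60, centers=3, random_state=0)
clf = LogisticRegression().fit(X, y)

# The returned display exposes the matplotlib objects for later styling.
disp = DecisionBoundaryDisplay.from_estimator(
    clf, X, alpha=0.4, response_method="predict"
)
disp.ax_.scatter(X[:, 0], X[:, 1], c=y, s=20, edgecolor="k")
disp.ax_.set_title("Decision regions, tweaked via the display object")
plt.show()
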
dev/_downloads/1ee82dc6471486cb5b088fc473cd945b/plot_nearest_centroid.py

Lines changed: 6 additions & 13 deletions
@@ -13,6 +13,7 @@
 from matplotlib.colors import ListedColormap
 from sklearn import datasets
 from sklearn.neighbors import NearestCentroid
+from sklearn.inspection import DecisionBoundaryDisplay
 
 n_neighbors = 15
 
@@ -23,8 +24,6 @@
 X = iris.data[:, :2]
 y = iris.target
 
-h = 0.02  # step size in the mesh
-
 # Create color maps
 cmap_light = ListedColormap(["orange", "cyan", "cornflowerblue"])
 cmap_bold = ListedColormap(["darkorange", "c", "darkblue"])
@@ -35,17 +34,11 @@
     clf.fit(X, y)
     y_pred = clf.predict(X)
     print(shrinkage, np.mean(y == y_pred))
-    # Plot the decision boundary. For that, we will assign a color to each
-    # point in the mesh [x_min, x_max]x[y_min, y_max].
-    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
-    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
-    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
-    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
-
-    # Put the result into a color plot
-    Z = Z.reshape(xx.shape)
-    plt.figure()
-    plt.pcolormesh(xx, yy, Z, cmap=cmap_light)
+
+    _, ax = plt.subplots()
+    DecisionBoundaryDisplay.from_estimator(
+        clf, X, cmap=cmap_light, ax=ax, response_method="predict"
+    )
 
     # Plot also the training points
     plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold, edgecolor="k", s=20)
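
The deleted mesh step h = 0.02 has no direct replacement: from_estimator samples a fixed grid_resolution x grid_resolution grid (100 by 100 by default) over the data range plus eps padding, so grid density becomes a per-call knob rather than a step size. A sketch, assuming the defaults are otherwise acceptable:

import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.inspection import DecisionBoundaryDisplay
from sklearn.neighbors import NearestCentroid

X, y = load_iris(return_X_y=True)
X = X[:, :2]
clf = NearestCentroid().fit(X, y)

# grid_resolution=200 roughly mimics a finer mesh than the old h-based one.
_, ax = plt.subplots()
DecisionBoundaryDisplay.from_estimator(
    clf, X, ax=ax, response_method="predict", grid_resolution=200
)
ax.scatter(X[:, 0], X[:, 1], c=y, edgecolor="k", s=20)
plt.show()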

dev/_downloads/2da0534ab0e0c8241033bcc2d912e419/plot_classifier_comparison.py

Lines changed: 10 additions & 20 deletions
@@ -40,8 +40,7 @@
 from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
 from sklearn.naive_bayes import GaussianNB
 from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
-
-h = 0.02  # step size in the mesh
+from sklearn.inspection import DecisionBoundaryDisplay
 
 names = [
     "Nearest Neighbors",
@@ -95,7 +94,6 @@
 
     x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
     y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
-    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
 
     # just plot the dataset first
     cm = plt.cm.RdBu
@@ -109,8 +107,8 @@
     ax.scatter(
         X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6, edgecolors="k"
     )
-    ax.set_xlim(xx.min(), xx.max())
-    ax.set_ylim(yy.min(), yy.max())
+    ax.set_xlim(x_min, x_max)
+    ax.set_ylim(y_min, y_max)
     ax.set_xticks(())
     ax.set_yticks(())
     i += 1
@@ -120,17 +118,9 @@
         ax = plt.subplot(len(datasets), len(classifiers) + 1, i)
         clf.fit(X_train, y_train)
         score = clf.score(X_test, y_test)
-
-        # Plot the decision boundary. For that, we will assign a color to each
-        # point in the mesh [x_min, x_max]x[y_min, y_max].
-        if hasattr(clf, "decision_function"):
-            Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
-        else:
-            Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]
-
-        # Put the result into a color plot
-        Z = Z.reshape(xx.shape)
-        ax.contourf(xx, yy, Z, cmap=cm, alpha=0.8)
+        DecisionBoundaryDisplay.from_estimator(
+            clf, X, cmap=cm, alpha=0.8, ax=ax, eps=0.5
+        )
 
         # Plot the training points
         ax.scatter(
@@ -146,15 +136,15 @@
             alpha=0.6,
         )
 
-        ax.set_xlim(xx.min(), xx.max())
-        ax.set_ylim(yy.min(), yy.max())
+        ax.set_xlim(x_min, x_max)
+        ax.set_ylim(y_min, y_max)
         ax.set_xticks(())
         ax.set_yticks(())
         if ds_cnt == 0:
             ax.set_title(name)
         ax.text(
-            xx.max() - 0.3,
-            yy.min() + 0.3,
+            x_max - 0.3,
+            y_min + 0.3,
             ("%.2f" % score).lstrip("0"),
             size=15,
            horizontalalignment="right",
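
Two details make this migration equivalent to the deleted code. First, eps=0.5 reproduces the ±0.5 padding previously applied to x_min/x_max and y_min/y_max. Second, the default response_method="auto" performs exactly the dispatch the hasattr block spelled out: decision_function if available, else predict_proba, else predict. A hand-written equivalent of that fallback, for illustration only (response_values is a hypothetical helper, not scikit-learn API):

def response_values(clf, grid):
    # Prefer the continuous decision function; otherwise fall back to the
    # probability of the positive class, as the removed block did.
    if hasattr(clf, "decision_function"):
        return clf.decision_function(grid)
    return clf.predict_proba(grid)[:, 1]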

dev/_downloads/31b78d45b09682c72f57e77cf94a939e/plot_custom_kernel.py

Lines changed: 11 additions & 10 deletions
@@ -11,6 +11,7 @@
 import numpy as np
 import matplotlib.pyplot as plt
 from sklearn import svm, datasets
+from sklearn.inspection import DecisionBoundaryDisplay
 
 # import some data to play with
 iris = datasets.load_iris()
@@ -37,16 +38,16 @@ def my_kernel(X, Y):
 clf = svm.SVC(kernel=my_kernel)
 clf.fit(X, Y)
 
-# Plot the decision boundary. For that, we will assign a color to each
-# point in the mesh [x_min, x_max]x[y_min, y_max].
-x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
-y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
-xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
-Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
-
-# Put the result into a color plot
-Z = Z.reshape(xx.shape)
-plt.pcolormesh(xx, yy, Z, cmap=plt.cm.Paired)
+ax = plt.gca()
+DecisionBoundaryDisplay.from_estimator(
+    clf,
+    X,
+    cmap=plt.cm.Paired,
+    ax=ax,
+    response_method="predict",
+    plot_method="pcolormesh",
+    shading="auto",
+)
 
 # Plot also the training points
 plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired, edgecolors="k")
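
Here plot_method="pcolormesh" switches the draw call from the default contourf to pcolormesh, and shading="auto", like any keyword the display does not recognize, is passed straight through to matplotlib. A standalone sketch with a built-in kernel standing in for my_kernel:

import matplotlib.pyplot as plt
from sklearn import datasets, svm
from sklearn.inspection import DecisionBoundaryDisplay

iris = datasets.load_iris()
X, Y = iris.data[:, :2], iris.target
clf = svm.SVC(kernel="linear").fit(X, Y)  # any fitted classifier works here

# Regions drawn with pcolormesh; shading is forwarded to matplotlib.
DecisionBoundaryDisplay.from_estimator(
    clf,
    X,
    cmap=plt.cm.Paired,
    ax=plt.gca(),
    response_method="predict",
    plot_method="pcolormesh",
    shading="auto",
)
plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired, edgecolors="k")
plt.show()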
