
Commit e054c40

Pushing the docs to dev/ for branch: master, commit 0cae688f251e1b21bbc9d6ecca66aa6987fe88a9
1 parent 9c2e80e commit e054c40

957 files changed, +2935 -2889 lines


dev/_downloads/plot_precision_recall.ipynb

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@
 "execution_count": null,
 "cell_type": "code",
 "source": [
-
"print(__doc__)\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom itertools import cycle\n\nfrom sklearn import svm, datasets\nfrom sklearn.metrics import precision_recall_curve\nfrom sklearn.metrics import average_precision_score\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.preprocessing import label_binarize\nfrom sklearn.multiclass import OneVsRestClassifier\n\n# import some data to play with\niris = datasets.load_iris()\nX = iris.data\ny = iris.target\n\n# setup plot details\ncolors = cycle(['navy', 'turquoise', 'darkorange', 'cornflowerblue', 'teal'])\nlw = 2\n\n# Binarize the output\ny = label_binarize(y, classes=[0, 1, 2])\nn_classes = y.shape[1]\n\n# Add noisy features\nrandom_state = np.random.RandomState(0)\nn_samples, n_features = X.shape\nX = np.c_[X, random_state.randn(n_samples, 200 * n_features)]\n\n# Split into training and test\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.5,\n random_state=random_state)\n\n# Run classifier\nclassifier = OneVsRestClassifier(svm.SVC(kernel='linear', probability=True,\n random_state=random_state))\ny_score = classifier.fit(X_train, y_train).decision_function(X_test)\n\n# Compute Precision-Recall and plot curve\nprecision = dict()\nrecall = dict()\naverage_precision = dict()\nfor i in range(n_classes):\n precision[i], recall[i], _ = precision_recall_curve(y_test[:, i],\n y_score[:, i])\n average_precision[i] = average_precision_score(y_test[:, i], y_score[:, i])\n\n# Compute micro-average ROC curve and ROC area\nprecision[\"micro\"], recall[\"micro\"], _ = precision_recall_curve(y_test.ravel(),\n y_score.ravel())\naverage_precision[\"micro\"] = average_precision_score(y_test, y_score,\n average=\"micro\")\n\n\n# Plot Precision-Recall curve\nplt.clf()\nplt.plot(recall[0], precision[0], lw=lw, color='navy',\n label='Precision-Recall curve')\nplt.xlabel('Recall')\nplt.ylabel('Precision')\nplt.ylim([0.0, 1.05])\nplt.xlim([0.0, 1.0])\nplt.title('Precision-Recall example: AUC={0:0.2f}'.format(average_precision[0]))\nplt.legend(loc=\"lower left\")\nplt.show()\n\n# Plot Precision-Recall curve for each class\nplt.clf()\nplt.plot(recall[\"micro\"], precision[\"micro\"], color='gold', lw=lw,\n label='micro-average Precision-recall curve (area = {0:0.2f})'\n ''.format(average_precision[\"micro\"]))\nfor i, color in zip(range(n_classes), colors):\n plt.plot(recall[i], precision[i], color=color, lw=lw,\n label='Precision-recall curve of class {0} (area = {1:0.2f})'\n ''.format(i, average_precision[i]))\n\nplt.xlim([0.0, 1.0])\nplt.ylim([0.0, 1.05])\nplt.xlabel('Recall')\nplt.ylabel('Precision')\nplt.title('Extension of Precision-Recall curve to multi-class')\nplt.legend(loc=\"lower right\")\nplt.show()"
+
"print(__doc__)\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom itertools import cycle\n\nfrom sklearn import svm, datasets\nfrom sklearn.metrics import precision_recall_curve\nfrom sklearn.metrics import average_precision_score\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.preprocessing import label_binarize\nfrom sklearn.multiclass import OneVsRestClassifier\n\n# import some data to play with\niris = datasets.load_iris()\nX = iris.data\ny = iris.target\n\n# setup plot details\ncolors = cycle(['navy', 'turquoise', 'darkorange', 'cornflowerblue', 'teal'])\nlw = 2\n\n# Binarize the output\ny = label_binarize(y, classes=[0, 1, 2])\nn_classes = y.shape[1]\n\n# Add noisy features\nrandom_state = np.random.RandomState(0)\nn_samples, n_features = X.shape\nX = np.c_[X, random_state.randn(n_samples, 200 * n_features)]\n\n# Split into training and test\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.5,\n random_state=random_state)\n\n# Run classifier\nclassifier = OneVsRestClassifier(svm.SVC(kernel='linear', probability=True,\n random_state=random_state))\ny_score = classifier.fit(X_train, y_train).decision_function(X_test)\n\n# Compute Precision-Recall and plot curve\nprecision = dict()\nrecall = dict()\naverage_precision = dict()\nfor i in range(n_classes):\n precision[i], recall[i], _ = precision_recall_curve(y_test[:, i],\n y_score[:, i])\n average_precision[i] = average_precision_score(y_test[:, i], y_score[:, i])\n\n# Compute micro-average ROC curve and ROC area\nprecision[\"micro\"], recall[\"micro\"], _ = precision_recall_curve(y_test.ravel(),\n y_score.ravel())\naverage_precision[\"micro\"] = average_precision_score(y_test, y_score,\n average=\"micro\")\n\n\n# Plot Precision-Recall curve\nplt.clf()\nplt.plot(recall[0], precision[0], lw=lw, color='navy',\n label='Precision-Recall curve')\nplt.xlabel('Recall')\nplt.ylabel('Precision')\nplt.ylim([0.0, 1.05])\nplt.xlim([0.0, 1.0])\nplt.title('Precision-Recall example: AUC={0:0.2f}'.format(average_precision[0]))\nplt.legend(loc=\"lower left\")\nplt.show()\n\n# Plot Precision-Recall curve for each class and iso-f1 curves\nplt.clf()\nf_scores = np.linspace(0.2, 0.8, num=4)\nlines = []\nlabels = []\nfor f_score in f_scores:\n x = np.linspace(0.01, 1)\n y = f_score * x / (2 * x - f_score)\n l, = plt.plot(x[y >= 0], y[y >= 0], color='gray', alpha=0.2)\n plt.annotate('f1={0:0.1f}'.format(f_score), xy=(0.9, y[45] + 0.02))\n\nlines.append(l)\nlabels.append('iso-f1 curves')\nl, = plt.plot(recall[\"micro\"], precision[\"micro\"], color='gold', lw=lw)\nlines.append(l)\nlabels.append('micro-average Precision-recall curve (area = {0:0.2f})'\n ''.format(average_precision[\"micro\"]))\nfor i, color in zip(range(n_classes), colors):\n l, = plt.plot(recall[i], precision[i], color=color, lw=lw)\n lines.append(l)\n labels.append('Precision-recall curve of class {0} (area = {1:0.2f})'\n ''.format(i, average_precision[i]))\n\nfig = plt.gcf()\nfig.set_size_inches(7, 7)\nfig.subplots_adjust(bottom=0.25)\nplt.xlim([0.0, 1.0])\nplt.ylim([0.0, 1.05])\nplt.xlabel('Recall')\nplt.ylabel('Precision')\nplt.title('Extension of Precision-Recall curve to multi-class')\nplt.figlegend(lines, labels, loc='lower center')\nplt.show()"
 ],
 "outputs": [],
 "metadata": {

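The updated cell draws faint iso-F1 contours behind the precision-recall curves before overlaying the micro-average and per-class curves. The contour expression in the new source, y = f_score * x / (2 * x - f_score), is just the F1 definition solved for precision at a fixed recall (a quick derivation for reference, not part of the commit):

$$
F_1 = \frac{2PR}{P + R}
\quad\Longrightarrow\quad
P = \frac{F_1\,R}{2R - F_1},
$$

with x standing in for recall R and y for precision P. The mask y >= 0 discards the branch where 2R - F_1 <= 0, i.e. recall values too low for that F1 level to be attainable at any precision.
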
dev/_downloads/plot_precision_recall.py

Lines changed: 25 additions & 9 deletions
@@ -139,20 +139,36 @@
 plt.legend(loc="lower left")
 plt.show()
 
-# Plot Precision-Recall curve for each class
+# Plot Precision-Recall curve for each class and iso-f1 curves
 plt.clf()
-plt.plot(recall["micro"], precision["micro"], color='gold', lw=lw,
-         label='micro-average Precision-recall curve (area = {0:0.2f})'
-               ''.format(average_precision["micro"]))
+f_scores = np.linspace(0.2, 0.8, num=4)
+lines = []
+labels = []
+for f_score in f_scores:
+    x = np.linspace(0.01, 1)
+    y = f_score * x / (2 * x - f_score)
+    l, = plt.plot(x[y >= 0], y[y >= 0], color='gray', alpha=0.2)
+    plt.annotate('f1={0:0.1f}'.format(f_score), xy=(0.9, y[45] + 0.02))
+
+lines.append(l)
+labels.append('iso-f1 curves')
+l, = plt.plot(recall["micro"], precision["micro"], color='gold', lw=lw)
+lines.append(l)
+labels.append('micro-average Precision-recall curve (area = {0:0.2f})'
+              ''.format(average_precision["micro"]))
 for i, color in zip(range(n_classes), colors):
-    plt.plot(recall[i], precision[i], color=color, lw=lw,
-             label='Precision-recall curve of class {0} (area = {1:0.2f})'
-                   ''.format(i, average_precision[i]))
-
+    l, = plt.plot(recall[i], precision[i], color=color, lw=lw)
+    lines.append(l)
+    labels.append('Precision-recall curve of class {0} (area = {1:0.2f})'
+                  ''.format(i, average_precision[i]))
+
+fig = plt.gcf()
+fig.set_size_inches(7, 7)
+fig.subplots_adjust(bottom=0.25)
 plt.xlim([0.0, 1.0])
 plt.ylim([0.0, 1.05])
 plt.xlabel('Recall')
 plt.ylabel('Precision')
 plt.title('Extension of Precision-Recall curve to multi-class')
-plt.legend(loc="lower right")
+plt.figlegend(lines, labels, loc='lower center')
 plt.show()

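The other notable change in this script is the legend handling: instead of a per-axes plt.legend, the new code collects the Line2D handles and label strings while plotting and places a single figure-level legend below the axes, reserving space with subplots_adjust. A minimal, self-contained sketch of that pattern (synthetic data; the name demo_curves and the AP values are illustrative, not from the commit):

import matplotlib.pyplot as plt
import numpy as np

# Hypothetical per-class average-precision values, just to have something to label.
demo_curves = {'class 0': 0.88, 'class 1': 0.61, 'class 2': 0.73}

lines = []    # Line2D handles, collected as we plot
labels = []   # matching legend strings

fig = plt.figure(figsize=(7, 7))
for name, ap in demo_curves.items():
    recall = np.linspace(0.0, 1.0, 50)
    precision = ap + (1.0 - ap) * (1.0 - recall)   # fake curve shape for the demo
    l, = plt.plot(recall, precision, lw=2)         # tuple-unpack the single Line2D handle
    lines.append(l)
    labels.append('{0} (area = {1:0.2f})'.format(name, ap))

fig.subplots_adjust(bottom=0.25)                   # leave room below the axes
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.figlegend(lines, labels, loc='lower center')   # one legend for the whole figure
plt.show()
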
dev/_downloads/scikit-learn-docs.pdf

15 KB
Binary file not shown.
