Commit 74c8c50

Pushing the docs to dev/ for branch: master, commit 19e2772cac4a58de36e44a3edd89a9010e18765b
1 parent 61b1c1d commit 74c8c50

File tree

1,100 files changed (+4363 / -4094 lines)

dev/_downloads/plot_roc_crossval.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
- "print(__doc__)\n\nimport numpy as np\nfrom scipy import interp\nimport matplotlib.pyplot as plt\nfrom itertools import cycle\n\nfrom sklearn import svm, datasets\nfrom sklearn.metrics import roc_curve, auc\nfrom sklearn.model_selection import StratifiedKFold\n\n# #############################################################################\n# Data IO and generation\n\n# Import some data to play with\niris = datasets.load_iris()\nX = iris.data\ny = iris.target\nX, y = X[y != 2], y[y != 2]\nn_samples, n_features = X.shape\n\n# Add noisy features\nrandom_state = np.random.RandomState(0)\nX = np.c_[X, random_state.randn(n_samples, 200 * n_features)]\n\n# #############################################################################\n# Classification and ROC analysis\n\n# Run classifier with cross-validation and plot ROC curves\ncv = StratifiedKFold(n_splits=6)\nclassifier = svm.SVC(kernel='linear', probability=True,\n random_state=random_state)\n\ntprs = []\naucs = []\nmean_fpr = np.linspace(0, 1, 100)\n\ni = 0\nfor train, test in cv.split(X, y):\n probas_ = classifier.fit(X[train], y[train]).predict_proba(X[test])\n # Compute ROC curve and area the curve\n fpr, tpr, thresholds = roc_curve(y[test], probas_[:, 1])\n tprs.append(interp(mean_fpr, fpr, tpr))\n tprs[-1][0] = 0.0\n roc_auc = auc(fpr, tpr)\n aucs.append(roc_auc)\n plt.plot(fpr, tpr, lw=1, alpha=0.3,\n label='ROC fold %d (AUC = %0.2f)' % (i, roc_auc))\n\n i += 1\nplt.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r',\n label='Luck', alpha=.8)\n\nmean_tpr = np.mean(tprs, axis=0)\nmean_tpr[-1] = 1.0\nmean_auc = auc(mean_fpr, mean_tpr)\nstd_auc = np.std(aucs)\nplt.plot(mean_fpr, mean_tpr, color='b',\n label=r'Mean ROC (AUC = %0.2f $\\pm$ %0.2f)' % (mean_auc, std_auc),\n lw=2, alpha=.8)\n\nstd_tpr = np.std(tprs, axis=0)\ntprs_upper = np.minimum(mean_tpr + std_tpr, 1)\ntprs_lower = np.maximum(mean_tpr - std_tpr, 0)\nplt.fill_between(mean_fpr, tprs_lower, tprs_upper, color='grey', alpha=.2,\n label=r'$\\pm$ 1 std. dev.')\n\nplt.xlim([-0.05, 1.05])\nplt.ylim([-0.05, 1.05])\nplt.xlabel('False Positive Rate')\nplt.ylabel('True Positive Rate')\nplt.title('Receiver operating characteristic example')\nplt.legend(loc=\"lower right\")\nplt.show()"
+ "print(__doc__)\n\nimport numpy as np\nfrom scipy import interp\nimport matplotlib.pyplot as plt\nfrom itertools import cycle\n\nfrom sklearn import svm, datasets\nfrom sklearn.metrics import roc_curve, auc\nfrom sklearn.model_selection import StratifiedKFold\n\n# #############################################################################\n# Data IO and generation\n\n# Import some data to play with\niris = datasets.load_iris()\nX = iris.data\ny = iris.target\nX, y = X[y != 2], y[y != 2]\nn_samples, n_features = X.shape\n\n# Add noisy features\nrandom_state = np.random.RandomState(0)\nX = np.c_[X, random_state.randn(n_samples, 200 * n_features)]\n\n# #############################################################################\n# Classification and ROC analysis\n\n# Run classifier with cross-validation and plot ROC curves\ncv = StratifiedKFold(n_splits=6)\nclassifier = svm.SVC(kernel='linear', probability=True,\n random_state=random_state)\n\ntprs = []\naucs = []\nmean_fpr = np.linspace(0, 1, 100)\n\ni = 0\nfor train, test in cv.split(X, y):\n probas_ = classifier.fit(X[train], y[train]).predict_proba(X[test])\n # Compute ROC curve and area the curve\n fpr, tpr, thresholds = roc_curve(y[test], probas_[:, 1])\n tprs.append(interp(mean_fpr, fpr, tpr))\n tprs[-1][0] = 0.0\n roc_auc = auc(fpr, tpr)\n aucs.append(roc_auc)\n plt.plot(fpr, tpr, lw=1, alpha=0.3,\n label='ROC fold %d (AUC = %0.2f)' % (i, roc_auc))\n\n i += 1\nplt.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r',\n label='Chance', alpha=.8)\n\nmean_tpr = np.mean(tprs, axis=0)\nmean_tpr[-1] = 1.0\nmean_auc = auc(mean_fpr, mean_tpr)\nstd_auc = np.std(aucs)\nplt.plot(mean_fpr, mean_tpr, color='b',\n label=r'Mean ROC (AUC = %0.2f $\\pm$ %0.2f)' % (mean_auc, std_auc),\n lw=2, alpha=.8)\n\nstd_tpr = np.std(tprs, axis=0)\ntprs_upper = np.minimum(mean_tpr + std_tpr, 1)\ntprs_lower = np.maximum(mean_tpr - std_tpr, 0)\nplt.fill_between(mean_fpr, tprs_lower, tprs_upper, color='grey', alpha=.2,\n label=r'$\\pm$ 1 std. dev.')\n\nplt.xlim([-0.05, 1.05])\nplt.ylim([-0.05, 1.05])\nplt.xlabel('False Positive Rate')\nplt.ylabel('True Positive Rate')\nplt.title('Receiver operating characteristic example')\nplt.legend(loc=\"lower right\")\nplt.show()"
 ]
 }
 ],

dev/_downloads/plot_roc_crossval.py

Lines changed: 1 addition & 1 deletion
@@ -80,7 +80,7 @@
 
     i += 1
 plt.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r',
-         label='Luck', alpha=.8)
+         label='Chance', alpha=.8)
 
 mean_tpr = np.mean(tprs, axis=0)
 mean_tpr[-1] = 1.0
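
The dashed diagonal that this hunk relabels marks chance-level performance: a classifier that assigns random scores traces TPR = FPR, so "Chance" describes it more accurately than "Luck". Below is a minimal, self-contained sketch (not part of the commit) of the averaging step the example performs around these lines; the per-fold curves here are synthetic stand-ins, and numpy.interp is used in place of the scipy.interp alias the example imports (both take the same (x, xp, fp) arguments).

import numpy as np
import matplotlib.pyplot as plt

# Synthetic stand-ins for the per-fold ROC curves; in the real example these
# come from sklearn.metrics.roc_curve on each StratifiedKFold split.
rng = np.random.RandomState(0)
per_fold_curves = [(np.sort(rng.uniform(0, 1, 20)), np.sort(rng.uniform(0, 1, 20)))
                   for _ in range(6)]

mean_fpr = np.linspace(0, 1, 100)
tprs = []
for fpr, tpr in per_fold_curves:
    interp_tpr = np.interp(mean_fpr, fpr, tpr)  # resample each fold onto a common FPR grid
    interp_tpr[0] = 0.0                         # force the curve to start at the origin
    tprs.append(interp_tpr)

mean_tpr = np.mean(tprs, axis=0)
mean_tpr[-1] = 1.0                              # force the curve to end at (1, 1)

plt.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r', label='Chance')
plt.plot(mean_fpr, mean_tpr, color='b', lw=2, label='Mean ROC')
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.legend(loc='lower right')
plt.show()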

dev/_downloads/plot_transformed_target.ipynb

Lines changed: 2 additions & 2 deletions
@@ -15,7 +15,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
- "\n# Effect of transforming the targets in regression model\n\n\nIn this example, we give an overview of the\n:class:`sklearn.preprocessing.TransformedTargetRegressor`. Two examples\nillustrate the benefit of transforming the targets before learning a linear\nregression model. The first example uses synthetic data while the second\nexample is based on the Boston housing data set.\n\n\n"
+ "\n# Effect of transforming the targets in regression model\n\n\nIn this example, we give an overview of the\n:class:`sklearn.compose.TransformedTargetRegressor`. Two examples\nillustrate the benefit of transforming the targets before learning a linear\nregression model. The first example uses synthetic data while the second\nexample is based on the Boston housing data set.\n\n\n"
 ]
 },
 {
@@ -44,7 +44,7 @@
 },
 "outputs": [],
 "source": [
- "from sklearn.datasets import make_regression\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.linear_model import RidgeCV\nfrom sklearn.preprocessing import TransformedTargetRegressor\nfrom sklearn.metrics import median_absolute_error, r2_score"
+ "from sklearn.datasets import make_regression\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.linear_model import RidgeCV\nfrom sklearn.compose import TransformedTargetRegressor\nfrom sklearn.metrics import median_absolute_error, r2_score"
 ]
 },
 {

dev/_downloads/plot_transformed_target.py

Lines changed: 2 additions & 2 deletions
@@ -7,7 +7,7 @@
 ======================================================
 
 In this example, we give an overview of the
-:class:`sklearn.preprocessing.TransformedTargetRegressor`. Two examples
+:class:`sklearn.compose.TransformedTargetRegressor`. Two examples
 illustrate the benefit of transforming the targets before learning a linear
 regression model. The first example uses synthetic data while the second
 example is based on the Boston housing data set.
@@ -31,7 +31,7 @@
 from sklearn.datasets import make_regression
 from sklearn.model_selection import train_test_split
 from sklearn.linear_model import RidgeCV
-from sklearn.preprocessing import TransformedTargetRegressor
+from sklearn.compose import TransformedTargetRegressor
 from sklearn.metrics import median_absolute_error, r2_score
 
 ###############################################################################
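
Both hunks above correct the import path: TransformedTargetRegressor lives in sklearn.compose, not sklearn.preprocessing. A minimal, self-contained usage sketch of the corrected import follows; the synthetic data and the log1p/expm1 transform pair are illustrative assumptions, not taken from this commit.

import numpy as np
from sklearn.compose import TransformedTargetRegressor
from sklearn.datasets import make_regression
from sklearn.linear_model import RidgeCV
from sklearn.metrics import r2_score
from sklearn.model_selection import train_test_split

# Synthetic regression data with an exponentiated (skewed) target, so that
# fitting on log1p(y) and mapping predictions back with expm1 should help.
X, y = make_regression(n_samples=200, n_features=5, noise=10, random_state=0)
y = np.exp((y + abs(y.min())) / 200)

X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Ridge regression trained on the transformed target; predictions are
# automatically inverse-transformed back to the original scale.
regr = TransformedTargetRegressor(regressor=RidgeCV(),
                                  func=np.log1p, inverse_func=np.expm1)
regr.fit(X_train, y_train)
print("R^2 on the test set: %.3f" % r2_score(y_test, regr.predict(X_test)))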

dev/_downloads/scikit-learn-docs.pdf

-42 KB (binary file not shown)
