
Commit 8f26d8e

Pushing the docs to dev/ for branch: master, commit f4cd388569dca52e0af7b4a0d83cd87072be6c76

1 parent: d1ccfd7


1,213 files changed: +3664 −3662 lines


dev/_downloads/7ce809adad0d67b96c1df3b7fcd74567/plot_mlp_alpha.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-    "print(__doc__)\n\n\n# Author: Issam H. Laradji\n# License: BSD 3 clause\n\nimport numpy as np\nfrom matplotlib import pyplot as plt\nfrom matplotlib.colors import ListedColormap\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.datasets import make_moons, make_circles, make_classification\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.pipeline import make_pipeline\n\nh = .02  # step size in the mesh\n\nalphas = np.logspace(-5, 3, 5)\nnames = ['alpha ' + str(i) for i in alphas]\n\nclassifiers = []\nfor i in alphas:\n    classifiers.append(make_pipeline(\n        StandardScaler(),\n        MLPClassifier(solver='lbfgs', alpha=i,\n                      random_state=1, max_iter=2000,\n                      early_stopping=True,\n                      hidden_layer_sizes=[100, 100])\n        ))\n\nX, y = make_classification(n_features=2, n_redundant=0, n_informative=2,\n                           random_state=0, n_clusters_per_class=1)\nrng = np.random.RandomState(2)\nX += 2 * rng.uniform(size=X.shape)\nlinearly_separable = (X, y)\n\ndatasets = [make_moons(noise=0.3, random_state=0),\n            make_circles(noise=0.2, factor=0.5, random_state=1),\n            linearly_separable]\n\nfigure = plt.figure(figsize=(17, 9))\ni = 1\n# iterate over datasets\nfor X, y in datasets:\n    # preprocess dataset, split into training and test part\n    X = StandardScaler().fit_transform(X)\n    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.4)\n\n    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5\n    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5\n    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n                         np.arange(y_min, y_max, h))\n\n    # just plot the dataset first\n    cm = plt.cm.RdBu\n    cm_bright = ListedColormap(['#FF0000', '#0000FF'])\n    ax = plt.subplot(len(datasets), len(classifiers) + 1, i)\n    # Plot the training points\n    ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright)\n    # and testing points\n    ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6)\n    ax.set_xlim(xx.min(), xx.max())\n    ax.set_ylim(yy.min(), yy.max())\n    ax.set_xticks(())\n    ax.set_yticks(())\n    i += 1\n\n    # iterate over classifiers\n    for name, clf in zip(names, classifiers):\n        ax = plt.subplot(len(datasets), len(classifiers) + 1, i)\n        clf.fit(X_train, y_train)\n        score = clf.score(X_test, y_test)\n\n        # Plot the decision boundary. For that, we will assign a color to each\n        # point in the mesh [x_min, x_max]x[y_min, y_max].\n        if hasattr(clf, \"decision_function\"):\n            Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])\n        else:\n            Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]\n\n        # Put the result into a color plot\n        Z = Z.reshape(xx.shape)\n        ax.contourf(xx, yy, Z, cmap=cm, alpha=.8)\n\n        # Plot also the training points\n        ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright,\n                   edgecolors='black', s=25)\n        # and testing points\n        ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright,\n                   alpha=0.6, edgecolors='black', s=25)\n\n        ax.set_xlim(xx.min(), xx.max())\n        ax.set_ylim(yy.min(), yy.max())\n        ax.set_xticks(())\n        ax.set_yticks(())\n        ax.set_title(name)\n        ax.text(xx.max() - .3, yy.min() + .3, ('%.2f' % score).lstrip('0'),\n                size=15, horizontalalignment='right')\n        i += 1\n\nfigure.subplots_adjust(left=.02, right=.98)\nplt.show()"
+    "print(__doc__)\n\n\n# Author: Issam H. Laradji\n# License: BSD 3 clause\n\nimport numpy as np\nfrom matplotlib import pyplot as plt\nfrom matplotlib.colors import ListedColormap\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.datasets import make_moons, make_circles, make_classification\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.pipeline import make_pipeline\n\nh = .02  # step size in the mesh\n\nalphas = np.logspace(-1, 1, 5)\n\nclassifiers = []\nnames = []\nfor alpha in alphas:\n    classifiers.append(make_pipeline(\n        StandardScaler(),\n        MLPClassifier(\n            solver='lbfgs', alpha=alpha, random_state=1, max_iter=2000,\n            early_stopping=True, hidden_layer_sizes=[100, 100],\n        )\n    ))\n    names.append(f\"alpha {alpha:.2f}\")\n\nX, y = make_classification(n_features=2, n_redundant=0, n_informative=2,\n                           random_state=0, n_clusters_per_class=1)\nrng = np.random.RandomState(2)\nX += 2 * rng.uniform(size=X.shape)\nlinearly_separable = (X, y)\n\ndatasets = [make_moons(noise=0.3, random_state=0),\n            make_circles(noise=0.2, factor=0.5, random_state=1),\n            linearly_separable]\n\nfigure = plt.figure(figsize=(17, 9))\ni = 1\n# iterate over datasets\nfor X, y in datasets:\n    # split into training and test part\n    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.4)\n\n    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5\n    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5\n    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n                         np.arange(y_min, y_max, h))\n\n    # just plot the dataset first\n    cm = plt.cm.RdBu\n    cm_bright = ListedColormap(['#FF0000', '#0000FF'])\n    ax = plt.subplot(len(datasets), len(classifiers) + 1, i)\n    # Plot the training points\n    ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright)\n    # and testing points\n    ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6)\n    ax.set_xlim(xx.min(), xx.max())\n    ax.set_ylim(yy.min(), yy.max())\n    ax.set_xticks(())\n    ax.set_yticks(())\n    i += 1\n\n    # iterate over classifiers\n    for name, clf in zip(names, classifiers):\n        ax = plt.subplot(len(datasets), len(classifiers) + 1, i)\n        clf.fit(X_train, y_train)\n        score = clf.score(X_test, y_test)\n\n        # Plot the decision boundary. For that, we will assign a color to each\n        # point in the mesh [x_min, x_max] x [y_min, y_max].\n        if hasattr(clf, \"decision_function\"):\n            Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])\n        else:\n            Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]\n\n        # Put the result into a color plot\n        Z = Z.reshape(xx.shape)\n        ax.contourf(xx, yy, Z, cmap=cm, alpha=.8)\n\n        # Plot also the training points\n        ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright,\n                   edgecolors='black', s=25)\n        # and testing points\n        ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright,\n                   alpha=0.6, edgecolors='black', s=25)\n\n        ax.set_xlim(xx.min(), xx.max())\n        ax.set_ylim(yy.min(), yy.max())\n        ax.set_xticks(())\n        ax.set_yticks(())\n        ax.set_title(name)\n        ax.text(xx.max() - .3, yy.min() + .3, ('%.2f' % score).lstrip('0'),\n                size=15, horizontalalignment='right')\n        i += 1\n\nfigure.subplots_adjust(left=.02, right=.98)\nplt.show()"
 ]
 }
 ],
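
For context on the change above: the old grid np.logspace(-5, 3, 5) spanned eight orders of magnitude (1e-05 to 1e+03), while the new np.logspace(-1, 1, 5) keeps the five alpha values between 0.1 and 10, and the subplot titles are now formatted to two decimals. A minimal sketch (plain NumPy, not part of the commit) of what the new lines evaluate to:

    import numpy as np

    alphas = np.logspace(-1, 1, 5)
    # array([ 0.1       ,  0.31622777,  1.        ,  3.16227766, 10.        ])
    names = [f"alpha {alpha:.2f}" for alpha in alphas]
    # ['alpha 0.10', 'alpha 0.32', 'alpha 1.00', 'alpha 3.16', 'alpha 10.00']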

dev/_downloads/8af613cc7180fed274715982abcd696d/plot_mlp_alpha.py

Lines changed: 12 additions & 12 deletions
@@ -32,18 +32,19 @@
 
 h = .02  # step size in the mesh
 
-alphas = np.logspace(-5, 3, 5)
-names = ['alpha ' + str(i) for i in alphas]
+alphas = np.logspace(-1, 1, 5)
 
 classifiers = []
-for i in alphas:
+names = []
+for alpha in alphas:
     classifiers.append(make_pipeline(
-        StandardScaler(),
-        MLPClassifier(solver='lbfgs', alpha=i,
-                      random_state=1, max_iter=2000,
-                      early_stopping=True,
-                      hidden_layer_sizes=[100, 100])
-        ))
+        StandardScaler(),
+        MLPClassifier(
+            solver='lbfgs', alpha=alpha, random_state=1, max_iter=2000,
+            early_stopping=True, hidden_layer_sizes=[100, 100],
+        )
+    ))
+    names.append(f"alpha {alpha:.2f}")
 
 X, y = make_classification(n_features=2, n_redundant=0, n_informative=2,
                            random_state=0, n_clusters_per_class=1)
@@ -59,8 +60,7 @@
 i = 1
 # iterate over datasets
 for X, y in datasets:
-    # preprocess dataset, split into training and test part
-    X = StandardScaler().fit_transform(X)
+    # split into training and test part
     X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.4)
 
     x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
@@ -89,7 +89,7 @@
         score = clf.score(X_test, y_test)
 
         # Plot the decision boundary. For that, we will assign a color to each
-        # point in the mesh [x_min, x_max]x[y_min, y_max].
+        # point in the mesh [x_min, x_max] x [y_min, y_max].
         if hasattr(clf, "decision_function"):
             Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
         else:
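
The second hunk above drops the standalone X = StandardScaler().fit_transform(X): scaling now lives inside each pipeline, so the scaler is fitted on the training split during clf.fit and its statistics are reused on the test split during clf.score, rather than being computed on the full dataset before the split. A minimal sketch of that pattern (variable choices are illustrative, not from the commit):

    from sklearn.datasets import make_moons
    from sklearn.model_selection import train_test_split
    from sklearn.neural_network import MLPClassifier
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import StandardScaler

    X, y = make_moons(noise=0.3, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.4)

    clf = make_pipeline(StandardScaler(),
                        MLPClassifier(solver='lbfgs', alpha=1.0,
                                      random_state=1, max_iter=2000))
    clf.fit(X_train, y_train)         # StandardScaler is fitted on X_train only
    print(clf.score(X_test, y_test))  # X_test is scaled with the training statistics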

dev/_downloads/scikit-learn-docs.pdf (-36.5 KB; binary file not shown)

dev/_images/iris.png and other changed images (binary files not shown; size deltas: 0 Bytes, -408 Bytes, -408 Bytes, -176 Bytes, -176 Bytes)
