Commit cc23663

Pushing the docs to dev/ for branch: master, commit 24d4b2c2f3bae149407db57182643405426f275c
1 parent 37a4fa8 commit cc23663

1,086 files changed (+3,389, -3,346 lines)

(Two binary files changed, 377 Bytes and 360 Bytes; contents not shown.)

dev/_downloads/plot_mlp_training_curves.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
   },
   "outputs": [],
   "source": [
-    "print(__doc__)\nimport matplotlib.pyplot as plt\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.preprocessing import MinMaxScaler\nfrom sklearn import datasets\n\n# different learning rate schedules and momentum parameters\nparams = [{'solver': 'sgd', 'learning_rate': 'constant', 'momentum': 0,\n 'learning_rate_init': 0.2},\n {'solver': 'sgd', 'learning_rate': 'constant', 'momentum': .9,\n 'nesterovs_momentum': False, 'learning_rate_init': 0.2},\n {'solver': 'sgd', 'learning_rate': 'constant', 'momentum': .9,\n 'nesterovs_momentum': True, 'learning_rate_init': 0.2},\n {'solver': 'sgd', 'learning_rate': 'invscaling', 'momentum': 0,\n 'learning_rate_init': 0.2},\n {'solver': 'sgd', 'learning_rate': 'invscaling', 'momentum': .9,\n 'nesterovs_momentum': True, 'learning_rate_init': 0.2},\n {'solver': 'sgd', 'learning_rate': 'invscaling', 'momentum': .9,\n 'nesterovs_momentum': False, 'learning_rate_init': 0.2},\n {'solver': 'adam', 'learning_rate_init': 0.01}]\n\nlabels = [\"constant learning-rate\", \"constant with momentum\",\n \"constant with Nesterov's momentum\",\n \"inv-scaling learning-rate\", \"inv-scaling with momentum\",\n \"inv-scaling with Nesterov's momentum\", \"adam\"]\n\nplot_args = [{'c': 'red', 'linestyle': '-'},\n {'c': 'green', 'linestyle': '-'},\n {'c': 'blue', 'linestyle': '-'},\n {'c': 'red', 'linestyle': '--'},\n {'c': 'green', 'linestyle': '--'},\n {'c': 'blue', 'linestyle': '--'},\n {'c': 'black', 'linestyle': '-'}]\n\n\ndef plot_on_dataset(X, y, ax, name):\n # for each dataset, plot learning for each learning strategy\n print(\"\\nlearning on dataset %s\" % name)\n ax.set_title(name)\n X = MinMaxScaler().fit_transform(X)\n mlps = []\n if name == \"digits\":\n # digits is larger but converges fairly quickly\n max_iter = 15\n else:\n max_iter = 400\n\n for label, param in zip(labels, params):\n print(\"training: %s\" % label)\n mlp = MLPClassifier(verbose=0, random_state=0,\n max_iter=max_iter, **param)\n mlp.fit(X, y)\n mlps.append(mlp)\n print(\"Training set score: %f\" % mlp.score(X, y))\n print(\"Training set loss: %f\" % mlp.loss_)\n for mlp, label, args in zip(mlps, labels, plot_args):\n ax.plot(mlp.loss_curve_, label=label, **args)\n\n\nfig, axes = plt.subplots(2, 2, figsize=(15, 10))\n# load / generate some toy datasets\niris = datasets.load_iris()\ndigits = datasets.load_digits()\ndata_sets = [(iris.data, iris.target),\n (digits.data, digits.target),\n datasets.make_circles(noise=0.2, factor=0.5, random_state=1),\n datasets.make_moons(noise=0.3, random_state=0)]\n\nfor ax, data, name in zip(axes.ravel(), data_sets, ['iris', 'digits',\n 'circles', 'moons']):\n plot_on_dataset(*data, ax=ax, name=name)\n\nfig.legend(ax.get_lines(), labels, ncol=3, loc=\"upper center\")\nplt.show()"
+    "print(__doc__)\n\nimport warnings\n\nimport matplotlib.pyplot as plt\n\nfrom sklearn.neural_network import MLPClassifier\nfrom sklearn.preprocessing import MinMaxScaler\nfrom sklearn import datasets\nfrom sklearn.exceptions import ConvergenceWarning\n\n# different learning rate schedules and momentum parameters\nparams = [{'solver': 'sgd', 'learning_rate': 'constant', 'momentum': 0,\n 'learning_rate_init': 0.2},\n {'solver': 'sgd', 'learning_rate': 'constant', 'momentum': .9,\n 'nesterovs_momentum': False, 'learning_rate_init': 0.2},\n {'solver': 'sgd', 'learning_rate': 'constant', 'momentum': .9,\n 'nesterovs_momentum': True, 'learning_rate_init': 0.2},\n {'solver': 'sgd', 'learning_rate': 'invscaling', 'momentum': 0,\n 'learning_rate_init': 0.2},\n {'solver': 'sgd', 'learning_rate': 'invscaling', 'momentum': .9,\n 'nesterovs_momentum': True, 'learning_rate_init': 0.2},\n {'solver': 'sgd', 'learning_rate': 'invscaling', 'momentum': .9,\n 'nesterovs_momentum': False, 'learning_rate_init': 0.2},\n {'solver': 'adam', 'learning_rate_init': 0.01}]\n\nlabels = [\"constant learning-rate\", \"constant with momentum\",\n \"constant with Nesterov's momentum\",\n \"inv-scaling learning-rate\", \"inv-scaling with momentum\",\n \"inv-scaling with Nesterov's momentum\", \"adam\"]\n\nplot_args = [{'c': 'red', 'linestyle': '-'},\n {'c': 'green', 'linestyle': '-'},\n {'c': 'blue', 'linestyle': '-'},\n {'c': 'red', 'linestyle': '--'},\n {'c': 'green', 'linestyle': '--'},\n {'c': 'blue', 'linestyle': '--'},\n {'c': 'black', 'linestyle': '-'}]\n\n\ndef plot_on_dataset(X, y, ax, name):\n # for each dataset, plot learning for each learning strategy\n print(\"\\nlearning on dataset %s\" % name)\n ax.set_title(name)\n\n X = MinMaxScaler().fit_transform(X)\n mlps = []\n if name == \"digits\":\n # digits is larger but converges fairly quickly\n max_iter = 15\n else:\n max_iter = 400\n\n for label, param in zip(labels, params):\n print(\"training: %s\" % label)\n mlp = MLPClassifier(verbose=0, random_state=0,\n max_iter=max_iter, **param)\n\n # some parameter combinations will not converge as can be seen on the\n # plots so they are ignored here\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", category=ConvergenceWarning,\n module=\"sklearn\")\n mlp.fit(X, y)\n\n mlps.append(mlp)\n print(\"Training set score: %f\" % mlp.score(X, y))\n print(\"Training set loss: %f\" % mlp.loss_)\n for mlp, label, args in zip(mlps, labels, plot_args):\n ax.plot(mlp.loss_curve_, label=label, **args)\n\n\nfig, axes = plt.subplots(2, 2, figsize=(15, 10))\n# load / generate some toy datasets\niris = datasets.load_iris()\ndigits = datasets.load_digits()\ndata_sets = [(iris.data, iris.target),\n (digits.data, digits.target),\n datasets.make_circles(noise=0.2, factor=0.5, random_state=1),\n datasets.make_moons(noise=0.3, random_state=0)]\n\nfor ax, data, name in zip(axes.ravel(), data_sets, ['iris', 'digits',\n 'circles', 'moons']):\n plot_on_dataset(*data, ax=ax, name=name)\n\nfig.legend(ax.get_lines(), labels, ncol=3, loc=\"upper center\")\nplt.show()"
   ]
  }
 ],
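
The notebook stores the whole example as a single JSON-escaped string, which is why the entire change above collapses into one diff line. To compare such cells as readable code rather than as escaped one-liners, a small sketch along these lines can decode them (the file path is assumed for illustration; any sphinx-gallery notebook has the same layout):

import json

# Path assumed for illustration.
with open("plot_mlp_training_curves.ipynb") as f:
    nb = json.load(f)

# A code cell's "source" is a "\n"-escaped string (or a list of such strings);
# joining and printing it recovers the script shown in the .py diff below.
for cell in nb["cells"]:
    if cell["cell_type"] == "code":
        src = cell["source"]
        print("".join(src) if isinstance(src, list) else src)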

dev/_downloads/plot_mlp_training_curves.py

Lines changed: 15 additions & 2 deletions
@@ -14,10 +14,15 @@
 """
 
 print(__doc__)
+
+import warnings
+
 import matplotlib.pyplot as plt
+
 from sklearn.neural_network import MLPClassifier
 from sklearn.preprocessing import MinMaxScaler
 from sklearn import datasets
+from sklearn.exceptions import ConvergenceWarning
 
 # different learning rate schedules and momentum parameters
 params = [{'solver': 'sgd', 'learning_rate': 'constant', 'momentum': 0,
@@ -52,6 +57,7 @@ def plot_on_dataset(X, y, ax, name):
     # for each dataset, plot learning for each learning strategy
     print("\nlearning on dataset %s" % name)
     ax.set_title(name)
+
     X = MinMaxScaler().fit_transform(X)
     mlps = []
     if name == "digits":
@@ -64,12 +70,19 @@ def plot_on_dataset(X, y, ax, name):
         print("training: %s" % label)
         mlp = MLPClassifier(verbose=0, random_state=0,
                             max_iter=max_iter, **param)
-        mlp.fit(X, y)
+
+        # some parameter combinations will not converge as can be seen on the
+        # plots so they are ignored here
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", category=ConvergenceWarning,
+                                    module="sklearn")
+            mlp.fit(X, y)
+
         mlps.append(mlp)
         print("Training set score: %f" % mlp.score(X, y))
         print("Training set loss: %f" % mlp.loss_)
     for mlp, label, args in zip(mlps, labels, plot_args):
-        ax.plot(mlp.loss_curve_, label=label, **args)
+        ax.plot(mlp.loss_curve_, label=label, **args)
 
 
 fig, axes = plt.subplots(2, 2, figsize=(15, 10))
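
The substance of this change: the warning filter is scoped rather than global. Only ConvergenceWarning raised from sklearn modules during fit() is silenced, and only inside the with block; every other warning still propagates, and the previous filter state is restored on exit. A minimal standalone sketch of the same pattern (the iris data and the deliberately small max_iter are illustrative, not part of the commit):

import warnings

from sklearn.datasets import load_iris
from sklearn.exceptions import ConvergenceWarning
from sklearn.neural_network import MLPClassifier

X, y = load_iris(return_X_y=True)
# max_iter is far too low on purpose, so fit() would normally emit a
# ConvergenceWarning.
clf = MLPClassifier(max_iter=5, random_state=0)

with warnings.catch_warnings():
    # Ignore only ConvergenceWarning coming from sklearn; other warnings
    # are unaffected.
    warnings.filterwarnings("ignore", category=ConvergenceWarning,
                            module="sklearn")
    clf.fit(X, y)

# Outside the block the original warning filters are back in effect.
print("Training set score: %f" % clf.score(X, y))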

dev/_downloads/scikit-learn-docs.pdf

8.13 KB
Binary file not shown.

dev/_images/iris.png

948 Bytes (-230 Bytes)
Binary file not shown.

0 commit comments