
Commit 7df9bad

Pushing the docs to dev/ for branch: master, commit 0e3bb17e6263a391c8409558634b75f437eb3f27
1 parent d78b002 commit 7df9bad


1,080 files changed (+3280, -3280 lines)

2 binary files changed (-65 bytes each; binary content not shown)

dev/_downloads/plot_gmm_covariances.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
- "# Author: Ron Weiss <[email protected]>, Gael Varoquaux\n# Modified by Thierry Guillemot <[email protected]>\n# License: BSD 3 clause\n\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\n\nimport numpy as np\n\nfrom sklearn import datasets\nfrom sklearn.mixture import GaussianMixture\nfrom sklearn.model_selection import StratifiedKFold\n\nprint(__doc__)\n\ncolors = ['navy', 'turquoise', 'darkorange']\n\n\ndef make_ellipses(gmm, ax):\n for n, color in enumerate(colors):\n if gmm.covariance_type == 'full':\n covariances = gmm.covariances_[n][:2, :2]\n elif gmm.covariance_type == 'tied':\n covariances = gmm.covariances_[:2, :2]\n elif gmm.covariance_type == 'diag':\n covariances = np.diag(gmm.covariances_[n][:2])\n elif gmm.covariance_type == 'spherical':\n covariances = np.eye(gmm.means_.shape[1]) * gmm.covariances_[n]\n v, w = np.linalg.eigh(covariances)\n u = w[0] / np.linalg.norm(w[0])\n angle = np.arctan2(u[1], u[0])\n angle = 180 * angle / np.pi # convert to degrees\n v = 2. * np.sqrt(2.) * np.sqrt(v)\n ell = mpl.patches.Ellipse(gmm.means_[n, :2], v[0], v[1],\n 180 + angle, color=color)\n ell.set_clip_box(ax.bbox)\n ell.set_alpha(0.5)\n ax.add_artist(ell)\n ax.set_aspect('equal', 'datalim')\n\niris = datasets.load_iris()\n\n# Break up the dataset into non-overlapping training (75%) and testing\n# (25%) sets.\nskf = StratifiedKFold(n_splits=4)\n# Only take the first fold.\ntrain_index, test_index = next(iter(skf.split(iris.data, iris.target)))\n\n\nX_train = iris.data[train_index]\ny_train = iris.target[train_index]\nX_test = iris.data[test_index]\ny_test = iris.target[test_index]\n\nn_classes = len(np.unique(y_train))\n\n# Try GMMs using different types of covariances.\nestimators = dict((cov_type, GaussianMixture(n_components=n_classes,\n covariance_type=cov_type, max_iter=20, random_state=0))\n for cov_type in ['spherical', 'diag', 'tied', 'full'])\n\nn_estimators = len(estimators)\n\nplt.figure(figsize=(3 * n_estimators // 2, 6))\nplt.subplots_adjust(bottom=.01, top=0.95, hspace=.15, wspace=.05,\n left=.01, right=.99)\n\n\nfor index, (name, estimator) in enumerate(estimators.items()):\n # Since we have class labels for the training data, we can\n # initialize the GMM parameters in a supervised manner.\n estimator.means_init = np.array([X_train[y_train == i].mean(axis=0)\n for i in range(n_classes)])\n\n # Train the other parameters using the EM algorithm.\n estimator.fit(X_train)\n\n h = plt.subplot(2, n_estimators // 2, index + 1)\n make_ellipses(estimator, h)\n\n for n, color in enumerate(colors):\n data = iris.data[iris.target == n]\n plt.scatter(data[:, 0], data[:, 1], s=0.8, color=color,\n label=iris.target_names[n])\n # Plot the test data with crosses\n for n, color in enumerate(colors):\n data = X_test[y_test == n]\n plt.scatter(data[:, 0], data[:, 1], marker='x', color=color)\n\n y_train_pred = estimator.predict(X_train)\n train_accuracy = np.mean(y_train_pred.ravel() == y_train.ravel()) * 100\n plt.text(0.05, 0.9, 'Train accuracy: %.1f' % train_accuracy,\n transform=h.transAxes)\n\n y_test_pred = estimator.predict(X_test)\n test_accuracy = np.mean(y_test_pred.ravel() == y_test.ravel()) * 100\n plt.text(0.05, 0.8, 'Test accuracy: %.1f' % test_accuracy,\n transform=h.transAxes)\n\n plt.xticks(())\n plt.yticks(())\n plt.title(name)\n\nplt.legend(scatterpoints=1, loc='lower right', prop=dict(size=12))\n\n\nplt.show()"
+ "# Author: Ron Weiss <[email protected]>, Gael Varoquaux\n# Modified by Thierry Guillemot <[email protected]>\n# License: BSD 3 clause\n\nimport matplotlib as mpl\nimport matplotlib.pyplot as plt\n\nimport numpy as np\n\nfrom sklearn import datasets\nfrom sklearn.mixture import GaussianMixture\nfrom sklearn.model_selection import StratifiedKFold\n\nprint(__doc__)\n\ncolors = ['navy', 'turquoise', 'darkorange']\n\n\ndef make_ellipses(gmm, ax):\n for n, color in enumerate(colors):\n if gmm.covariance_type == 'full':\n covariances = gmm.covariances_[n][:2, :2]\n elif gmm.covariance_type == 'tied':\n covariances = gmm.covariances_[:2, :2]\n elif gmm.covariance_type == 'diag':\n covariances = np.diag(gmm.covariances_[n][:2])\n elif gmm.covariance_type == 'spherical':\n covariances = np.eye(gmm.means_.shape[1]) * gmm.covariances_[n]\n v, w = np.linalg.eigh(covariances)\n u = w[0] / np.linalg.norm(w[0])\n angle = np.arctan2(u[1], u[0])\n angle = 180 * angle / np.pi # convert to degrees\n v = 2. * np.sqrt(2.) * np.sqrt(v)\n ell = mpl.patches.Ellipse(gmm.means_[n, :2], v[0], v[1],\n 180 + angle, color=color)\n ell.set_clip_box(ax.bbox)\n ell.set_alpha(0.5)\n ax.add_artist(ell)\n ax.set_aspect('equal', 'datalim')\n\niris = datasets.load_iris()\n\n# Break up the dataset into non-overlapping training (75%) and testing\n# (25%) sets.\nskf = StratifiedKFold(n_splits=4)\n# Only take the first fold.\ntrain_index, test_index = next(iter(skf.split(iris.data, iris.target)))\n\n\nX_train = iris.data[train_index]\ny_train = iris.target[train_index]\nX_test = iris.data[test_index]\ny_test = iris.target[test_index]\n\nn_classes = len(np.unique(y_train))\n\n# Try GMMs using different types of covariances.\nestimators = {cov_type: GaussianMixture(n_components=n_classes,\n covariance_type=cov_type, max_iter=20, random_state=0)\n for cov_type in ['spherical', 'diag', 'tied', 'full']}\n\nn_estimators = len(estimators)\n\nplt.figure(figsize=(3 * n_estimators // 2, 6))\nplt.subplots_adjust(bottom=.01, top=0.95, hspace=.15, wspace=.05,\n left=.01, right=.99)\n\n\nfor index, (name, estimator) in enumerate(estimators.items()):\n # Since we have class labels for the training data, we can\n # initialize the GMM parameters in a supervised manner.\n estimator.means_init = np.array([X_train[y_train == i].mean(axis=0)\n for i in range(n_classes)])\n\n # Train the other parameters using the EM algorithm.\n estimator.fit(X_train)\n\n h = plt.subplot(2, n_estimators // 2, index + 1)\n make_ellipses(estimator, h)\n\n for n, color in enumerate(colors):\n data = iris.data[iris.target == n]\n plt.scatter(data[:, 0], data[:, 1], s=0.8, color=color,\n label=iris.target_names[n])\n # Plot the test data with crosses\n for n, color in enumerate(colors):\n data = X_test[y_test == n]\n plt.scatter(data[:, 0], data[:, 1], marker='x', color=color)\n\n y_train_pred = estimator.predict(X_train)\n train_accuracy = np.mean(y_train_pred.ravel() == y_train.ravel()) * 100\n plt.text(0.05, 0.9, 'Train accuracy: %.1f' % train_accuracy,\n transform=h.transAxes)\n\n y_test_pred = estimator.predict(X_test)\n test_accuracy = np.mean(y_test_pred.ravel() == y_test.ravel()) * 100\n plt.text(0.05, 0.8, 'Test accuracy: %.1f' % test_accuracy,\n transform=h.transAxes)\n\n plt.xticks(())\n plt.yticks(())\n plt.title(name)\n\nplt.legend(scatterpoints=1, loc='lower right', prop=dict(size=12))\n\n\nplt.show()"
 ]
 }
],
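A note on the cell source above: make_ellipses has to branch on covariance_type because GaussianMixture exposes covariances_ with a different shape for each setting. A minimal sketch, independent of this commit, that prints the four shapes on the iris data used by the example:

import numpy as np
from sklearn.datasets import load_iris
from sklearn.mixture import GaussianMixture

X = load_iris().data  # 4 features, so n_features = 4 below
for cov_type in ['spherical', 'diag', 'tied', 'full']:
    gmm = GaussianMixture(n_components=3, covariance_type=cov_type,
                          random_state=0).fit(X)
    # spherical: (3,)   diag: (3, 4)   tied: (4, 4)   full: (3, 4, 4)
    print(cov_type, np.shape(gmm.covariances_))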

dev/_downloads/plot_gmm_covariances.py

Lines changed: 3 additions & 3 deletions
@@ -83,9 +83,9 @@ def make_ellipses(gmm, ax):
 n_classes = len(np.unique(y_train))
 
 # Try GMMs using different types of covariances.
-estimators = dict((cov_type, GaussianMixture(n_components=n_classes,
-                   covariance_type=cov_type, max_iter=20, random_state=0))
-                  for cov_type in ['spherical', 'diag', 'tied', 'full'])
+estimators = {cov_type: GaussianMixture(n_components=n_classes,
+              covariance_type=cov_type, max_iter=20, random_state=0)
+              for cov_type in ['spherical', 'diag', 'tied', 'full']}
 
 n_estimators = len(estimators)
 
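The change above is purely syntactic: a dict comprehension builds exactly the same mapping as dict() over a generator of (key, value) pairs, with one less layer of parentheses. A minimal sketch of the equivalence, using toy values rather than the example's estimators:

cov_types = ['spherical', 'diag', 'tied', 'full']

# Old style: dict() consuming a generator of (key, value) pairs.
pairs_form = dict((cov_type, len(cov_type)) for cov_type in cov_types)

# New style: dict comprehension; same mapping, more idiomatic.
comp_form = {cov_type: len(cov_type) for cov_type in cov_types}

assert pairs_form == comp_form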

dev/_downloads/plot_gpr_noisy_targets.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
- "print(__doc__)\n\n# Author: Vincent Dubourg <[email protected]>\n# Jake Vanderplas <[email protected]>\n# Jan Hendrik Metzen <[email protected]>s\n# License: BSD 3 clause\n\nimport numpy as np\nfrom matplotlib import pyplot as plt\n\nfrom sklearn.gaussian_process import GaussianProcessRegressor\nfrom sklearn.gaussian_process.kernels import RBF, ConstantKernel as C\n\nnp.random.seed(1)\n\n\ndef f(x):\n \"\"\"The function to predict.\"\"\"\n return x * np.sin(x)\n\n# ----------------------------------------------------------------------\n# First the noiseless case\nX = np.atleast_2d([1., 3., 5., 6., 7., 8.]).T\n\n# Observations\ny = f(X).ravel()\n\n# Mesh the input space for evaluations of the real function, the prediction and\n# its MSE\nx = np.atleast_2d(np.linspace(0, 10, 1000)).T\n\n# Instantiate a Gaussian Process model\nkernel = C(1.0, (1e-3, 1e3)) * RBF(10, (1e-2, 1e2))\ngp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=9)\n\n# Fit to data using Maximum Likelihood Estimation of the parameters\ngp.fit(X, y)\n\n# Make the prediction on the meshed x-axis (ask for MSE as well)\ny_pred, sigma = gp.predict(x, return_std=True)\n\n# Plot the function, the prediction and the 95% confidence interval based on\n# the MSE\nplt.figure()\nplt.plot(x, f(x), 'r:', label=r'$f(x) = x\\,\\sin(x)$')\nplt.plot(X, y, 'r.', markersize=10, label=u'Observations')\nplt.plot(x, y_pred, 'b-', label=u'Prediction')\nplt.fill(np.concatenate([x, x[::-1]]),\n np.concatenate([y_pred - 1.9600 * sigma,\n (y_pred + 1.9600 * sigma)[::-1]]),\n alpha=.5, fc='b', ec='None', label='95% confidence interval')\nplt.xlabel('$x$')\nplt.ylabel('$f(x)$')\nplt.ylim(-10, 20)\nplt.legend(loc='upper left')\n\n# ----------------------------------------------------------------------\n# now the noisy case\nX = np.linspace(0.1, 9.9, 20)\nX = np.atleast_2d(X).T\n\n# Observations and noise\ny = f(X).ravel()\ndy = 0.5 + 1.0 * np.random.random(y.shape)\nnoise = np.random.normal(0, dy)\ny += noise\n\n# Instantiate a Gaussian Process model\ngp = GaussianProcessRegressor(kernel=kernel, alpha=dy ** 2,\n n_restarts_optimizer=10)\n\n# Fit to data using Maximum Likelihood Estimation of the parameters\ngp.fit(X, y)\n\n# Make the prediction on the meshed x-axis (ask for MSE as well)\ny_pred, sigma = gp.predict(x, return_std=True)\n\n# Plot the function, the prediction and the 95% confidence interval based on\n# the MSE\nplt.figure()\nplt.plot(x, f(x), 'r:', label=r'$f(x) = x\\,\\sin(x)$')\nplt.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label=u'Observations')\nplt.plot(x, y_pred, 'b-', label=u'Prediction')\nplt.fill(np.concatenate([x, x[::-1]]),\n np.concatenate([y_pred - 1.9600 * sigma,\n (y_pred + 1.9600 * sigma)[::-1]]),\n alpha=.5, fc='b', ec='None', label='95% confidence interval')\nplt.xlabel('$x$')\nplt.ylabel('$f(x)$')\nplt.ylim(-10, 20)\nplt.legend(loc='upper left')\n\nplt.show()"
+ "print(__doc__)\n\n# Author: Vincent Dubourg <[email protected]>\n# Jake Vanderplas <[email protected]>\n# Jan Hendrik Metzen <[email protected]>s\n# License: BSD 3 clause\n\nimport numpy as np\nfrom matplotlib import pyplot as plt\n\nfrom sklearn.gaussian_process import GaussianProcessRegressor\nfrom sklearn.gaussian_process.kernels import RBF, ConstantKernel as C\n\nnp.random.seed(1)\n\n\ndef f(x):\n \"\"\"The function to predict.\"\"\"\n return x * np.sin(x)\n\n# ----------------------------------------------------------------------\n# First the noiseless case\nX = np.atleast_2d([1., 3., 5., 6., 7., 8.]).T\n\n# Observations\ny = f(X).ravel()\n\n# Mesh the input space for evaluations of the real function, the prediction and\n# its MSE\nx = np.atleast_2d(np.linspace(0, 10, 1000)).T\n\n# Instantiate a Gaussian Process model\nkernel = C(1.0, (1e-3, 1e3)) * RBF(10, (1e-2, 1e2))\ngp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=9)\n\n# Fit to data using Maximum Likelihood Estimation of the parameters\ngp.fit(X, y)\n\n# Make the prediction on the meshed x-axis (ask for MSE as well)\ny_pred, sigma = gp.predict(x, return_std=True)\n\n# Plot the function, the prediction and the 95% confidence interval based on\n# the MSE\nplt.figure()\nplt.plot(x, f(x), 'r:', label=r'$f(x) = x\\,\\sin(x)$')\nplt.plot(X, y, 'r.', markersize=10, label='Observations')\nplt.plot(x, y_pred, 'b-', label='Prediction')\nplt.fill(np.concatenate([x, x[::-1]]),\n np.concatenate([y_pred - 1.9600 * sigma,\n (y_pred + 1.9600 * sigma)[::-1]]),\n alpha=.5, fc='b', ec='None', label='95% confidence interval')\nplt.xlabel('$x$')\nplt.ylabel('$f(x)$')\nplt.ylim(-10, 20)\nplt.legend(loc='upper left')\n\n# ----------------------------------------------------------------------\n# now the noisy case\nX = np.linspace(0.1, 9.9, 20)\nX = np.atleast_2d(X).T\n\n# Observations and noise\ny = f(X).ravel()\ndy = 0.5 + 1.0 * np.random.random(y.shape)\nnoise = np.random.normal(0, dy)\ny += noise\n\n# Instantiate a Gaussian Process model\ngp = GaussianProcessRegressor(kernel=kernel, alpha=dy ** 2,\n n_restarts_optimizer=10)\n\n# Fit to data using Maximum Likelihood Estimation of the parameters\ngp.fit(X, y)\n\n# Make the prediction on the meshed x-axis (ask for MSE as well)\ny_pred, sigma = gp.predict(x, return_std=True)\n\n# Plot the function, the prediction and the 95% confidence interval based on\n# the MSE\nplt.figure()\nplt.plot(x, f(x), 'r:', label=r'$f(x) = x\\,\\sin(x)$')\nplt.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label='Observations')\nplt.plot(x, y_pred, 'b-', label='Prediction')\nplt.fill(np.concatenate([x, x[::-1]]),\n np.concatenate([y_pred - 1.9600 * sigma,\n (y_pred + 1.9600 * sigma)[::-1]]),\n alpha=.5, fc='b', ec='None', label='95% confidence interval')\nplt.xlabel('$x$')\nplt.ylabel('$f(x)$')\nplt.ylim(-10, 20)\nplt.legend(loc='upper left')\n\nplt.show()"
 ]
 }
],
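In both versions of the cell above, the shaded band is y_pred ± 1.9600 * sigma, labelled as a 95% confidence interval. The 1.9600 factor is the two-sided 95% quantile of the standard normal distribution; a minimal check, assuming SciPy is available (the example itself does not import it):

from scipy.stats import norm

# Two-sided 95% interval leaves 2.5% in each tail, so take the 97.5th percentile.
z = norm.ppf(0.975)
print(round(z, 4))  # 1.96, the factor hard-coded in the example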

dev/_downloads/plot_gpr_noisy_targets.py

Lines changed: 4 additions & 4 deletions
@@ -63,8 +63,8 @@ def f(x):
 # the MSE
 plt.figure()
 plt.plot(x, f(x), 'r:', label=r'$f(x) = x\,\sin(x)$')
-plt.plot(X, y, 'r.', markersize=10, label=u'Observations')
-plt.plot(x, y_pred, 'b-', label=u'Prediction')
+plt.plot(X, y, 'r.', markersize=10, label='Observations')
+plt.plot(x, y_pred, 'b-', label='Prediction')
 plt.fill(np.concatenate([x, x[::-1]]),
          np.concatenate([y_pred - 1.9600 * sigma,
                          (y_pred + 1.9600 * sigma)[::-1]]),
@@ -99,8 +99,8 @@ def f(x):
 # the MSE
 plt.figure()
 plt.plot(x, f(x), 'r:', label=r'$f(x) = x\,\sin(x)$')
-plt.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label=u'Observations')
-plt.plot(x, y_pred, 'b-', label=u'Prediction')
+plt.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label='Observations')
+plt.plot(x, y_pred, 'b-', label='Prediction')
 plt.fill(np.concatenate([x, x[::-1]]),
          np.concatenate([y_pred - 1.9600 * sigma,
                          (y_pred + 1.9600 * sigma)[::-1]]),
