
Commit d49cd83

committed
Pushing the docs to dev/ for branch: master, commit 2afcc3692633dc80dc5454dd0eef3ae077b4c4cf
1 parent 6b963f2 commit d49cd83

File tree

1,024 files changed: +3125 -3125 lines changed


dev/_downloads/plot_gpr_noisy_targets.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-"print(__doc__)\n\n# Author: Vincent Dubourg <[email protected]>\n# Jake Vanderplas <[email protected]>\n# Jan Hendrik Metzen <[email protected]>s\n# License: BSD 3 clause\n\nimport numpy as np\nfrom matplotlib import pyplot as plt\n\nfrom sklearn.gaussian_process import GaussianProcessRegressor\nfrom sklearn.gaussian_process.kernels import RBF, ConstantKernel as C\n\nnp.random.seed(1)\n\n\ndef f(x):\n \"\"\"The function to predict.\"\"\"\n return x * np.sin(x)\n\n# ----------------------------------------------------------------------\n# First the noiseless case\nX = np.atleast_2d([1., 3., 5., 6., 7., 8.]).T\n\n# Observations\ny = f(X).ravel()\n\n# Mesh the input space for evaluations of the real function, the prediction and\n# its MSE\nx = np.atleast_2d(np.linspace(0, 10, 1000)).T\n\n# Instanciate a Gaussian Process model\nkernel = C(1.0, (1e-3, 1e3)) * RBF(10, (1e-2, 1e2))\ngp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=9)\n\n# Fit to data using Maximum Likelihood Estimation of the parameters\ngp.fit(X, y)\n\n# Make the prediction on the meshed x-axis (ask for MSE as well)\ny_pred, sigma = gp.predict(x, return_std=True)\n\n# Plot the function, the prediction and the 95% confidence interval based on\n# the MSE\nplt.figure()\nplt.plot(x, f(x), 'r:', label=u'$f(x) = x\\,\\sin(x)$')\nplt.plot(X, y, 'r.', markersize=10, label=u'Observations')\nplt.plot(x, y_pred, 'b-', label=u'Prediction')\nplt.fill(np.concatenate([x, x[::-1]]),\n np.concatenate([y_pred - 1.9600 * sigma,\n (y_pred + 1.9600 * sigma)[::-1]]),\n alpha=.5, fc='b', ec='None', label='95% confidence interval')\nplt.xlabel('$x$')\nplt.ylabel('$f(x)$')\nplt.ylim(-10, 20)\nplt.legend(loc='upper left')\n\n# ----------------------------------------------------------------------\n# now the noisy case\nX = np.linspace(0.1, 9.9, 20)\nX = np.atleast_2d(X).T\n\n# Observations and noise\ny = f(X).ravel()\ndy = 0.5 + 1.0 * np.random.random(y.shape)\nnoise = np.random.normal(0, dy)\ny += noise\n\n# Instanciate a Gaussian Process model\ngp = GaussianProcessRegressor(kernel=kernel, alpha=(dy / y) ** 2,\n n_restarts_optimizer=10)\n\n# Fit to data using Maximum Likelihood Estimation of the parameters\ngp.fit(X, y)\n\n# Make the prediction on the meshed x-axis (ask for MSE as well)\ny_pred, sigma = gp.predict(x, return_std=True)\n\n# Plot the function, the prediction and the 95% confidence interval based on\n# the MSE\nplt.figure()\nplt.plot(x, f(x), 'r:', label=u'$f(x) = x\\,\\sin(x)$')\nplt.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label=u'Observations')\nplt.plot(x, y_pred, 'b-', label=u'Prediction')\nplt.fill(np.concatenate([x, x[::-1]]),\n np.concatenate([y_pred - 1.9600 * sigma,\n (y_pred + 1.9600 * sigma)[::-1]]),\n alpha=.5, fc='b', ec='None', label='95% confidence interval')\nplt.xlabel('$x$')\nplt.ylabel('$f(x)$')\nplt.ylim(-10, 20)\nplt.legend(loc='upper left')\n\nplt.show()"
+"print(__doc__)\n\n# Author: Vincent Dubourg <[email protected]>\n# Jake Vanderplas <[email protected]>\n# Jan Hendrik Metzen <[email protected]>s\n# License: BSD 3 clause\n\nimport numpy as np\nfrom matplotlib import pyplot as plt\n\nfrom sklearn.gaussian_process import GaussianProcessRegressor\nfrom sklearn.gaussian_process.kernels import RBF, ConstantKernel as C\n\nnp.random.seed(1)\n\n\ndef f(x):\n \"\"\"The function to predict.\"\"\"\n return x * np.sin(x)\n\n# ----------------------------------------------------------------------\n# First the noiseless case\nX = np.atleast_2d([1., 3., 5., 6., 7., 8.]).T\n\n# Observations\ny = f(X).ravel()\n\n# Mesh the input space for evaluations of the real function, the prediction and\n# its MSE\nx = np.atleast_2d(np.linspace(0, 10, 1000)).T\n\n# Instanciate a Gaussian Process model\nkernel = C(1.0, (1e-3, 1e3)) * RBF(10, (1e-2, 1e2))\ngp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=9)\n\n# Fit to data using Maximum Likelihood Estimation of the parameters\ngp.fit(X, y)\n\n# Make the prediction on the meshed x-axis (ask for MSE as well)\ny_pred, sigma = gp.predict(x, return_std=True)\n\n# Plot the function, the prediction and the 95% confidence interval based on\n# the MSE\nplt.figure()\nplt.plot(x, f(x), 'r:', label=u'$f(x) = x\\,\\sin(x)$')\nplt.plot(X, y, 'r.', markersize=10, label=u'Observations')\nplt.plot(x, y_pred, 'b-', label=u'Prediction')\nplt.fill(np.concatenate([x, x[::-1]]),\n np.concatenate([y_pred - 1.9600 * sigma,\n (y_pred + 1.9600 * sigma)[::-1]]),\n alpha=.5, fc='b', ec='None', label='95% confidence interval')\nplt.xlabel('$x$')\nplt.ylabel('$f(x)$')\nplt.ylim(-10, 20)\nplt.legend(loc='upper left')\n\n# ----------------------------------------------------------------------\n# now the noisy case\nX = np.linspace(0.1, 9.9, 20)\nX = np.atleast_2d(X).T\n\n# Observations and noise\ny = f(X).ravel()\ndy = 0.5 + 1.0 * np.random.random(y.shape)\nnoise = np.random.normal(0, dy)\ny += noise\n\n# Instantiate a Gaussian Process model\ngp = GaussianProcessRegressor(kernel=kernel, alpha=dy ** 2,\n n_restarts_optimizer=10)\n\n# Fit to data using Maximum Likelihood Estimation of the parameters\ngp.fit(X, y)\n\n# Make the prediction on the meshed x-axis (ask for MSE as well)\ny_pred, sigma = gp.predict(x, return_std=True)\n\n# Plot the function, the prediction and the 95% confidence interval based on\n# the MSE\nplt.figure()\nplt.plot(x, f(x), 'r:', label=u'$f(x) = x\\,\\sin(x)$')\nplt.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label=u'Observations')\nplt.plot(x, y_pred, 'b-', label=u'Prediction')\nplt.fill(np.concatenate([x, x[::-1]]),\n np.concatenate([y_pred - 1.9600 * sigma,\n (y_pred + 1.9600 * sigma)[::-1]]),\n alpha=.5, fc='b', ec='None', label='95% confidence interval')\nplt.xlabel('$x$')\nplt.ylabel('$f(x)$')\nplt.ylim(-10, 20)\nplt.legend(loc='upper left')\n\nplt.show()"
 ]
 }
 ],

dev/_downloads/plot_gpr_noisy_targets.py

Lines changed: 2 additions & 2 deletions
@@ -85,8 +85,8 @@ def f(x):
 noise = np.random.normal(0, dy)
 y += noise

-# Instanciate a Gaussian Process model
-gp = GaussianProcessRegressor(kernel=kernel, alpha=(dy / y) ** 2,
+# Instantiate a Gaussian Process model
+gp = GaussianProcessRegressor(kernel=kernel, alpha=dy ** 2,
                               n_restarts_optimizer=10)

 # Fit to data using Maximum Likelihood Estimation of the parameters
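
About the change above: in scikit-learn's GaussianProcessRegressor, the alpha parameter is added to the diagonal of the training kernel matrix and is interpreted as the variance of independent Gaussian noise on each target, so observations with noise standard deviation dy enter as alpha=dy ** 2 rather than the relative quantity (dy / y) ** 2. Below is a minimal, self-contained sketch of the corrected noisy fit, with the data and kernel taken from the example in this commit and the plotting left out.

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF, ConstantKernel as C

np.random.seed(1)


def f(x):
    """The function to predict."""
    return x * np.sin(x)


# Noisy observations with per-point noise standard deviation dy
X = np.atleast_2d(np.linspace(0.1, 9.9, 20)).T
y = f(X).ravel()
dy = 0.5 + 1.0 * np.random.random(y.shape)
y += np.random.normal(0, dy)

# alpha is the per-sample noise *variance* added to the kernel diagonal,
# hence dy ** 2
kernel = C(1.0, (1e-3, 1e3)) * RBF(10, (1e-2, 1e2))
gp = GaussianProcessRegressor(kernel=kernel, alpha=dy ** 2,
                              n_restarts_optimizer=10)
gp.fit(X, y)

# Posterior mean and standard deviation on a dense grid
x = np.atleast_2d(np.linspace(0, 10, 1000)).T
y_pred, sigma = gp.predict(x, return_std=True)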

dev/_downloads/scikit-learn-docs.pdf

-17.4 KB
Binary file not shown.
