
Commit 090f2d6

Pushing the docs to dev/ for branch: master, commit b74a76c11e18a45f8385a896763335da1c697dd6
1 parent: f24036d · commit: 090f2d6

File tree: 1,079 files changed (+3450 / -3450 lines)


dev/_downloads/plot_gpr_prior_posterior.ipynb

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
    },
    "outputs": [],
    "source": [
-    "print(__doc__)\n\n# Authors: Jan Hendrik Metzen <[email protected]>\n#\n# License: BSD 3 clause\n\nimport numpy as np\n\nfrom matplotlib import pyplot as plt\n\nfrom sklearn.gaussian_process import GaussianProcessRegressor\nfrom sklearn.gaussian_process.kernels import (RBF, Matern, RationalQuadratic,\n ExpSineSquared, DotProduct,\n ConstantKernel)\n\n\nkernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)),\n 1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1),\n 1.0 * ExpSineSquared(length_scale=1.0, periodicity=3.0,\n length_scale_bounds=(0.1, 10.0),\n periodicity_bounds=(1.0, 10.0)),\n ConstantKernel(0.1, (0.01, 10.0))\n * (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.0, 10.0)) ** 2),\n 1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0),\n nu=1.5)]\n\nfor fig_index, kernel in enumerate(kernels):\n # Specify Gaussian Process\n gp = GaussianProcessRegressor(kernel=kernel)\n\n # Plot prior\n plt.figure(fig_index, figsize=(8, 8))\n plt.subplot(2, 1, 1)\n X_ = np.linspace(0, 5, 100)\n y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)\n plt.plot(X_, y_mean, 'k', lw=3, zorder=9)\n plt.fill_between(X_, y_mean - y_std, y_mean + y_std,\n alpha=0.2, color='k')\n y_samples = gp.sample_y(X_[:, np.newaxis], 10)\n plt.plot(X_, y_samples, lw=1)\n plt.xlim(0, 5)\n plt.ylim(-3, 3)\n plt.title(\"Prior (kernel: %s)\" % kernel, fontsize=12)\n\n # Generate data and fit GP\n rng = np.random.RandomState(4)\n X = rng.uniform(0, 5, 10)[:, np.newaxis]\n y = np.sin((X[:, 0] - 2.5) ** 2)\n gp.fit(X, y)\n\n # Plot posterior\n plt.subplot(2, 1, 2)\n X_ = np.linspace(0, 5, 100)\n y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)\n plt.plot(X_, y_mean, 'k', lw=3, zorder=9)\n plt.fill_between(X_, y_mean - y_std, y_mean + y_std,\n alpha=0.2, color='k')\n\n y_samples = gp.sample_y(X_[:, np.newaxis], 10)\n plt.plot(X_, y_samples, lw=1)\n plt.scatter(X[:, 0], y, c='r', s=50, zorder=10, edgecolors=(0, 0, 0))\n plt.xlim(0, 5)\n plt.ylim(-3, 3)\n plt.title(\"Posterior (kernel: %s)\\n Log-Likelihood: %.3f\"\n % (gp.kernel_, gp.log_marginal_likelihood(gp.kernel_.theta)),\n fontsize=12)\n plt.tight_layout()\n\nplt.show()"
+    "print(__doc__)\n\n# Authors: Jan Hendrik Metzen <[email protected]>\n#\n# License: BSD 3 clause\n\nimport numpy as np\n\nfrom matplotlib import pyplot as plt\n\nfrom sklearn.gaussian_process import GaussianProcessRegressor\nfrom sklearn.gaussian_process.kernels import (RBF, Matern, RationalQuadratic,\n ExpSineSquared, DotProduct,\n ConstantKernel)\n\n\nkernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)),\n 1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1),\n 1.0 * ExpSineSquared(length_scale=1.0, periodicity=3.0,\n length_scale_bounds=(0.1, 10.0),\n periodicity_bounds=(1.0, 10.0)),\n ConstantKernel(0.1, (0.01, 10.0))\n * (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.1, 10.0)) ** 2),\n 1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0),\n nu=1.5)]\n\nfor fig_index, kernel in enumerate(kernels):\n # Specify Gaussian Process\n gp = GaussianProcessRegressor(kernel=kernel)\n\n # Plot prior\n plt.figure(fig_index, figsize=(8, 8))\n plt.subplot(2, 1, 1)\n X_ = np.linspace(0, 5, 100)\n y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)\n plt.plot(X_, y_mean, 'k', lw=3, zorder=9)\n plt.fill_between(X_, y_mean - y_std, y_mean + y_std,\n alpha=0.2, color='k')\n y_samples = gp.sample_y(X_[:, np.newaxis], 10)\n plt.plot(X_, y_samples, lw=1)\n plt.xlim(0, 5)\n plt.ylim(-3, 3)\n plt.title(\"Prior (kernel: %s)\" % kernel, fontsize=12)\n\n # Generate data and fit GP\n rng = np.random.RandomState(4)\n X = rng.uniform(0, 5, 10)[:, np.newaxis]\n y = np.sin((X[:, 0] - 2.5) ** 2)\n gp.fit(X, y)\n\n # Plot posterior\n plt.subplot(2, 1, 2)\n X_ = np.linspace(0, 5, 100)\n y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)\n plt.plot(X_, y_mean, 'k', lw=3, zorder=9)\n plt.fill_between(X_, y_mean - y_std, y_mean + y_std,\n alpha=0.2, color='k')\n\n y_samples = gp.sample_y(X_[:, np.newaxis], 10)\n plt.plot(X_, y_samples, lw=1)\n plt.scatter(X[:, 0], y, c='r', s=50, zorder=10, edgecolors=(0, 0, 0))\n plt.xlim(0, 5)\n plt.ylim(-3, 3)\n plt.title(\"Posterior (kernel: %s)\\n Log-Likelihood: %.3f\"\n % (gp.kernel_, gp.log_marginal_likelihood(gp.kernel_.theta)),\n fontsize=12)\n plt.tight_layout()\n\nplt.show()"
    ]
   }
  ],

dev/_downloads/plot_gpr_prior_posterior.py

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@
                                 length_scale_bounds=(0.1, 10.0),
                                 periodicity_bounds=(1.0, 10.0)),
            ConstantKernel(0.1, (0.01, 10.0))
-               * (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.0, 10.0)) ** 2),
+               * (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.1, 10.0)) ** 2),
            1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0),
                         nu=1.5)]
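
The only functional change in this commit is the DotProduct lower bound in the GPR prior/posterior example: sigma_0_bounds goes from (0.0, 10.0) to (0.1, 10.0), in both the .py script above and the generated notebook earlier in the diff. The commit message does not state a reason; a plausible motivation (an assumption, not taken from the commit) is that scikit-learn kernels report hyperparameter bounds in log-space, so a lower bound of exactly 0.0 becomes log(0) = -inf in kernel.bounds, which is not a finite box constraint for the hyperparameter optimizer. A minimal sketch of the observable difference, assuming a scikit-learn version that provides sklearn.gaussian_process.kernels.DotProduct:

    import numpy as np
    from sklearn.gaussian_process.kernels import DotProduct

    # Old bounds from the example: log(0.0) = -inf appears in the log-space
    # bounds (NumPy also emits a divide-by-zero RuntimeWarning here).
    old_kernel = DotProduct(sigma_0=1.0, sigma_0_bounds=(0.0, 10.0))
    print(old_kernel.bounds)   # e.g. [[      -inf  2.30258509]]

    # New bounds from this commit: both ends are finite in log-space.
    new_kernel = DotProduct(sigma_0=1.0, sigma_0_bounds=(0.1, 10.0))
    print(new_kernel.bounds)   # e.g. [[-2.30258509  2.30258509]]

    # Sanity check: kernel.bounds is the element-wise log of (0.1, 10.0).
    assert np.allclose(new_kernel.bounds, np.log([[0.1, 10.0]]))

This sketch only inspects the kernel's bounds; it does not claim anything about how the example's plots change.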

dev/_downloads/scikit-learn-docs.pdf

-38.6 KB
Binary file not shown.

dev/_images/iris.png

0 Bytes
