Commit d6e52ea

Pushing the docs to dev/ for branch: master, commit 78149c4085198a93dcac3154ed3d839209007183
1 parent 3614654 · commit d6e52ea

File tree

900 files changed, +2768 / -2765 lines changed

Two binary files changed (39 Bytes, 38 Bytes); binary files not shown.

dev/_downloads/plot_gpc_iris.ipynb

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@
"execution_count": null,
"cell_type": "code",
"source": [
-
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn import datasets\nfrom sklearn.gaussian_process import GaussianProcessClassifier\nfrom sklearn.gaussian_process.kernels import RBF\n\n# import some data to play with\niris = datasets.load_iris()\nX = iris.data[:, :2] # we only take the first two features.\ny = np.array(iris.target, dtype=int)\n\nh = .02 # step size in the mesh\n\nkernel = 1.0 * RBF([1.0])\ngpc_rbf_isotropic = GaussianProcessClassifier(kernel=kernel).fit(X, y)\nkernel = 1.0 * RBF([1.0, 1.0])\ngpc_rbf_anisotropic = GaussianProcessClassifier(kernel=kernel).fit(X, y)\n\n# create a mesh to plot in\nx_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\ny_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\nxx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n\ntitles = [\"Isotropic RBF\", \"Anisotropic RBF\"]\nplt.figure(figsize=(10, 5))\nfor i, clf in enumerate((gpc_rbf_isotropic, gpc_rbf_anisotropic)):\n # Plot the predicted probabilities. For that, we will assign a color to\n # each point in the mesh [x_min, m_max]x[y_min, y_max].\n plt.subplot(1, 2, i + 1)\n\n Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])\n\n # Put the result into a color plot\n Z = Z.reshape((xx.shape[0], xx.shape[1], 3))\n plt.imshow(Z, extent=(x_min, x_max, y_min, y_max), origin=\"lower\")\n\n # Plot also the training points\n plt.scatter(X[:, 0], X[:, 1], c=np.array([\"r\", \"g\", \"b\"])[y])\n plt.xlabel('Sepal length')\n plt.ylabel('Sepal width')\n plt.xlim(xx.min(), xx.max())\n plt.ylim(yy.min(), yy.max())\n plt.xticks(())\n plt.yticks(())\n plt.title(\"%s, LML: %.3f\" %\n (titles[i], clf.log_marginal_likelihood(clf.kernel_.theta)))\n\nplt.tight_layout()\nplt.show()"
+
"print(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn import datasets\nfrom sklearn.gaussian_process import GaussianProcessClassifier\nfrom sklearn.gaussian_process.kernels import RBF\n\n# import some data to play with\niris = datasets.load_iris()\nX = iris.data[:, :2] # we only take the first two features.\ny = np.array(iris.target, dtype=int)\n\nh = .02 # step size in the mesh\n\nkernel = 1.0 * RBF([1.0])\ngpc_rbf_isotropic = GaussianProcessClassifier(kernel=kernel).fit(X, y)\nkernel = 1.0 * RBF([1.0, 1.0])\ngpc_rbf_anisotropic = GaussianProcessClassifier(kernel=kernel).fit(X, y)\n\n# create a mesh to plot in\nx_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\ny_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\nxx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n\ntitles = [\"Isotropic RBF\", \"Anisotropic RBF\"]\nplt.figure(figsize=(10, 5))\nfor i, clf in enumerate((gpc_rbf_isotropic, gpc_rbf_anisotropic)):\n # Plot the predicted probabilities. For that, we will assign a color to\n # each point in the mesh [x_min, m_max]x[y_min, y_max].\n plt.subplot(1, 2, i + 1)\n\n Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])\n\n # Put the result into a color plot\n Z = Z.reshape((xx.shape[0], xx.shape[1], 3))\n plt.imshow(Z, extent=(x_min, x_max, y_min, y_max), origin=\"lower\")\n\n # Plot also the training points\n plt.scatter(X[:, 0], X[:, 1], c=np.array([\"r\", \"g\", \"b\"])[y],\n edgecolors=(0, 0, 0))\n plt.xlabel('Sepal length')\n plt.ylabel('Sepal width')\n plt.xlim(xx.min(), xx.max())\n plt.ylim(yy.min(), yy.max())\n plt.xticks(())\n plt.yticks(())\n plt.title(\"%s, LML: %.3f\" %\n (titles[i], clf.log_marginal_likelihood(clf.kernel_.theta)))\n\nplt.tight_layout()\nplt.show()"
],
"outputs": [],
"metadata": {

dev/_downloads/plot_gpc_iris.py

Lines changed: 2 additions & 1 deletion
@@ -48,7 +48,8 @@
     plt.imshow(Z, extent=(x_min, x_max, y_min, y_max), origin="lower")
 
     # Plot also the training points
-    plt.scatter(X[:, 0], X[:, 1], c=np.array(["r", "g", "b"])[y])
+    plt.scatter(X[:, 0], X[:, 1], c=np.array(["r", "g", "b"])[y],
+                edgecolors=(0, 0, 0))
     plt.xlabel('Sepal length')
     plt.ylabel('Sepal width')
     plt.xlim(xx.min(), xx.max())
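
The only functional change in this commit is the edgecolors=(0, 0, 0) argument added to plt.scatter, which outlines each training point in black so the markers stay visible on top of the probability map drawn with imshow. A minimal, self-contained sketch of the same idea (the random points, fake class labels, and random background below are illustrative stand-ins, not the iris example itself):

import numpy as np
import matplotlib.pyplot as plt

# Illustrative stand-in data: 60 random 2-D points with three fake classes.
rng = np.random.RandomState(0)
X = rng.rand(60, 2)
y = rng.randint(0, 3, 60)

# A random RGB image plays the role of the predicted-probability map that
# plot_gpc_iris.py draws with imshow.
background = rng.rand(20, 20, 3)
plt.imshow(background, extent=(0, 1, 0, 1), origin="lower")

# Without edgecolors, the red/green/blue markers can blend into the background;
# edgecolors=(0, 0, 0) draws a black outline around every marker.
plt.scatter(X[:, 0], X[:, 1], c=np.array(["r", "g", "b"])[y],
            edgecolors=(0, 0, 0))
plt.show()

The same keyword appears in both the .py script and the notebook cell above; it only affects marker rendering, not the fitted GaussianProcessClassifier models.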

dev/_downloads/scikit-learn-docs.pdf

27.3 KB
Binary file not shown.
Additional binary files changed (-378 Bytes, 297 Bytes, 2.78 KB); binary files not shown.
