
Commit f428e92 (parent: be74266)

Pushing the docs to dev/ for branch: master, commit d9b525ac6bd2171532e9145f1c2a03f4f0862ea8

File tree: 993 files changed, +2908 / -2899 lines

Two binary files changed (200 bytes and 197 bytes; contents not shown).

dev/_downloads/plot_gpc.ipynb

Lines changed: 1 addition & 1 deletion

@@ -26,7 +26,7 @@
   },
   "outputs": [],
   "source": [
-  <the cell's single-line script string>
+  <the same string with edgecolors=(0, 0, 0) added to both plt.scatter calls>
   ]
  }
 ],

(The notebook stores the whole cell source as one JSON string, hence the
1-line diff. The change inside the string is exactly the one shown in the
plot_gpc.py diff below.)

dev/_downloads/plot_gpc.py

Lines changed: 4 additions & 2 deletions

@@ -64,8 +64,10 @@
 
 # Plot posteriors
 plt.figure(0)
-plt.scatter(X[:train_size, 0], y[:train_size], c='k', label="Train data")
-plt.scatter(X[train_size:, 0], y[train_size:], c='g', label="Test data")
+plt.scatter(X[:train_size, 0], y[:train_size], c='k', label="Train data",
+            edgecolors=(0, 0, 0))
+plt.scatter(X[train_size:, 0], y[train_size:], c='g', label="Test data",
+            edgecolors=(0, 0, 0))
 X_ = np.linspace(0, 5, 100)
 plt.plot(X_, gp_fix.predict_proba(X_[:, np.newaxis])[:, 1], 'r',
          label="Initial kernel: %s" % gp_fix.kernel_)
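Why the explicit edge color: matplotlib 2.0 changed the default scatter style
so that marker edges match the face color (edgecolors='face'), which removes
the black outlines these examples relied on; passing the RGB tuple (0, 0, 0)
restores them. A minimal standalone sketch of the effect (illustrative only,
not part of this commit; the data is made up):

import numpy as np
import matplotlib.pyplot as plt

rng = np.random.RandomState(0)
x = rng.uniform(0, 5, 50)
y = (x > 2.5).astype(int)

fig, (ax0, ax1) = plt.subplots(1, 2, figsize=(8, 3))
# Left: matplotlib >= 2.0 default, no contrasting marker outline.
ax0.scatter(x, y, c='g')
ax0.set_title("default edgecolors")
# Right: explicit black outline, as added throughout this commit.
ax1.scatter(x, y, c='g', edgecolors=(0, 0, 0))
ax1.set_title("edgecolors=(0, 0, 0)")
plt.show()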

dev/_downloads/plot_gpc_xor.ipynb

Lines changed: 1 addition & 1 deletion

@@ -26,7 +26,7 @@
   },
   "outputs": [],
   "source": [
-  <the cell's single-line script string>
+  <the same string with edgecolors=(0, 0, 0) added to the plt.scatter call>
   ]
  }
 ],

(The change inside the string is exactly the one shown in the plot_gpc_xor.py
diff below.)

dev/_downloads/plot_gpc_xor.py

Lines changed: 2 additions & 1 deletion

@@ -44,7 +44,8 @@
                        aspect='auto', origin='lower', cmap=plt.cm.PuOr_r)
     contours = plt.contour(xx, yy, Z, levels=[0], linewidths=2,
                            linetypes='--')
-    plt.scatter(X[:, 0], X[:, 1], s=30, c=Y, cmap=plt.cm.Paired)
+    plt.scatter(X[:, 0], X[:, 1], s=30, c=Y, cmap=plt.cm.Paired,
+                edgecolors=(0, 0, 0))
     plt.xticks(())
     plt.yticks(())
     plt.axis([-3, 3, -3, 3])
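A pre-existing wart this commit leaves untouched: matplotlib's contour has no
linetypes keyword (older versions appear to ignore unknown keyword arguments
silently, so the dashed style simply never applied); the intended spelling is
presumably linestyles:

    contours = plt.contour(xx, yy, Z, levels=[0], linewidths=2,
                           linestyles='--')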

dev/_downloads/plot_gpr_noisy.ipynb

Lines changed: 1 addition & 1 deletion

@@ -26,7 +26,7 @@
   },
   "outputs": [],
   "source": [
-  <the cell's single-line script string>
+  <the same string with edgecolors=(0, 0, 0) added to both plt.scatter calls
   and the LML contour levels rounded via np.around>
   ]
  }
 ],

(The changes inside the string are exactly those shown in the
plot_gpr_noisy.py diff below.)

dev/_downloads/plot_gpr_noisy.py

Lines changed: 4 additions & 4 deletions

@@ -47,7 +47,7 @@
                  y_mean + np.sqrt(np.diag(y_cov)),
                  alpha=0.5, color='k')
 plt.plot(X_, 0.5*np.sin(3*X_), 'r', lw=3, zorder=9)
-plt.scatter(X[:, 0], y, c='r', s=50, zorder=10)
+plt.scatter(X[:, 0], y, c='r', s=50, zorder=10, edgecolors=(0, 0, 0))
 plt.title("Initial: %s\nOptimum: %s\nLog-Marginal-Likelihood: %s"
           % (kernel, gp.kernel_,
              gp.log_marginal_likelihood(gp.kernel_.theta)))
@@ -66,7 +66,7 @@
                  y_mean + np.sqrt(np.diag(y_cov)),
                  alpha=0.5, color='k')
 plt.plot(X_, 0.5*np.sin(3*X_), 'r', lw=3, zorder=9)
-plt.scatter(X[:, 0], y, c='r', s=50, zorder=10)
+plt.scatter(X[:, 0], y, c='r', s=50, zorder=10, edgecolors=(0, 0, 0))
 plt.title("Initial: %s\nOptimum: %s\nLog-Marginal-Likelihood: %s"
           % (kernel, gp.kernel_,
              gp.log_marginal_likelihood(gp.kernel_.theta)))
@@ -83,9 +83,9 @@
 
 vmin, vmax = (-LML).min(), (-LML).max()
 vmax = 50
+level = np.around(np.logspace(np.log10(vmin), np.log10(vmax), 50), decimals=1)
 plt.contour(Theta0, Theta1, -LML,
-            levels=np.logspace(np.log10(vmin), np.log10(vmax), 50),
-            norm=LogNorm(vmin=vmin, vmax=vmax))
+            levels=level, norm=LogNorm(vmin=vmin, vmax=vmax))
 plt.colorbar()
 plt.xscale("log")
 plt.yscale("log")
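The new level array presumably exists to make the contour levels (and hence
the colorbar tick labels) tidy one-decimal values instead of long logspace
floats. A small self-contained sketch with made-up bounds (vmin=2 and vmax=50
are illustrative, not this plot's actual values):

import numpy as np

vmin, vmax = 2.0, 50.0
raw = np.logspace(np.log10(vmin), np.log10(vmax), 50)   # 50 log-spaced levels
level = np.around(raw, decimals=1)                      # round for display
print(raw[:3])    # approx. [2.     2.1358 2.2808]
print(level[:3])  # [2.  2.1 2.3]

One trade-off worth noting: rounding log-spaced values to one decimal could
collapse adjacent levels into duplicates if the range were narrow; over a wide
range like this one the 50 levels stay distinct.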
