Skip to content

Commit 2edbd26

Browse files
committed
Pushing the docs for revision for branch: master, commit 2dd5a9a1b08f3b70e96dc4959af96d218a4c2c78
1 parent 4f45332 commit 2edbd26

File tree

843 files changed

+2953
-2954
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

843 files changed

+2953
-2954
lines changed

dev/_downloads/mlcomp_sparse_document_classification.py

Lines changed: 5 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -44,7 +44,7 @@
4444
import os
4545
import numpy as np
4646
import scipy.sparse as sp
47-
import pylab as pl
47+
import matplotlib.pyplot as plt
4848

4949
from sklearn.datasets import load_mlcomp
5050
from sklearn.feature_extraction.text import TfidfVectorizer
@@ -121,9 +121,9 @@ def benchmark(clf_class, params, name):
121121
print(cm)
122122

123123
# Show confusion matrix
124-
pl.matshow(cm)
125-
pl.title('Confusion matrix of the %s classifier' % name)
126-
pl.colorbar()
124+
plt.matshow(cm)
125+
plt.title('Confusion matrix of the %s classifier' % name)
126+
plt.colorbar()
127127

128128

129129
print("Testbenching a linear classifier...")
@@ -142,4 +142,4 @@ def benchmark(clf_class, params, name):
142142

143143
benchmark(MultinomialNB, parameters, 'MultinomialNB')
144144

145-
pl.show()
145+
plt.show()

dev/_downloads/plot_digits_classification.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -27,7 +27,7 @@
2727
# The data that we are interested in is made of 8x8 images of digits, let's
2828
# have a look at the first 4 images, stored in the `images` attribute of the
2929
# dataset. If we were working from image files, we could load them using
30-
# pylab.imread. Note that each image must have the same size. For these
30+
# matplotlib.pyplot.imread. Note that each image must have the same size. For these
3131
# images, we know which digit they represent: it is given in the 'target' of
3232
# the dataset.
3333
images_and_labels = list(zip(digits.images, digits.target))

dev/_downloads/plot_gpc_isoprobability.py

Lines changed: 24 additions & 24 deletions
Original file line number · Diff line number · Diff line change
@@ -18,7 +18,7 @@
1818

1919
import numpy as np
2020

21-
from matplotlib import pyplot as pl
21+
from matplotlib import pyplot as plt
2222
from matplotlib import cm
2323

2424
from sklearn.gaussian_process import GaussianProcessClassifier
@@ -64,39 +64,39 @@ def g(x):
6464
y_prob = y_prob.reshape((res, res))
6565

6666
# Plot the probabilistic classification iso-values
67-
fig = pl.figure(1)
67+
fig = plt.figure(1)
6868
ax = fig.gca()
6969
ax.axes.set_aspect('equal')
70-
pl.xticks([])
71-
pl.yticks([])
70+
plt.xticks([])
71+
plt.yticks([])
7272
ax.set_xticklabels([])
7373
ax.set_yticklabels([])
74-
pl.xlabel('$x_1$')
75-
pl.ylabel('$x_2$')
74+
plt.xlabel('$x_1$')
75+
plt.ylabel('$x_2$')
7676

77-
cax = pl.imshow(y_prob, cmap=cm.gray_r, alpha=0.8,
78-
extent=(-lim, lim, -lim, lim))
79-
norm = pl.matplotlib.colors.Normalize(vmin=0., vmax=0.9)
80-
cb = pl.colorbar(cax, ticks=[0., 0.2, 0.4, 0.6, 0.8, 1.], norm=norm)
77+
cax = plt.imshow(y_prob, cmap=cm.gray_r, alpha=0.8,
78+
extent=(-lim, lim, -lim, lim))
79+
norm = plt.matplotlib.colors.Normalize(vmin=0., vmax=0.9)
80+
cb = plt.colorbar(cax, ticks=[0., 0.2, 0.4, 0.6, 0.8, 1.], norm=norm)
8181
cb.set_label('${\\rm \mathbb{P}}\left[\widehat{G}(\mathbf{x}) \leq 0\\right]$')
82-
pl.clim(0, 1)
82+
plt.clim(0, 1)
8383

84-
pl.plot(X[y <= 0, 0], X[y <= 0, 1], 'r.', markersize=12)
84+
plt.plot(X[y <= 0, 0], X[y <= 0, 1], 'r.', markersize=12)
8585

86-
pl.plot(X[y > 0, 0], X[y > 0, 1], 'b.', markersize=12)
86+
plt.plot(X[y > 0, 0], X[y > 0, 1], 'b.', markersize=12)
8787

88-
cs = pl.contour(x1, x2, y_true, [0.], colors='k', linestyles='dashdot')
88+
cs = plt.contour(x1, x2, y_true, [0.], colors='k', linestyles='dashdot')
8989

90-
cs = pl.contour(x1, x2, y_prob, [0.666], colors='b',
91-
linestyles='solid')
92-
pl.clabel(cs, fontsize=11)
90+
cs = plt.contour(x1, x2, y_prob, [0.666], colors='b',
91+
linestyles='solid')
92+
plt.clabel(cs, fontsize=11)
9393

94-
cs = pl.contour(x1, x2, y_prob, [0.5], colors='k',
95-
linestyles='dashed')
96-
pl.clabel(cs, fontsize=11)
94+
cs = plt.contour(x1, x2, y_prob, [0.5], colors='k',
95+
linestyles='dashed')
96+
plt.clabel(cs, fontsize=11)
9797

98-
cs = pl.contour(x1, x2, y_prob, [0.334], colors='r',
99-
linestyles='solid')
100-
pl.clabel(cs, fontsize=11)
98+
cs = plt.contour(x1, x2, y_prob, [0.334], colors='r',
99+
linestyles='solid')
100+
plt.clabel(cs, fontsize=11)
101101

102-
pl.show()
102+
plt.show()

dev/_downloads/plot_gpr_noisy_targets.py

Lines changed: 27 additions & 27 deletions
Original file line number · Diff line number · Diff line change
@@ -26,7 +26,7 @@
2626
# Licence: BSD 3 clause
2727

2828
import numpy as np
29-
from matplotlib import pyplot as pl
29+
from matplotlib import pyplot as plt
3030

3131
from sklearn.gaussian_process import GaussianProcessRegressor
3232
from sklearn.gaussian_process.kernels import RBF, ConstantKernel as C
@@ -61,18 +61,18 @@ def f(x):
6161

6262
# Plot the function, the prediction and the 95% confidence interval based on
6363
# the MSE
64-
fig = pl.figure()
65-
pl.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
66-
pl.plot(X, y, 'r.', markersize=10, label=u'Observations')
67-
pl.plot(x, y_pred, 'b-', label=u'Prediction')
68-
pl.fill(np.concatenate([x, x[::-1]]),
69-
np.concatenate([y_pred - 1.9600 * sigma,
70-
(y_pred + 1.9600 * sigma)[::-1]]),
71-
alpha=.5, fc='b', ec='None', label='95% confidence interval')
72-
pl.xlabel('$x$')
73-
pl.ylabel('$f(x)$')
74-
pl.ylim(-10, 20)
75-
pl.legend(loc='upper left')
64+
fig = plt.figure()
65+
plt.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
66+
plt.plot(X, y, 'r.', markersize=10, label=u'Observations')
67+
plt.plot(x, y_pred, 'b-', label=u'Prediction')
68+
plt.fill(np.concatenate([x, x[::-1]]),
69+
np.concatenate([y_pred - 1.9600 * sigma,
70+
(y_pred + 1.9600 * sigma)[::-1]]),
71+
alpha=.5, fc='b', ec='None', label='95% confidence interval')
72+
plt.xlabel('$x$')
73+
plt.ylabel('$f(x)$')
74+
plt.ylim(-10, 20)
75+
plt.legend(loc='upper left')
7676

7777
# ----------------------------------------------------------------------
7878
# now the noisy case
@@ -97,17 +97,17 @@ def f(x):
9797

9898
# Plot the function, the prediction and the 95% confidence interval based on
9999
# the MSE
100-
fig = pl.figure()
101-
pl.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
102-
pl.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label=u'Observations')
103-
pl.plot(x, y_pred, 'b-', label=u'Prediction')
104-
pl.fill(np.concatenate([x, x[::-1]]),
105-
np.concatenate([y_pred - 1.9600 * sigma,
106-
(y_pred + 1.9600 * sigma)[::-1]]),
107-
alpha=.5, fc='b', ec='None', label='95% confidence interval')
108-
pl.xlabel('$x$')
109-
pl.ylabel('$f(x)$')
110-
pl.ylim(-10, 20)
111-
pl.legend(loc='upper left')
112-
113-
pl.show()
100+
fig = plt.figure()
101+
plt.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
102+
plt.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label=u'Observations')
103+
plt.plot(x, y_pred, 'b-', label=u'Prediction')
104+
plt.fill(np.concatenate([x, x[::-1]]),
105+
np.concatenate([y_pred - 1.9600 * sigma,
106+
(y_pred + 1.9600 * sigma)[::-1]]),
107+
alpha=.5, fc='b', ec='None', label='95% confidence interval')
108+
plt.xlabel('$x$')
109+
plt.ylabel('$f(x)$')
110+
plt.ylim(-10, 20)
111+
plt.legend(loc='upper left')
112+
113+
plt.show()

dev/_downloads/plot_sparse_coding.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -17,7 +17,7 @@
1717
print(__doc__)
1818

1919
import numpy as np
20-
import matplotlib.pylab as plt
20+
import matplotlib.pyplot as plt
2121

2222
from sklearn.decomposition import SparseCoder
2323

78 Bytes
78 Bytes
190 Bytes
190 Bytes
74 Bytes

0 commit comments

Comments (0)