
Commit fe0c90a

Pushing the docs for revision for branch: master, commit 37c8d0297be0f63cf5ceea55259eed3629023885
1 parent a82a41b commit fe0c90a

2,590 files changed: +11,840 additions, −8,877 deletions

dev/_downloads/face_recognition.py

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@
 
 .. _LFW: http://vis-www.cs.umass.edu/lfw/
 
-Expected results for the top 5 most represented people in the dataset::
+Expected results for the top 5 most represented people in the dataset:
 
 ================== ============ ======= ========== =======
                    precision    recall  f1-score   support
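Note on the change: in reStructuredText a trailing "::" turns the indented block that follows into a literal block rendered verbatim. Dropping one colon leaves the sentence as ordinary body text, presumably so the expected-results grid that follows is parsed as a reST table instead.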

dev/_downloads/plot_approximate_nearest_neighbors_scalability.py

Lines changed: 1 addition & 1 deletion
@@ -89,7 +89,7 @@
 
 for i in range(n_iter):
     # pick one query at random to study query time variability in LSHForest
-    query = queries[rng.randint(0, n_queries)]
+    query = queries[[rng.randint(0, n_queries)]]
 
     t0 = time.time()
     exact_neighbors = nbrs.kneighbors(query, return_distance=False)
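The extra brackets are not a typo: indexing a NumPy array with a plain integer returns a 1-D row, while indexing with a one-element list keeps the result 2-D, and kneighbors expects a 2-D (n_queries, n_features) array. A minimal sketch of the difference, using nothing beyond NumPy:

    import numpy as np

    queries = np.arange(12.0).reshape(4, 3)
    print(queries[1].shape)    # (3,)   -- 1-D row, ambiguous as estimator input
    print(queries[[1]].shape)  # (1, 3) -- 2-D, one query with three features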

dev/_downloads/plot_compare_methods.py

Lines changed: 1 addition & 1 deletion
@@ -113,7 +113,7 @@
 Y = tsne.fit_transform(X)
 t1 = time()
 print("t-SNE: %.2g sec" % (t1 - t0))
-ax = fig.add_subplot(250)
+ax = fig.add_subplot(2, 5, 10)
 plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
 plt.title("t-SNE (%.2g sec)" % (t1 - t0))
 ax.xaxis.set_major_formatter(NullFormatter())
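The old call was a bug: the three-digit shorthand add_subplot(nmi) packs one digit each for rows, columns, and index, so the 10th cell of a 2x5 grid cannot be spelled 250 (that parses as index 0, and subplot indices start at 1). The explicit three-argument form is required once the index reaches double digits. A minimal sketch:

    import matplotlib.pyplot as plt

    fig = plt.figure()
    ax9 = fig.add_subplot(259)        # shorthand is fine: 2 rows, 5 cols, cell 9
    ax10 = fig.add_subplot(2, 5, 10)  # cell 10 needs the explicit form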

dev/_downloads/plot_feature_agglomeration_vs_univariate_selection.py

Lines changed: 1 addition & 1 deletion
@@ -85,7 +85,7 @@
 clf = GridSearchCV(clf, {'anova__percentile': [5, 10, 20]}, cv=cv)
 clf.fit(X, y) # set the best parameters
 coef_ = clf.best_estimator_.steps[-1][1].coef_
-coef_ = clf.best_estimator_.steps[0][1].inverse_transform(coef_)
+coef_ = clf.best_estimator_.steps[0][1].inverse_transform(coef_.reshape(1, -1))
 coef_selection_ = coef_.reshape(size, size)
 
 ###############################################################################
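The reshape is needed because scikit-learn transformers, inverse_transform included, operate on 2-D (n_samples, n_features) arrays; reshape(1, -1) turns the 1-D coefficient vector into a single-sample row. A minimal sketch of the idiom in plain NumPy:

    import numpy as np

    coef = np.array([0.5, -1.0, 2.0])  # shape (3,)
    row = coef.reshape(1, -1)          # shape (1, 3): one sample, three features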

dev/_downloads/plot_gradient_boosting_regression.py

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@
 # compute test set deviance
 test_score = np.zeros((params['n_estimators'],), dtype=np.float64)
 
-for i, y_pred in enumerate(clf.staged_decision_function(X_test)):
+for i, y_pred in enumerate(clf.staged_predict(X_test)):
     test_score[i] = clf.loss_(y_test, y_pred)
 
 plt.figure(figsize=(12, 6))
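staged_predict is a generator yielding the ensemble's prediction after each boosting stage, already in the original target space, which is what the loss object compares against y_test; staged_decision_function yields raw decision values instead. A minimal sketch of staged evaluation, assuming a fitted GradientBoostingRegressor clf and held-out arrays X_test, y_test:

    import numpy as np

    # one score per boosting stage; handy for choosing n_estimators
    mse_per_stage = np.array([np.mean((y_test - y_pred) ** 2)
                              for y_pred in clf.staged_predict(X_test)])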

dev/_downloads/plot_lasso_coordinate_descent_path.py

Lines changed: 16 additions & 11 deletions
@@ -13,6 +13,7 @@
 # Author: Alexandre Gramfort <[email protected]>
 # License: BSD 3 clause
 
+from itertools import cycle
 import numpy as np
 import matplotlib.pyplot as plt
 
@@ -47,9 +48,13 @@
 
 plt.figure(1)
 ax = plt.gca()
-ax.set_color_cycle(2 * ['b', 'r', 'g', 'c', 'k'])
-l1 = plt.plot(-np.log10(alphas_lasso), coefs_lasso.T)
-l2 = plt.plot(-np.log10(alphas_enet), coefs_enet.T, linestyle='--')
+
+colors = cycle(['b', 'r', 'g', 'c', 'k'])
+neg_log_alphas_lasso = -np.log10(alphas_lasso)
+neg_log_alphas_enet = -np.log10(alphas_enet)
+for coef_l, coef_e, c in zip(coefs_lasso, coefs_enet, colors):
+    l1 = plt.plot(neg_log_alphas_lasso, coef_l, c=c)
+    l2 = plt.plot(neg_log_alphas_enet, coef_e, linestyle='--', c=c)
 
 plt.xlabel('-Log(alpha)')
 plt.ylabel('coefficients')
 
@@ -60,10 +65,10 @@
 
 plt.figure(2)
 ax = plt.gca()
-ax.set_color_cycle(2 * ['b', 'r', 'g', 'c', 'k'])
-l1 = plt.plot(-np.log10(alphas_lasso), coefs_lasso.T)
-l2 = plt.plot(-np.log10(alphas_positive_lasso), coefs_positive_lasso.T,
-              linestyle='--')
+neg_log_alphas_positive_lasso = -np.log10(alphas_positive_lasso)
+for coef_l, coef_pl, c in zip(coefs_lasso, coefs_positive_lasso, colors):
+    l1 = plt.plot(neg_log_alphas_lasso, coef_l, c=c)
+    l2 = plt.plot(neg_log_alphas_positive_lasso, coef_pl, linestyle='--', c=c)
 
 plt.xlabel('-Log(alpha)')
 plt.ylabel('coefficients')
 
@@ -74,10 +79,10 @@
 
 plt.figure(3)
 ax = plt.gca()
-ax.set_color_cycle(2 * ['b', 'r', 'g', 'c', 'k'])
-l1 = plt.plot(-np.log10(alphas_enet), coefs_enet.T)
-l2 = plt.plot(-np.log10(alphas_positive_enet), coefs_positive_enet.T,
-              linestyle='--')
+neg_log_alphas_positive_enet = -np.log10(alphas_positive_enet)
+for (coef_e, coef_pe, c) in zip(coefs_enet, coefs_positive_enet, colors):
+    l1 = plt.plot(neg_log_alphas_enet, coef_e, c=c)
+    l2 = plt.plot(neg_log_alphas_positive_enet, coef_pe, linestyle='--', c=c)
 
 plt.xlabel('-Log(alpha)')
 plt.ylabel('coefficients')
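Axes.set_color_cycle was deprecated in Matplotlib 1.5, so the example now draws each coefficient path in an explicit loop, taking colors from itertools.cycle so the palette repeats for however many coefficients there are. A minimal sketch of the pattern:

    from itertools import cycle
    import matplotlib.pyplot as plt

    colors = cycle(['b', 'r', 'g', 'c', 'k'])
    for series, c in zip([[0, 1], [1, 0], [2, 2]], colors):
        plt.plot(series, c=c)  # per-line color replaces the axes-level cycle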

dev/_downloads/plot_lda_qda.py

Lines changed: 2 additions & 4 deletions
@@ -67,8 +67,6 @@ def plot_data(lda, X, y, y_pred, fig_index):
     X0, X1 = X[y == 0], X[y == 1]
     X0_tp, X0_fp = X0[tp0], X0[~tp0]
     X1_tp, X1_fp = X1[tp1], X1[~tp1]
-    xmin, xmax = X[:, 0].min(), X[:, 0].max()
-    ymin, ymax = X[:, 1].min(), X[:, 1].max()
 
     # class 0: dots
     plt.plot(X0_tp[:, 0], X0_tp[:, 1], 'o', color='red')
 
@@ -133,8 +131,8 @@ def plot_qda_cov(qda, splot):
     plt.axis('tight')
 
     # Quadratic Discriminant Analysis
-    qda = QuadraticDiscriminantAnalysis()
-    y_pred = qda.fit(X, y, store_covariances=True).predict(X)
+    qda = QuadraticDiscriminantAnalysis(store_covariances=True)
+    y_pred = qda.fit(X, y).predict(X)
     splot = plot_data(qda, X, y, y_pred, fig_index=2 * i + 2)
     plot_qda_cov(qda, splot)
     plt.axis('tight')
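store_covariances moves from a fit keyword to a constructor parameter, matching the scikit-learn convention that fit receives only data while behavioral options live on the estimator. A minimal sketch, assuming arrays X, y and a scikit-learn of this commit's vintage (later releases renamed the flag and the attribute):

    from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

    qda = QuadraticDiscriminantAnalysis(store_covariances=True)
    qda.fit(X, y)            # fit() now takes only the data
    print(qda.covariances_)  # per-class covariance matrices kept by the flag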
dev/_downloads/plot_logistic_multinomial.py (new file)

Lines changed: 70 additions & 0 deletions

@@ -0,0 +1,70 @@
+"""
+====================================================
+Plot multinomial and One-vs-Rest Logistic Regression
+====================================================
+
+Plot decision surface of multinomial and One-vs-Rest Logistic Regression.
+The hyperplanes corresponding to the three One-vs-Rest (OVR) classifiers
+are represented by the dashed lines.
+"""
+print(__doc__)
+# Authors: Tom Dupre la Tour <[email protected]>
+# Licence: BSD 3 clause
+
+import numpy as np
+import matplotlib.pyplot as plt
+from sklearn.datasets import make_blobs
+from sklearn.linear_model import LogisticRegression
+
+# make 3-class dataset for classification
+centers = [[-5, 0], [0, 1.5], [5, -1]]
+X, y = make_blobs(n_samples=1000, centers=centers, random_state=40)
+transformation = [[0.4, 0.2], [-0.4, 1.2]]
+X = np.dot(X, transformation)
+
+for multi_class in ('multinomial', 'ovr'):
+    clf = LogisticRegression(solver='sag', max_iter=100, random_state=42,
+                             multi_class=multi_class).fit(X, y)
+
+    # print the training scores
+    print("training score : %.3f (%s)" % (clf.score(X, y), multi_class))
+
+    # create a mesh to plot in
+    h = .02  # step size in the mesh
+    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
+    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
+    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
+                         np.arange(y_min, y_max, h))
+
+    # Plot the decision boundary. For that, we will assign a color to each
+    # point in the mesh [x_min, x_max]x[y_min, y_max].
+    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
+    # Put the result into a color plot
+    Z = Z.reshape(xx.shape)
+    plt.figure()
+    plt.contourf(xx, yy, Z, cmap=plt.cm.Paired)
+    plt.title("Decision surface of LogisticRegression (%s)" % multi_class)
+    plt.axis('tight')
+
+    # Plot also the training points
+    colors = "bry"
+    for i, color in zip(clf.classes_, colors):
+        idx = np.where(y == i)
+        plt.scatter(X[idx, 0], X[idx, 1], c=color, cmap=plt.cm.Paired)
+
+    # Plot the three one-against-all classifiers
+    xmin, xmax = plt.xlim()
+    ymin, ymax = plt.ylim()
+    coef = clf.coef_
+    intercept = clf.intercept_
+
+    def plot_hyperplane(c, color):
+        def line(x0):
+            return (-(x0 * coef[c, 0]) - intercept[c]) / coef[c, 1]
+        plt.plot([xmin, xmax], [line(xmin), line(xmax)],
+                 ls="--", color=color)
+
+    for i, color in zip(clf.classes_, colors):
+        plot_hyperplane(i, color)
+
+plt.show()
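For context: multi_class='ovr' fits one binary logistic regression per class, while 'multinomial' minimizes a single softmax loss over all three classes; the 'sag' solver used here supports both. Either way there is one row of coef_ per class, which is why the same hyperplane-plotting code serves both runs. A minimal sketch contrasting the two, assuming the X, y built in the example above:

    from sklearn.linear_model import LogisticRegression

    ovr = LogisticRegression(solver='sag', multi_class='ovr').fit(X, y)
    soft = LogisticRegression(solver='sag', multi_class='multinomial').fit(X, y)
    print(ovr.coef_.shape, soft.coef_.shape)  # both (3, 2): one hyperplane per class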

dev/_downloads/plot_manifold_sphere.py

Lines changed: 5 additions & 5 deletions
@@ -103,7 +103,7 @@
 print("%s: %.2g sec" % ('ISO', t1 - t0))
 
 ax = fig.add_subplot(257)
-plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
+plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
 plt.title("%s (%.2g sec)" % ('Isomap', t1 - t0))
 ax.xaxis.set_major_formatter(NullFormatter())
 ax.yaxis.set_major_formatter(NullFormatter())
 
@@ -117,7 +117,7 @@
 print("MDS: %.2g sec" % (t1 - t0))
 
 ax = fig.add_subplot(258)
-plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
+plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
 plt.title("MDS (%.2g sec)" % (t1 - t0))
 ax.xaxis.set_major_formatter(NullFormatter())
 ax.yaxis.set_major_formatter(NullFormatter())
 
@@ -132,7 +132,7 @@
 print("Spectral Embedding: %.2g sec" % (t1 - t0))
 
 ax = fig.add_subplot(259)
-plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
+plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
 plt.title("Spectral Embedding (%.2g sec)" % (t1 - t0))
 ax.xaxis.set_major_formatter(NullFormatter())
 ax.yaxis.set_major_formatter(NullFormatter())
 
@@ -145,8 +145,8 @@
 t1 = time()
 print("t-SNE: %.2g sec" % (t1 - t0))
 
-ax = fig.add_subplot(250)
-plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
+ax = fig.add_subplot(2, 5, 10)
+plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
 plt.title("t-SNE (%.2g sec)" % (t1 - t0))
 ax.xaxis.set_major_formatter(NullFormatter())
 ax.yaxis.set_major_formatter(NullFormatter())

dev/_downloads/plot_ols.py

Lines changed: 2 additions & 2 deletions
@@ -50,8 +50,8 @@
 
 # The coefficients
 print('Coefficients: \n', regr.coef_)
-# The mean square error
-print("Residual sum of squares: %.2f"
+# The mean squared error
+print("Mean squared error: %.2f"
       % np.mean((regr.predict(diabetes_X_test) - diabetes_y_test) ** 2))
 # Explained variance score: 1 is perfect prediction
 print('Variance score: %.2f' % regr.score(diabetes_X_test, diabetes_y_test))
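The relabel makes the message match the computation: np.mean of the squared residuals is the mean squared error, whereas the residual sum of squares would use np.sum. A minimal sketch of the distinction:

    import numpy as np

    residuals = np.array([1.0, -2.0, 3.0])
    print(np.sum(residuals ** 2))   # RSS: 14.0
    print(np.mean(residuals ** 2))  # MSE: 14.0 / 3, about 4.67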
