
Commit 38fb67f
Rebuild dev docs at master=8aa9c19
1 parent 7a4ba21

327 files changed, +1871 -1815 lines


dev/_downloads/plot_ard.py
Lines changed: 9 additions & 7 deletions

@@ -58,25 +58,27 @@
 # weights
 plt.figure(figsize=(6, 5))
 plt.title("Weights of the model")
-plt.plot(clf.coef_, 'b-', label="ARD estimate")
-plt.plot(ols.coef_, 'r--', label="OLS estimate")
-plt.plot(w, 'g-', label="Ground truth")
+plt.plot(clf.coef_, color='darkblue', linestyle='-', linewidth=2,
+         label="ARD estimate")
+plt.plot(ols.coef_, color='yellowgreen', linestyle=':', linewidth=2,
+         label="OLS estimate")
+plt.plot(w, color='orange', linestyle='-', linewidth=2, label="Ground truth")
 plt.xlabel("Features")
 plt.ylabel("Values of the weights")
 plt.legend(loc=1)

 plt.figure(figsize=(6, 5))
 plt.title("Histogram of the weights")
-plt.hist(clf.coef_, bins=n_features, log=True)
-plt.plot(clf.coef_[relevant_features], 5 * np.ones(len(relevant_features)),
-         'ro', label="Relevant features")
+plt.hist(clf.coef_, bins=n_features, color='navy', log=True)
+plt.scatter(clf.coef_[relevant_features], 5 * np.ones(len(relevant_features)),
+            color='gold', marker='o', label="Relevant features")
 plt.ylabel("Features")
 plt.xlabel("Values of the weights")
 plt.legend(loc=1)

 plt.figure(figsize=(6, 5))
 plt.title("Marginal log-likelihood")
-plt.plot(clf.scores_)
+plt.plot(clf.scores_, color='navy', linewidth=2)
 plt.ylabel("Score")
 plt.xlabel("Iterations")
 plt.show()
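Across these example scripts the change is the same: compact matplotlib format strings such as 'b-' or 'ro' are replaced by explicit color=, linestyle= and linewidth= keyword arguments, and marker-only plots move from plt.plot to plt.scatter. A minimal, self-contained sketch of the two equivalent call styles (dummy data, not taken from any file in this commit):

import numpy as np
import matplotlib.pyplot as plt

rng = np.random.RandomState(0)
coef = rng.randn(20)                      # dummy weights, for illustration only

# Old style: a format string packs color, marker and line style together.
plt.plot(coef, 'b-', label="format-string style")

# New style: explicit keyword arguments, as used throughout this commit.
plt.plot(coef + 0.5, color='darkblue', linestyle='-', linewidth=2,
         label="keyword-argument style")
plt.scatter(np.arange(len(coef)), coef - 0.5, color='gold', marker='o',
            label="scatter for point markers")

plt.legend(loc='best')
plt.show()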

dev/_downloads/plot_bayesian_ridge.py
Lines changed: 10 additions & 8 deletions

@@ -52,27 +52,29 @@

 ###############################################################################
 # Plot true weights, estimated weights and histogram of the weights
+lw = 2
 plt.figure(figsize=(6, 5))
 plt.title("Weights of the model")
-plt.plot(clf.coef_, 'b-', label="Bayesian Ridge estimate")
-plt.plot(w, 'g-', label="Ground truth")
-plt.plot(ols.coef_, 'r--', label="OLS estimate")
+plt.plot(clf.coef_, color='lightgreen', linewidth=lw,
+         label="Bayesian Ridge estimate")
+plt.plot(w, color='gold', linewidth=lw, label="Ground truth")
+plt.plot(ols.coef_, color='navy', linestyle='--', label="OLS estimate")
 plt.xlabel("Features")
 plt.ylabel("Values of the weights")
 plt.legend(loc="best", prop=dict(size=12))

 plt.figure(figsize=(6, 5))
 plt.title("Histogram of the weights")
-plt.hist(clf.coef_, bins=n_features, log=True)
-plt.plot(clf.coef_[relevant_features], 5 * np.ones(len(relevant_features)),
-         'ro', label="Relevant features")
+plt.hist(clf.coef_, bins=n_features, color='gold', log=True)
+plt.scatter(clf.coef_[relevant_features], 5 * np.ones(len(relevant_features)),
+            color='navy', label="Relevant features")
 plt.ylabel("Features")
 plt.xlabel("Values of the weights")
-plt.legend(loc="lower left")
+plt.legend(loc="upper left")

 plt.figure(figsize=(6, 5))
 plt.title("Marginal log-likelihood")
-plt.plot(clf.scores_)
+plt.plot(clf.scores_, color='navy', linewidth=lw)
 plt.ylabel("Score")
 plt.xlabel("Iterations")
 plt.show()

dev/_downloads/plot_lasso_and_elasticnet.py
Lines changed: 5 additions & 3 deletions

@@ -58,9 +58,11 @@
 print(enet)
 print("r^2 on test data : %f" % r2_score_enet)

-plt.plot(enet.coef_, label='Elastic net coefficients')
-plt.plot(lasso.coef_, label='Lasso coefficients')
-plt.plot(coef, '--', label='original coefficients')
+plt.plot(enet.coef_, color='lightgreen', linewidth=2,
+         label='Elastic net coefficients')
+plt.plot(lasso.coef_, color='gold', linewidth=2,
+         label='Lasso coefficients')
+plt.plot(coef, '--', color='navy', label='original coefficients')
 plt.legend(loc='best')
 plt.title("Lasso R^2: %f, Elastic Net R^2: %f"
           % (r2_score_lasso, r2_score_enet))

dev/_downloads/plot_lda.py
Lines changed: 2 additions & 2 deletions

@@ -58,9 +58,9 @@ def generate_data(n_samples, n_features):
 features_samples_ratio = np.array(n_features_range) / n_train

 plt.plot(features_samples_ratio, acc_clf1, linewidth=2,
-         label="Linear Discriminant Analysis with shrinkage", color='r')
+         label="Linear Discriminant Analysis with shrinkage", color='navy')
 plt.plot(features_samples_ratio, acc_clf2, linewidth=2,
-         label="Linear Discriminant Analysis", color='g')
+         label="Linear Discriminant Analysis", color='gold')

 plt.xlabel('n_features / n_samples')
 plt.ylabel('Classification accuracy')

dev/_downloads/plot_multi_task_lasso_support.py
Lines changed: 7 additions & 4 deletions

@@ -56,10 +56,13 @@

 feature_to_plot = 0
 plt.figure()
-plt.plot(coef[:, feature_to_plot], 'k', label='Ground truth')
-plt.plot(coef_lasso_[:, feature_to_plot], 'g', label='Lasso')
-plt.plot(coef_multi_task_lasso_[:, feature_to_plot],
-         'r', label='MultiTaskLasso')
+lw = 2
+plt.plot(coef[:, feature_to_plot], color='seagreen', linewidth=lw,
+         label='Ground truth')
+plt.plot(coef_lasso_[:, feature_to_plot], color='cornflowerblue', linewidth=lw,
+         label='Lasso')
+plt.plot(coef_multi_task_lasso_[:, feature_to_plot], color='gold', linewidth=lw,
+         label='MultiTaskLasso')
 plt.legend(loc='upper center')
 plt.axis('tight')
 plt.ylim([-1.1, 1.1])

dev/_downloads/plot_polynomial_interpolation.py
Lines changed: 8 additions & 4 deletions

@@ -54,14 +54,18 @@ def f(x):
 X = x[:, np.newaxis]
 X_plot = x_plot[:, np.newaxis]

-plt.plot(x_plot, f(x_plot), label="ground truth")
-plt.scatter(x, y, label="training points")
+colors = ['teal', 'yellowgreen', 'gold']
+lw = 2
+plt.plot(x_plot, f(x_plot), color='cornflowerblue', linewidth=lw,
+         label="ground truth")
+plt.scatter(x, y, color='navy', s=30, marker='o', label="training points")

-for degree in [3, 4, 5]:
+for count, degree in enumerate([3, 4, 5]):
     model = make_pipeline(PolynomialFeatures(degree), Ridge())
     model.fit(X, y)
     y_plot = model.predict(X_plot)
-    plt.plot(x_plot, y_plot, label="degree %d" % degree)
+    plt.plot(x_plot, y_plot, color=colors[count], linewidth=lw,
+             label="degree %d" % degree)

 plt.legend(loc='lower left')

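The plot_polynomial_interpolation.py hunk rewrites the degree loop with enumerate so each fitted curve can pick its color from a fixed list. A small sketch of that indexing pattern (synthetic curves stand in for the Ridge pipeline's predictions):

import numpy as np
import matplotlib.pyplot as plt

x_plot = np.linspace(0.1, 10, 100)
colors = ['teal', 'yellowgreen', 'gold']
lw = 2

# enumerate yields an index alongside each degree, used to look up a color.
for count, degree in enumerate([3, 4, 5]):
    y_plot = x_plot ** (degree / 5.0)     # placeholder curve, not model.predict
    plt.plot(x_plot, y_plot, color=colors[count], linewidth=lw,
             label="degree %d" % degree)

plt.legend(loc='lower left')
plt.show()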
dev/_downloads/plot_ransac.py
Lines changed: 9 additions & 4 deletions

@@ -45,9 +45,14 @@
 print("Estimated coefficients (true, normal, RANSAC):")
 print(coef, model.coef_, model_ransac.estimator_.coef_)

-plt.plot(X[inlier_mask], y[inlier_mask], '.g', label='Inliers')
-plt.plot(X[outlier_mask], y[outlier_mask], '.r', label='Outliers')
-plt.plot(line_X, line_y, '-k', label='Linear regressor')
-plt.plot(line_X, line_y_ransac, '-b', label='RANSAC regressor')
+lw = 2
+plt.scatter(X[inlier_mask], y[inlier_mask], color='yellowgreen', marker='.',
+            label='Inliers')
+plt.scatter(X[outlier_mask], y[outlier_mask], color='gold', marker='.',
+            label='Outliers')
+plt.plot(line_X, line_y, color='navy', linestyle='-', linewidth=lw,
+         label='Linear regressor')
+plt.plot(line_X, line_y_ransac, color='cornflowerblue', linestyle='-',
+         linewidth=lw, label='RANSAC regressor')
 plt.legend(loc='lower right')
 plt.show()

dev/_downloads/plot_robust_fit.py
Lines changed: 14 additions & 12 deletions

@@ -59,28 +59,30 @@
 estimators = [('OLS', linear_model.LinearRegression()),
               ('Theil-Sen', linear_model.TheilSenRegressor(random_state=42)),
               ('RANSAC', linear_model.RANSACRegressor(random_state=42)), ]
-
+colors = {'OLS': 'turquoise', 'Theil-Sen': 'gold', 'RANSAC': 'lightgreen'}
+linestyle = {'OLS': '-', 'Theil-Sen': '-.', 'RANSAC': '--'}
+lw = 3
 x_plot = np.linspace(X.min(), X.max())
-
 for title, this_X, this_y in [
-        ('Modeling errors only', X, y),
-        ('Corrupt X, small deviants', X_errors, y),
-        ('Corrupt y, small deviants', X, y_errors),
-        ('Corrupt X, large deviants', X_errors_large, y),
-        ('Corrupt y, large deviants', X, y_errors_large)]:
+        ('Modeling Errors Only', X, y),
+        ('Corrupt X, Small Deviants', X_errors, y),
+        ('Corrupt y, Small Deviants', X, y_errors),
+        ('Corrupt X, Large Deviants', X_errors_large, y),
+        ('Corrupt y, Large Deviants', X, y_errors_large)]:
     plt.figure(figsize=(5, 4))
-    plt.plot(this_X[:, 0], this_y, 'k+')
+    plt.plot(this_X[:, 0], this_y, 'b+')

     for name, estimator in estimators:
         model = make_pipeline(PolynomialFeatures(3), estimator)
         model.fit(this_X, this_y)
         mse = metrics.mean_squared_error(model.predict(X_test), y_test)
         y_plot = model.predict(x_plot[:, np.newaxis])
-        plt.plot(x_plot, y_plot,
-                 label='%s: error = %.3f' % (name, mse))
+        plt.plot(x_plot, y_plot, color=colors[name], linestyle=linestyle[name],
+                 linewidth=lw, label='%s: error = %.3f' % (name, mse))

-    plt.legend(loc='best', frameon=False,
-               title='Error: mean absolute deviation\n to non corrupt data')
+    legend_title = 'Error of Mean\nAbsolute Deviation\nto Non-corrupt Data'
+    legend = plt.legend(loc='upper right', frameon=False, title=legend_title,
+                        fontsize='x-small')
     plt.xlim(-4, 10.2)
     plt.ylim(-2, 10.2)
     plt.title(title)
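plot_robust_fit.py gains per-estimator lookup dictionaries so each regressor keeps one color and dash pattern across all five figures. A rough standalone sketch of that dictionary-driven styling (the lambda curves are hypothetical stand-ins for the fitted pipelines):

import numpy as np
import matplotlib.pyplot as plt

# Hypothetical stand-ins for the fitted OLS / Theil-Sen / RANSAC pipelines.
curves = {'OLS': lambda x: x,
          'Theil-Sen': lambda x: 0.9 * x + 0.2,
          'RANSAC': lambda x: 1.1 * x - 0.2}

# Style dictionaries keyed by estimator name, as in the diff above.
colors = {'OLS': 'turquoise', 'Theil-Sen': 'gold', 'RANSAC': 'lightgreen'}
linestyle = {'OLS': '-', 'Theil-Sen': '-.', 'RANSAC': '--'}
lw = 3

x_plot = np.linspace(-4, 10)
for name, predict in curves.items():
    plt.plot(x_plot, predict(x_plot), color=colors[name],
             linestyle=linestyle[name], linewidth=lw, label=name)

plt.legend(loc='upper right', frameon=False, fontsize='x-small')
plt.show()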

dev/_downloads/plot_sgd_loss_functions.py
Lines changed: 8 additions & 7 deletions

@@ -22,18 +22,19 @@ def modified_huber_loss(y_true, y_pred):

 xmin, xmax = -4, 4
 xx = np.linspace(xmin, xmax, 100)
-plt.plot([xmin, 0, 0, xmax], [1, 1, 0, 0], 'k-',
+lw = 2
+plt.plot([xmin, 0, 0, xmax], [1, 1, 0, 0], color='gold', lw=lw,
          label="Zero-one loss")
-plt.plot(xx, np.where(xx < 1, 1 - xx, 0), 'g-',
+plt.plot(xx, np.where(xx < 1, 1 - xx, 0), color='teal', lw=lw,
          label="Hinge loss")
-plt.plot(xx, -np.minimum(xx, 0), 'm-',
+plt.plot(xx, -np.minimum(xx, 0), color='yellowgreen', lw=lw,
          label="Perceptron loss")
-plt.plot(xx, np.log2(1 + np.exp(-xx)), 'r-',
+plt.plot(xx, np.log2(1 + np.exp(-xx)), color='cornflowerblue', lw=lw,
          label="Log loss")
-plt.plot(xx, np.where(xx < 1, 1 - xx, 0) ** 2, 'b-',
+plt.plot(xx, np.where(xx < 1, 1 - xx, 0) ** 2, color='orange', lw=lw,
          label="Squared hinge loss")
-plt.plot(xx, modified_huber_loss(xx, 1), 'y--',
-         label="Modified Huber loss")
+plt.plot(xx, modified_huber_loss(xx, 1), color='darkorchid', lw=lw,
+         linestyle='--', label="Modified Huber loss")
 plt.ylim((0, 8))
 plt.legend(loc="upper right")
 plt.xlabel(r"Decision function $f(x)$")

dev/_downloads/plot_sgd_penalties.py
Lines changed: 19 additions & 14 deletions

@@ -44,20 +44,25 @@ def cross(ext):

 cross(1.2)

-plt.plot(xs, l1(xs), "r-", label="L1")
-plt.plot(xs, -1.0 * l1(xs), "r-")
-plt.plot(-1 * xs, l1(xs), "r-")
-plt.plot(-1 * xs, -1.0 * l1(xs), "r-")
-
-plt.plot(xs, l2(xs), "b-", label="L2")
-plt.plot(xs, -1.0 * l2(xs), "b-")
-plt.plot(-1 * xs, l2(xs), "b-")
-plt.plot(-1 * xs, -1.0 * l2(xs), "b-")
-
-plt.plot(xs, el(xs, alpha), "y-", label="Elastic Net")
-plt.plot(xs, -1.0 * el(xs, alpha), "y-")
-plt.plot(-1 * xs, el(xs, alpha), "y-")
-plt.plot(-1 * xs, -1.0 * el(xs, alpha), "y-")
+l1_color = "navy"
+l2_color = "c"
+elastic_net_color = "darkorange"
+lw = 2
+
+plt.plot(xs, l1(xs), color=l1_color, label="L1", lw=lw)
+plt.plot(xs, -1.0 * l1(xs), color=l1_color, lw=lw)
+plt.plot(-1 * xs, l1(xs), color=l1_color, lw=lw)
+plt.plot(-1 * xs, -1.0 * l1(xs), color=l1_color, lw=lw)
+
+plt.plot(xs, l2(xs), color=l2_color, label="L2", lw=lw)
+plt.plot(xs, -1.0 * l2(xs), color=l2_color, lw=lw)
+plt.plot(-1 * xs, l2(xs), color=l2_color, lw=lw)
+plt.plot(-1 * xs, -1.0 * l2(xs), color=l2_color, lw=lw)
+
+plt.plot(xs, el(xs, alpha), color=elastic_net_color, label="Elastic Net", lw=lw)
+plt.plot(xs, -1.0 * el(xs, alpha), color=elastic_net_color, lw=lw)
+plt.plot(-1 * xs, el(xs, alpha), color=elastic_net_color, lw=lw)
+plt.plot(-1 * xs, -1.0 * el(xs, alpha), color=elastic_net_color, lw=lw)

 plt.xlabel(r"$w_0$")
 plt.ylabel(r"$w_1$")
