Skip to content

Commit faeba8d

Browse files
committed
Pushing the docs to dev/ for branch: main, commit 9cfacf1540a991461b91617c779c69753a1ee4c0
1 parent 74293ba commit faeba8d

File tree

1,251 files changed

+3984
-3955
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

1,251 files changed

+3984
-3955
lines changed
Binary file not shown.
Binary file not shown.

dev/_downloads/9676f328f9e6c3e55f218a33cea5586f/plot_huber_vs_ridge.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -43,15 +43,15 @@
4343
colors = ['r-', 'b-', 'y-', 'm-']
4444

4545
x = np.linspace(X.min(), X.max(), 7)
46-
epsilon_values = [1.35, 1.5, 1.75, 1.9]
46+
epsilon_values = [1, 1.5, 1.75, 1.9]
4747
for k, epsilon in enumerate(epsilon_values):
4848
huber = HuberRegressor(alpha=0.0, epsilon=epsilon)
4949
huber.fit(X, y)
5050
coef_ = huber.coef_ * x + huber.intercept_
5151
plt.plot(x, coef_, colors[k], label="huber loss, %s" % epsilon)
5252

5353
# Fit a ridge regressor to compare it to huber regressor.
54-
ridge = Ridge(alpha=0.0, random_state=0, normalize=True)
54+
ridge = Ridge(alpha=0.0, random_state=0)
5555
ridge.fit(X, y)
5656
coef_ridge = ridge.coef_
5757
coef_ = ridge.coef_ * x + ridge.intercept_

dev/_downloads/c6a456b2390718e4dc79945608262e0b/plot_huber_vs_ridge.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
},
2727
"outputs": [],
2828
"source": [
29-
"# Authors: Manoj Kumar [email protected]\n# License: BSD 3 clause\n\nprint(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn.datasets import make_regression\nfrom sklearn.linear_model import HuberRegressor, Ridge\n\n# Generate toy data.\nrng = np.random.RandomState(0)\nX, y = make_regression(n_samples=20, n_features=1, random_state=0, noise=4.0,\n bias=100.0)\n\n# Add four strong outliers to the dataset.\nX_outliers = rng.normal(0, 0.5, size=(4, 1))\ny_outliers = rng.normal(0, 2.0, size=4)\nX_outliers[:2, :] += X.max() + X.mean() / 4.\nX_outliers[2:, :] += X.min() - X.mean() / 4.\ny_outliers[:2] += y.min() - y.mean() / 4.\ny_outliers[2:] += y.max() + y.mean() / 4.\nX = np.vstack((X, X_outliers))\ny = np.concatenate((y, y_outliers))\nplt.plot(X, y, 'b.')\n\n# Fit the huber regressor over a series of epsilon values.\ncolors = ['r-', 'b-', 'y-', 'm-']\n\nx = np.linspace(X.min(), X.max(), 7)\nepsilon_values = [1.35, 1.5, 1.75, 1.9]\nfor k, epsilon in enumerate(epsilon_values):\n huber = HuberRegressor(alpha=0.0, epsilon=epsilon)\n huber.fit(X, y)\n coef_ = huber.coef_ * x + huber.intercept_\n plt.plot(x, coef_, colors[k], label=\"huber loss, %s\" % epsilon)\n\n# Fit a ridge regressor to compare it to huber regressor.\nridge = Ridge(alpha=0.0, random_state=0, normalize=True)\nridge.fit(X, y)\ncoef_ridge = ridge.coef_\ncoef_ = ridge.coef_ * x + ridge.intercept_\nplt.plot(x, coef_, 'g-', label=\"ridge regression\")\n\nplt.title(\"Comparison of HuberRegressor vs Ridge\")\nplt.xlabel(\"X\")\nplt.ylabel(\"y\")\nplt.legend(loc=0)\nplt.show()"
29+
"# Authors: Manoj Kumar [email protected]\n# License: BSD 3 clause\n\nprint(__doc__)\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn.datasets import make_regression\nfrom sklearn.linear_model import HuberRegressor, Ridge\n\n# Generate toy data.\nrng = np.random.RandomState(0)\nX, y = make_regression(n_samples=20, n_features=1, random_state=0, noise=4.0,\n bias=100.0)\n\n# Add four strong outliers to the dataset.\nX_outliers = rng.normal(0, 0.5, size=(4, 1))\ny_outliers = rng.normal(0, 2.0, size=4)\nX_outliers[:2, :] += X.max() + X.mean() / 4.\nX_outliers[2:, :] += X.min() - X.mean() / 4.\ny_outliers[:2] += y.min() - y.mean() / 4.\ny_outliers[2:] += y.max() + y.mean() / 4.\nX = np.vstack((X, X_outliers))\ny = np.concatenate((y, y_outliers))\nplt.plot(X, y, 'b.')\n\n# Fit the huber regressor over a series of epsilon values.\ncolors = ['r-', 'b-', 'y-', 'm-']\n\nx = np.linspace(X.min(), X.max(), 7)\nepsilon_values = [1, 1.5, 1.75, 1.9]\nfor k, epsilon in enumerate(epsilon_values):\n huber = HuberRegressor(alpha=0.0, epsilon=epsilon)\n huber.fit(X, y)\n coef_ = huber.coef_ * x + huber.intercept_\n plt.plot(x, coef_, colors[k], label=\"huber loss, %s\" % epsilon)\n\n# Fit a ridge regressor to compare it to huber regressor.\nridge = Ridge(alpha=0.0, random_state=0)\nridge.fit(X, y)\ncoef_ridge = ridge.coef_\ncoef_ = ridge.coef_ * x + ridge.intercept_\nplt.plot(x, coef_, 'g-', label=\"ridge regression\")\n\nplt.title(\"Comparison of HuberRegressor vs Ridge\")\nplt.xlabel(\"X\")\nplt.ylabel(\"y\")\nplt.legend(loc=0)\nplt.show()"
3030
]
3131
}
3232
],

dev/_downloads/scikit-learn-docs.zip

3.96 KB
Binary file not shown.

dev/_images/binder_badge_logo.png

0 Bytes

dev/_images/iris.png

0 Bytes
52 Bytes
52 Bytes
119 Bytes

0 commit comments

Comments
 (0)