Commit 9434091

Pushing the docs to dev/ for branch: main, commit d4aad64b1eb2e42e76f49db2ccfbe4b4660d092b
1 parent 5112ef5 commit 9434091

1,221 files changed: +4319 additions, -4319 deletions


dev/_downloads/1f682002d8b68c290d9d03599368e83d/plot_lasso_coordinate_descent_path.py

Lines changed: 1 addition & 1 deletion

@@ -77,7 +77,7 @@
 
 plt.figure(3)
 neg_log_alphas_positive_enet = -np.log10(alphas_positive_enet)
-for (coef_e, coef_pe, c) in zip(coefs_enet, coefs_positive_enet, colors):
+for coef_e, coef_pe, c in zip(coefs_enet, coefs_positive_enet, colors):
     l1 = plt.plot(neg_log_alphas_enet, coef_e, c=c)
     l2 = plt.plot(neg_log_alphas_positive_enet, coef_pe, linestyle="--", c=c)
 
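The change itself is purely stylistic: Python unpacks for-loop targets identically with or without grouping parentheses, so both forms below behave the same. A minimal sketch with hypothetical names, not part of the commit:

pairs = [("a", 1), ("b", 2)]

# Parenthesized targets: the parentheses only group; they do not change semantics.
for (letter, number) in pairs:
    print(letter, number)

# Bare targets: the form this commit standardizes on.
for letter, number in pairs:
    print(letter, number)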

dev/_downloads/28df2114703b91f224d70205e9b75a7d/plot_concentration_prior.ipynb

Lines changed: 1 addition & 1 deletion

@@ -26,7 +26,7 @@ The hunk replaces notebook line 29, where the cell's code is held as a single JSON "source" string. The only difference inside the string is the same parenthesis removal, here in the loop over estimators:

-for (title, estimator, concentrations_prior) in estimators:
+for title, estimator, concentrations_prior in estimators:

The cell's code after the change (unescaped from the JSON string, indentation restored):

# Author: Thierry Guillemot <[email protected]>
# License: BSD 3 clause

import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec

from sklearn.mixture import BayesianGaussianMixture


def plot_ellipses(ax, weights, means, covars):
    for n in range(means.shape[0]):
        eig_vals, eig_vecs = np.linalg.eigh(covars[n])
        unit_eig_vec = eig_vecs[0] / np.linalg.norm(eig_vecs[0])
        angle = np.arctan2(unit_eig_vec[1], unit_eig_vec[0])
        # Ellipse needs degrees
        angle = 180 * angle / np.pi
        # eigenvector normalization
        eig_vals = 2 * np.sqrt(2) * np.sqrt(eig_vals)
        ell = mpl.patches.Ellipse(
            means[n], eig_vals[0], eig_vals[1], 180 + angle, edgecolor="black"
        )
        ell.set_clip_box(ax.bbox)
        ell.set_alpha(weights[n])
        ell.set_facecolor("#56B4E9")
        ax.add_artist(ell)


def plot_results(ax1, ax2, estimator, X, y, title, plot_title=False):
    ax1.set_title(title)
    ax1.scatter(X[:, 0], X[:, 1], s=5, marker="o", color=colors[y], alpha=0.8)
    ax1.set_xlim(-2.0, 2.0)
    ax1.set_ylim(-3.0, 3.0)
    ax1.set_xticks(())
    ax1.set_yticks(())
    plot_ellipses(ax1, estimator.weights_, estimator.means_, estimator.covariances_)

    ax2.get_xaxis().set_tick_params(direction="out")
    ax2.yaxis.grid(True, alpha=0.7)
    for k, w in enumerate(estimator.weights_):
        ax2.bar(
            k,
            w,
            width=0.9,
            color="#56B4E9",
            zorder=3,
            align="center",
            edgecolor="black",
        )
        ax2.text(k, w + 0.007, "%.1f%%" % (w * 100.0), horizontalalignment="center")
    ax2.set_xlim(-0.6, 2 * n_components - 0.4)
    ax2.set_ylim(0.0, 1.1)
    ax2.tick_params(axis="y", which="both", left=False, right=False, labelleft=False)
    ax2.tick_params(axis="x", which="both", top=False)

    if plot_title:
        ax1.set_ylabel("Estimated Mixtures")
        ax2.set_ylabel("Weight of each component")


# Parameters of the dataset
random_state, n_components, n_features = 2, 3, 2
colors = np.array(["#0072B2", "#F0E442", "#D55E00"])

covars = np.array(
    [[[0.7, 0.0], [0.0, 0.1]], [[0.5, 0.0], [0.0, 0.1]], [[0.5, 0.0], [0.0, 0.1]]]
)
samples = np.array([200, 500, 200])
means = np.array([[0.0, -0.70], [0.0, 0.0], [0.0, 0.70]])

# mean_precision_prior= 0.8 to minimize the influence of the prior
estimators = [
    (
        "Finite mixture with a Dirichlet distribution\nprior and " r"$\gamma_0=$",
        BayesianGaussianMixture(
            weight_concentration_prior_type="dirichlet_distribution",
            n_components=2 * n_components,
            reg_covar=0,
            init_params="random",
            max_iter=1500,
            mean_precision_prior=0.8,
            random_state=random_state,
        ),
        [0.001, 1, 1000],
    ),
    (
        "Infinite mixture with a Dirichlet process\n prior and" r"$\gamma_0=$",
        BayesianGaussianMixture(
            weight_concentration_prior_type="dirichlet_process",
            n_components=2 * n_components,
            reg_covar=0,
            init_params="random",
            max_iter=1500,
            mean_precision_prior=0.8,
            random_state=random_state,
        ),
        [1, 1000, 100000],
    ),
]

# Generate data
rng = np.random.RandomState(random_state)
X = np.vstack(
    [
        rng.multivariate_normal(means[j], covars[j], samples[j])
        for j in range(n_components)
    ]
)
y = np.concatenate([np.full(samples[j], j, dtype=int) for j in range(n_components)])

# Plot results in two different figures
for title, estimator, concentrations_prior in estimators:
    plt.figure(figsize=(4.7 * 3, 8))
    plt.subplots_adjust(
        bottom=0.04, top=0.90, hspace=0.05, wspace=0.05, left=0.03, right=0.99
    )

    gs = gridspec.GridSpec(3, len(concentrations_prior))
    for k, concentration in enumerate(concentrations_prior):
        estimator.weight_concentration_prior = concentration
        estimator.fit(X)
        plot_results(
            plt.subplot(gs[0:2, k]),
            plt.subplot(gs[2, k]),
            estimator,
            X,
            y,
            r"%s$%.1e$" % (title, concentration),
            plot_title=k == 0,
        )

plt.show()
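Beyond the one-line diff, the pattern this notebook exercises is refitting a single BayesianGaussianMixture while sweeping weight_concentration_prior. A minimal runnable sketch of that pattern follows; the toy data and parameter values are assumptions for illustration, not taken from the commit:

import numpy as np
from sklearn.mixture import BayesianGaussianMixture

# Hypothetical toy data: two well-separated 1-D blobs.
rng = np.random.RandomState(0)
X = np.concatenate([rng.normal(-3, 0.5, 300), rng.normal(3, 0.5, 300)]).reshape(-1, 1)

est = BayesianGaussianMixture(
    weight_concentration_prior_type="dirichlet_process",
    n_components=6,  # deliberately more components than true clusters
    random_state=0,
)
for gamma0 in [0.001, 1, 1000]:
    est.weight_concentration_prior = gamma0  # same sweep pattern as the notebook
    est.fit(X)
    # A small concentration prior pushes posterior weight onto few components;
    # a large one spreads it across more of the six available components.
    print(gamma0, np.round(est.weights_, 2))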

dev/_downloads/4c4c075dc14e39d30d982d0b2818ea95/plot_lasso_coordinate_descent_path.ipynb

Lines changed: 1 addition & 1 deletion

@@ -26,7 +26,7 @@ The hunk replaces notebook line 29, where the cell's code is held as a single JSON "source" string. The only difference inside the string is the same parenthesis removal as in the .py file above:

-for (coef_e, coef_pe, c) in zip(coefs_enet, coefs_positive_enet, colors):
+for coef_e, coef_pe, c in zip(coefs_enet, coefs_positive_enet, colors):

The cell's code after the change (unescaped from the JSON string, indentation restored):

# Author: Alexandre Gramfort <[email protected]>
# License: BSD 3 clause

from itertools import cycle
import numpy as np
import matplotlib.pyplot as plt

from sklearn.linear_model import lasso_path, enet_path
from sklearn import datasets


X, y = datasets.load_diabetes(return_X_y=True)


X /= X.std(axis=0)  # Standardize data (easier to set the l1_ratio parameter)

# Compute paths

eps = 5e-3  # the smaller it is the longer is the path

print("Computing regularization path using the lasso...")
alphas_lasso, coefs_lasso, _ = lasso_path(X, y, eps=eps)

print("Computing regularization path using the positive lasso...")
alphas_positive_lasso, coefs_positive_lasso, _ = lasso_path(
    X, y, eps=eps, positive=True
)
print("Computing regularization path using the elastic net...")
alphas_enet, coefs_enet, _ = enet_path(X, y, eps=eps, l1_ratio=0.8)

print("Computing regularization path using the positive elastic net...")
alphas_positive_enet, coefs_positive_enet, _ = enet_path(
    X, y, eps=eps, l1_ratio=0.8, positive=True
)

# Display results

plt.figure(1)
colors = cycle(["b", "r", "g", "c", "k"])
neg_log_alphas_lasso = -np.log10(alphas_lasso)
neg_log_alphas_enet = -np.log10(alphas_enet)
for coef_l, coef_e, c in zip(coefs_lasso, coefs_enet, colors):
    l1 = plt.plot(neg_log_alphas_lasso, coef_l, c=c)
    l2 = plt.plot(neg_log_alphas_enet, coef_e, linestyle="--", c=c)

plt.xlabel("-Log(alpha)")
plt.ylabel("coefficients")
plt.title("Lasso and Elastic-Net Paths")
plt.legend((l1[-1], l2[-1]), ("Lasso", "Elastic-Net"), loc="lower left")
plt.axis("tight")


plt.figure(2)
neg_log_alphas_positive_lasso = -np.log10(alphas_positive_lasso)
for coef_l, coef_pl, c in zip(coefs_lasso, coefs_positive_lasso, colors):
    l1 = plt.plot(neg_log_alphas_lasso, coef_l, c=c)
    l2 = plt.plot(neg_log_alphas_positive_lasso, coef_pl, linestyle="--", c=c)

plt.xlabel("-Log(alpha)")
plt.ylabel("coefficients")
plt.title("Lasso and positive Lasso")
plt.legend((l1[-1], l2[-1]), ("Lasso", "positive Lasso"), loc="lower left")
plt.axis("tight")


plt.figure(3)
neg_log_alphas_positive_enet = -np.log10(alphas_positive_enet)
for coef_e, coef_pe, c in zip(coefs_enet, coefs_positive_enet, colors):
    l1 = plt.plot(neg_log_alphas_enet, coef_e, c=c)
    l2 = plt.plot(neg_log_alphas_positive_enet, coef_pe, linestyle="--", c=c)

plt.xlabel("-Log(alpha)")
plt.ylabel("coefficients")
plt.title("Elastic-Net and positive Elastic-Net")
plt.legend((l1[-1], l2[-1]), ("Elastic-Net", "positive Elastic-Net"), loc="lower left")
plt.axis("tight")
plt.show()
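For context on what the touched loop plots: lasso_path (and enet_path) return a grid of decreasing alphas and the coefficients traced along that regularization path. A minimal sketch of the call, assuming the same diabetes setup as the example:

import numpy as np
from sklearn import datasets
from sklearn.linear_model import lasso_path

X, y = datasets.load_diabetes(return_X_y=True)
X /= X.std(axis=0)  # standardize features, as the example does

# alphas: decreasing regularization strengths; coefs: shape (n_features, n_alphas)
alphas, coefs, _ = lasso_path(X, y, eps=5e-3)
print(alphas.shape, coefs.shape)  # e.g. (100,) and (10, 100) for diabetes
print(-np.log10(alphas)[:3])  # the x-axis values the figures plot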
