Skip to content

Commit 095ea07

Browse files
committed
Pushing the docs to dev/ for branch: master, commit 3c1873550317bbbc63e982cccbd0afaae9cc5a66
1 parent 13061f7 commit 095ea07

File tree

906 files changed

+2664
-2661
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

906 files changed

+2664
-2661
lines changed
145 Bytes
Binary file not shown.
142 Bytes
Binary file not shown.

dev/_downloads/plot_logistic.ipynb

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
},
1616
{
1717
"source": [
18-
"\n# Logit function\n\n\nShow in the plot is how the logistic regression would, in this\nsynthetic dataset, classify values as either 0 or 1,\ni.e. class one or two, using the logit-curve.\n\n\n"
18+
"\n# Logistic function\n\n\nShown in the plot is how the logistic regression would, in this\nsynthetic dataset, classify values as either 0 or 1,\ni.e. class one or two, using the logistic curve.\n\n\n"
1919
],
2020
"cell_type": "markdown",
2121
"metadata": {}
@@ -24,7 +24,7 @@
2424
"execution_count": null,
2525
"cell_type": "code",
2626
"source": [
27-
"print(__doc__)\n\n\n# Code source: Gael Varoquaux\n# License: BSD 3 clause\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn import linear_model\n\n# this is our test set, it's just a straight line with some\n# Gaussian noise\nxmin, xmax = -5, 5\nn_samples = 100\nnp.random.seed(0)\nX = np.random.normal(size=n_samples)\ny = (X > 0).astype(np.float)\nX[X > 0] *= 4\nX += .3 * np.random.normal(size=n_samples)\n\nX = X[:, np.newaxis]\n# run the classifier\nclf = linear_model.LogisticRegression(C=1e5)\nclf.fit(X, y)\n\n# and plot the result\nplt.figure(1, figsize=(4, 3))\nplt.clf()\nplt.scatter(X.ravel(), y, color='black', zorder=20)\nX_test = np.linspace(-5, 10, 300)\n\n\ndef model(x):\n return 1 / (1 + np.exp(-x))\nloss = model(X_test * clf.coef_ + clf.intercept_).ravel()\nplt.plot(X_test, loss, color='blue', linewidth=3)\n\nols = linear_model.LinearRegression()\nols.fit(X, y)\nplt.plot(X_test, ols.coef_ * X_test + ols.intercept_, linewidth=1)\nplt.axhline(.5, color='.5')\n\nplt.ylabel('y')\nplt.xlabel('X')\nplt.xticks(())\nplt.yticks(())\nplt.ylim(-.25, 1.25)\nplt.xlim(-4, 10)\n\nplt.show()"
27+
"print(__doc__)\n\n\n# Code source: Gael Varoquaux\n# License: BSD 3 clause\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom sklearn import linear_model\n\n# this is our test set, it's just a straight line with some\n# Gaussian noise\nxmin, xmax = -5, 5\nn_samples = 100\nnp.random.seed(0)\nX = np.random.normal(size=n_samples)\ny = (X > 0).astype(np.float)\nX[X > 0] *= 4\nX += .3 * np.random.normal(size=n_samples)\n\nX = X[:, np.newaxis]\n# run the classifier\nclf = linear_model.LogisticRegression(C=1e5)\nclf.fit(X, y)\n\n# and plot the result\nplt.figure(1, figsize=(4, 3))\nplt.clf()\nplt.scatter(X.ravel(), y, color='black', zorder=20)\nX_test = np.linspace(-5, 10, 300)\n\n\ndef model(x):\n return 1 / (1 + np.exp(-x))\nloss = model(X_test * clf.coef_ + clf.intercept_).ravel()\nplt.plot(X_test, loss, color='red', linewidth=3)\n\nols = linear_model.LinearRegression()\nols.fit(X, y)\nplt.plot(X_test, ols.coef_ * X_test + ols.intercept_, linewidth=1)\nplt.axhline(.5, color='.5')\n\nplt.ylabel('y')\nplt.xlabel('X')\nplt.xticks(range(-5, 10))\nplt.yticks([0, 0.5, 1])\nplt.ylim(-.25, 1.25)\nplt.xlim(-4, 10)\nplt.legend(('Logistic Regression Model', 'Linear Regression Model'),\n loc=\"lower right\", fontsize='small')\nplt.show()"
2828
],
2929
"outputs": [],
3030
"metadata": {

dev/_downloads/plot_logistic.py

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,12 @@
44

55
"""
66
=========================================================
7-
Logit function
7+
Logistic function
88
=========================================================
99
10-
Show in the plot is how the logistic regression would, in this
10+
Shown in the plot is how the logistic regression would, in this
1111
synthetic dataset, classify values as either 0 or 1,
12-
i.e. class one or two, using the logit-curve.
12+
i.e. class one or two, using the logistic curve.
1313
1414
"""
1515
print(__doc__)
@@ -48,7 +48,7 @@
4848
def model(x):
4949
return 1 / (1 + np.exp(-x))
5050
loss = model(X_test * clf.coef_ + clf.intercept_).ravel()
51-
plt.plot(X_test, loss, color='blue', linewidth=3)
51+
plt.plot(X_test, loss, color='red', linewidth=3)
5252

5353
ols = linear_model.LinearRegression()
5454
ols.fit(X, y)
@@ -57,9 +57,10 @@ def model(x):
5757

5858
plt.ylabel('y')
5959
plt.xlabel('X')
60-
plt.xticks(())
61-
plt.yticks(())
60+
plt.xticks(range(-5, 10))
61+
plt.yticks([0, 0.5, 1])
6262
plt.ylim(-.25, 1.25)
6363
plt.xlim(-4, 10)
64-
64+
plt.legend(('Logistic Regression Model', 'Linear Regression Model'),
65+
loc="lower right", fontsize='small')
6566
plt.show()

0 commit comments

Comments
 (0)