Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit f67e0d2

Browse files
committed
Fixed docstrings and seed tests
1 parent e8fe8e6 commit f67e0d2
Copy full SHA for f67e0d2

File tree

Expand file tree / Collapse file tree

5 files changed

+12
-11
lines changed
Filter options
Expand file tree / Collapse file tree

5 files changed

+12
-11
lines changed

‎doc/modules/sgd.rst

Copy file name to clipboard · Expand all lines: doc/modules/sgd.rst
+1 −1 · Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -392,7 +392,7 @@ For PA-1 use ``loss='pa1'``, and ``loss='pa2'`` for
392392
PA-2 respectively. Setting ``C`` to 0 gives the vanilla passive-
393393
aggressive algorithm.
394394

395-
..math::
395+
.. math::
396396
397397
\eta = \frac{1}{|x|^2} (PA)
398398
\eta = \text{min} (\alpha, \frac{1}{|x|^2} (PA-1)

‎examples/linear_model/plot_sgd_comparison.py

Copy file name to clipboard · Expand all lines: examples/linear_model/plot_sgd_comparison.py
+2 −2 · Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
"""
2-
================================
2+
=============================================
33
Comparing various Stochastic Gradient Solvers
4-
================================
4+
=============================================
55
66
An example showing how the different SGDClassifiers perform
77
on the hand-written digits dataset

‎sklearn/feature_selection/tests/test_selector_mixin.py

Copy file name to clipboard · Expand all lines: sklearn/feature_selection/tests/test_selector_mixin.py
+2 −2 · Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
def test_transform_linear_model():
1717
for clf in (LogisticRegression(C=0.1),
1818
LinearSVC(C=0.01, dual=False),
19-
SGDClassifier(alpha=0.1, n_iter=10, shuffle=True, seed=0)):
19+
SGDClassifier(alpha=0.1, n_iter=10, shuffle=True, random_state=0)):
2020
for thresh in (None, ".09*mean", "1e-5 * median"):
2121
for func in (np.array, sp.csr_matrix):
2222
X = func(iris.data)
@@ -34,7 +34,7 @@ def test_transform_linear_model():
3434

3535

3636
def test_invalid_input():
37-
clf = SGDClassifier(alpha=0.1, n_iter=10, shuffle=True, seed=0)
37+
clf = SGDClassifier(alpha=0.1, n_iter=10, shuffle=True, random_state=None)
3838

3939
clf.fit(iris.data, iris.target)
4040
assert_raises(ValueError, clf.transform, iris.data, "gobbledigook")

‎sklearn/linear_model/stochastic_gradient.py

Copy file name to clipboard · Expand all lines: sklearn/linear_model/stochastic_gradient.py
+6 −5 · Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -344,10 +344,11 @@ class SGDClassifier(BaseSGD, LinearClassifierMixin, SelectorMixin):
344344
>>> clf = linear_model.SGDClassifier()
345345
>>> clf.fit(X, Y)
346346
... #doctest: +NORMALIZE_WHITESPACE
347-
SGDClassifier(alpha=0.0001, class_weight=None, epsilon=0.1, eta0=0.0,
347+
SGDClassifier(C=1.0, alpha=0.0001, class_weight=None, epsilon=0.1, eta0=0.0,
348348
fit_intercept=True, l1_ratio=0.15, learning_rate='optimal',
349349
loss='hinge', n_iter=5, n_jobs=1, penalty='l2', power_t=0.5,
350-
rho=None, random_state=0, shuffle=False, verbose=0, warm_start=False)
350+
random_state=0, rho=None, shuffle=False, verbose=0,
351+
warm_start=False)
351352
>>> print(clf.predict([[-0.8, -1]]))
352353
[1]
353354
@@ -773,10 +774,10 @@ class SGDRegressor(BaseSGD, RegressorMixin, SelectorMixin):
773774
>>> X = np.random.randn(n_samples, n_features)
774775
>>> clf = linear_model.SGDRegressor()
775776
>>> clf.fit(X, y)
776-
SGDRegressor(alpha=0.0001, epsilon=0.1, eta0=0.01, fit_intercept=True,
777+
SGDRegressor(C=1.0, alpha=0.0001, epsilon=0.1, eta0=0.01, fit_intercept=True,
777778
l1_ratio=0.15, learning_rate='invscaling', loss='squared_loss',
778-
n_iter=5, p=None, penalty='l2', power_t=0.25, rho=None, random_state=0,
779-
shuffle=False, verbose=0, warm_start=False)
779+
n_iter=5, p=None, penalty='l2', power_t=0.25, random_state=0,
780+
rho=None, shuffle=False, verbose=0, warm_start=False)
780781
781782
See also
782783
--------

‎sklearn/linear_model/tests/test_perceptron.py

Copy file name to clipboard · Expand all lines: sklearn/linear_model/tests/test_perceptron.py
+1 −1 · Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ def predict(self, X):
4444

4545
def test_perceptron_accuracy():
4646
for data in (X, X_csr):
47-
clf = Perceptron(n_iter=30, shuffle=False, seed=0)
47+
clf = Perceptron(n_iter=30, shuffle=False, random_state=None)
4848
clf.fit(data, y)
4949
score = clf.score(data, y)
5050
assert_true(score >= 0.7)

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.