Commit b37c1b1

TST reduce warnings in test_logistic.py (#25469)
1 parent 1f12941
1 file changed: +3 -3 lines changed

sklearn/linear_model/tests/test_logistic.py (3 additions & 3 deletions)
@@ -1380,13 +1380,13 @@ def test_elastic_net_coeffs():
     C = 2.0
     l1_ratio = 0.5
     coeffs = list()
-    for penalty in ("elasticnet", "l1", "l2"):
+    for penalty, ratio in (("elasticnet", l1_ratio), ("l1", None), ("l2", None)):
         lr = LogisticRegression(
             penalty=penalty,
             C=C,
             solver="saga",
             random_state=0,
-            l1_ratio=l1_ratio,
+            l1_ratio=ratio,
             tol=1e-3,
             max_iter=200,
         )

@@ -1807,7 +1807,7 @@ def test_penalty_none(solver):
     #   non-default value.
     # - Make sure setting penalty=None is equivalent to setting C=np.inf with
     #   l2 penalty.
-    X, y = make_classification(n_samples=1000, random_state=0)
+    X, y = make_classification(n_samples=1000, n_redundant=0, random_state=0)

     msg = "Setting penalty=None will ignore the C"
     lr = LogisticRegression(penalty=None, solver=solver, C=4)
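Why the first hunk quiets warnings (a hedged sketch, not part of the commit): LogisticRegression only uses l1_ratio when penalty="elasticnet", so passing it together with "l1" or "l2" is assumed to trigger a "parameter will be ignored" UserWarning, which the test now avoids by passing l1_ratio=None for those penalties. The snippet below reproduces that combination; the exact warning text, and whether n_redundant=0 in the second hunk also heads off convergence warnings, are assumptions about scikit-learn's behavior around this release rather than statements from the commit.

import warnings

from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression

X, y = make_classification(n_samples=200, random_state=0)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # l1_ratio is only meaningful for penalty="elasticnet"; pairing it with
    # penalty="l1" is the pattern the updated test avoids (assumed to emit a
    # UserWarning in the scikit-learn versions this commit targets).
    LogisticRegression(penalty="l1", solver="saga", l1_ratio=0.5, tol=1e-2).fit(X, y)

# Inspect whatever warnings were captured during fitting.
print([str(w.message) for w in caught])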