Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions 3 sklearn/linear_model/_logistic.py
Original file line number Diff line number Diff line change
Expand Up @@ -1168,6 +1168,9 @@ def fit(self, X, y, sample_weight=None):
"(penalty={})".format(self.penalty)
)

if self.penalty == "elasticnet" and self.l1_ratio is None:
raise ValueError("l1_ratio must be specified when penalty is elasticnet.")

# TODO(1.4): Remove "none" option
if self.penalty == "none":
warnings.warn(
Expand Down
9 changes: 9 additions & 0 deletions 9 sklearn/linear_model/tests/test_logistic.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,15 @@ def test_check_solver_option(LR):
lr.fit(X, y)


@pytest.mark.parametrize("LR", [LogisticRegression, LogisticRegressionCV])
def test_elasticnet_l1_ratio_err_helpful(LR):
    # penalty="elasticnet" without an explicit l1_ratio must fail fast with
    # an error message that points the user at the missing parameter.
    X = np.array([[1, 2], [3, 4]])
    y = np.array([0, 1])
    estimator = LR(penalty="elasticnet", solver="saga")
    with pytest.raises(ValueError, match=r".*l1_ratio.*"):
        estimator.fit(X, y)


@pytest.mark.parametrize("solver", ["lbfgs", "newton-cg", "sag", "saga"])
def test_multinomial_binary(solver):
# Test multinomial LR on a binary problem.
Expand Down
Morty Proxy This is a proxified and sanitized view of the page, visit original site.