Commit fcf4740

TST Add a test to check the consistency of the Ridge and ElasticNet(l1_ratio=0) solutions (#19620)
1 parent: b7b510f

File tree

2 files changed: 64 additions & 4 deletions

sklearn/linear_model/_cd_fast.pyx
14 additions & 4 deletions
@@ -244,10 +244,20 @@ def enet_coordinate_descent(floating[::1] w,
         else:
             # for/else, runs if for doesn't end with a `break`
             with gil:
-                warnings.warn("Objective did not converge. You might want to "
-                              "increase the number of iterations. Duality "
-                              "gap: {}, tolerance: {}".format(gap, tol),
-                              ConvergenceWarning)
+                message = (
+                    "Objective did not converge. You might want to increase "
+                    "the number of iterations, check the scale of the "
+                    "features or consider increasing regularisation. "
+                    f"Duality gap: {gap:.3e}, tolerance: {tol:.3e}"
+                )
+                if alpha < np.finfo(np.float64).eps:
+                    message += (
+                        " Linear regression models with null weight for the "
+                        "l1 regularization term are more efficiently fitted "
+                        "using one of the solvers implemented in "
+                        "sklearn.linear_model.Ridge/RidgeCV instead."
+                    )
+                warnings.warn(message, ConvergenceWarning)
 
     return w, gap, tol, n_iter + 1
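For context, a minimal sketch (not part of the commit) of how the extended warning surfaces to users. The toy data shapes and alpha value below are illustrative assumptions; the warning behavior itself comes from the diff above and the test below:

import warnings

import numpy as np
from sklearn.exceptions import ConvergenceWarning
from sklearn.linear_model import ElasticNet

# Illustrative toy regression problem (not from the commit).
rng = np.random.RandomState(0)
X = rng.randn(30, 50)
y = X @ rng.randn(50)

with warnings.catch_warnings(record=True) as records:
    warnings.simplefilter("always", category=ConvergenceWarning)
    # With l1_ratio=0 the weight of the l1 term is null, so (per the test
    # below) the duality-gap stopping criterion is never met: the warning
    # fires and now includes the hint to use Ridge/RidgeCV instead.
    ElasticNet(alpha=0.1, l1_ratio=0.0).fit(X, y)

print(records[-1].message)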

sklearn/linear_model/tests/test_coordinate_descent.py
50 additions & 0 deletions
@@ -1419,3 +1419,53 @@ def test_enet_sample_weight_does_not_overwrite_sample_weight(check_input):
     reg.fit(X, y, sample_weight=sample_weight, check_input=check_input)
 
     assert_array_equal(sample_weight, sample_weight_1_25)
+
+
+@pytest.mark.parametrize("ridge_alpha", [1e-1, 1., 1e6])
+@pytest.mark.parametrize("normalize", [True, False])
+def test_enet_ridge_consistency(normalize, ridge_alpha):
+    # Check that ElasticNet(l1_ratio=0) converges to the same solution as Ridge
+    # provided that the value of alpha is adapted.
+    #
+    # XXX: this test does not pass for weaker regularization (lower values of
+    # ridge_alpha): it could be either a problem of ElasticNet or Ridge (less
+    # likely) and depends on the dataset statistics: lower values for
+    # effective_rank are more problematic in particular.
+
+    rng = np.random.RandomState(42)
+    X, y = make_regression(
+        n_samples=100,
+        n_features=300,
+        effective_rank=100,
+        n_informative=50,
+        random_state=rng,
+    )
+    sw = rng.uniform(low=0.01, high=2, size=X.shape[0])
+
+    ridge = Ridge(
+        alpha=ridge_alpha,
+        normalize=normalize,
+    ).fit(X, y, sample_weight=sw)
+
+    enet = ElasticNet(
+        alpha=ridge_alpha / sw.sum(),
+        normalize=normalize,
+        l1_ratio=0.,
+        max_iter=1000,
+    )
+    # Even when the ElasticNet model has actually converged, the duality gap
+    # convergence criterion is never met when l1_ratio is 0 and for any value
+    # of the `tol` parameter. The convergence message should point the user to
+    # Ridge instead:
+    expected_msg = (
+        r"Objective did not converge\. .* "
+        r"Linear regression models with null weight for the "
+        r"l1 regularization term are more efficiently fitted "
+        r"using one of the solvers implemented in "
+        r"sklearn\.linear_model\.Ridge/RidgeCV instead\."
+    )
+    with pytest.warns(ConvergenceWarning, match=expected_msg):
+        enet.fit(X, y, sample_weight=sw)
+
+    assert_allclose(ridge.coef_, enet.coef_)
+    assert_allclose(ridge.intercept_, enet.intercept_)
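A note on the alpha=ridge_alpha / sw.sum() rescaling used in the test: with l1_ratio=0, ElasticNet minimizes ||y - Xw||^2 / (2 * total_weight) + 0.5 * alpha * ||w||^2, while Ridge minimizes ||y - Xw||^2 + alpha * ||w||^2, so the two objectives coincide once the Ridge alpha is divided by the total sample weight. A minimal unweighted sketch of the same correspondence (the dataset and tolerance here are illustrative assumptions, not from the commit):

import numpy as np
from sklearn.datasets import make_regression
from sklearn.linear_model import ElasticNet, Ridge

X, y = make_regression(n_samples=100, n_features=10, random_state=0)
n_samples = X.shape[0]  # unweighted: total sample weight == n_samples

ridge = Ridge(alpha=10.0).fit(X, y)
# Dividing the Ridge alpha by n_samples compensates for the
# 1 / (2 * n_samples) factor in ElasticNet's data-fit term.
# This fit emits the ConvergenceWarning discussed above.
enet = ElasticNet(alpha=10.0 / n_samples, l1_ratio=0.0).fit(X, y)

np.testing.assert_allclose(ridge.coef_, enet.coef_, rtol=1e-3)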
