Commit 071ddc7

Removed assert_warns_message from gaussian_process/tests (#19697)
1 parent b9d6db8 commit 071ddc7

3 files changed: +36, -31 lines

sklearn/gaussian_process/tests/test_gpc.py: 9 additions & 9 deletions

@@ -17,7 +17,7 @@
 from sklearn.exceptions import ConvergenceWarning
 
 from sklearn.utils._testing \
-    import assert_almost_equal, assert_array_equal, assert_warns_message
+    import assert_almost_equal, assert_array_equal
 
 
 def f(x):
@@ -189,14 +189,14 @@ def test_multi_class_n_jobs(kernel):
 def test_warning_bounds():
     kernel = RBF(length_scale_bounds=[1e-5, 1e-3])
     gpc = GaussianProcessClassifier(kernel=kernel)
-    assert_warns_message(ConvergenceWarning, "The optimal value found for "
-                                             "dimension 0 of parameter "
-                                             "length_scale is close to "
-                                             "the specified upper bound "
-                                             "0.001. Increasing the bound "
-                                             "and calling fit again may "
-                                             "find a better value.",
-                         gpc.fit, X, y)
+    warning_message = (
+        "The optimal value found for dimension 0 of parameter "
+        "length_scale is close to the specified upper bound "
+        "0.001. Increasing the bound and calling fit again may "
+        "find a better value."
+    )
+    with pytest.warns(ConvergenceWarning, match=warning_message):
+        gpc.fit(X, y)
 
     kernel_sum = (WhiteKernel(noise_level_bounds=[1e-5, 1e-3]) +
                   RBF(length_scale_bounds=[1e3, 1e5]))
sklearn/gaussian_process/tests/test_gpr.py: 20 additions & 17 deletions

@@ -5,6 +5,7 @@
 # License: BSD 3 clause
 
 import sys
+import re
 import numpy as np
 import warnings
 
@@ -21,9 +22,8 @@
 
 from sklearn.utils._testing \
     import (assert_array_less,
-            assert_almost_equal, assert_raise_message,
-            assert_array_almost_equal, assert_array_equal,
-            assert_allclose, assert_warns_message)
+            assert_almost_equal, assert_array_almost_equal,
+            assert_array_equal, assert_allclose)
 
 
 def f(x):
@@ -404,12 +404,15 @@ def test_gpr_correct_error_message():
     y = np.ones(6)
     kernel = DotProduct()
     gpr = GaussianProcessRegressor(kernel=kernel, alpha=0.0)
-    assert_raise_message(np.linalg.LinAlgError,
-                         "The kernel, %s, is not returning a "
-                         "positive definite matrix. Try gradually increasing "
-                         "the 'alpha' parameter of your "
-                         "GaussianProcessRegressor estimator."
-                         % kernel, gpr.fit, X, y)
+    message = (
+        "The kernel, %s, is not returning a "
+        "positive definite matrix. Try gradually increasing "
+        "the 'alpha' parameter of your "
+        "GaussianProcessRegressor estimator."
+        % kernel
+    )
+    with pytest.raises(np.linalg.LinAlgError, match=re.escape(message)):
+        gpr.fit(X, y)
 
 
 @pytest.mark.parametrize('kernel', kernels)
@@ -474,14 +477,14 @@ def test_K_inv_reset(kernel):
 def test_warning_bounds():
     kernel = RBF(length_scale_bounds=[1e-5, 1e-3])
     gpr = GaussianProcessRegressor(kernel=kernel)
-    assert_warns_message(ConvergenceWarning, "The optimal value found for "
-                                             "dimension 0 of parameter "
-                                             "length_scale is close to "
-                                             "the specified upper bound "
-                                             "0.001. Increasing the bound "
-                                             "and calling fit again may "
-                                             "find a better value.",
-                         gpr.fit, X, y)
+    warning_message = (
+        "The optimal value found for dimension 0 of parameter "
+        "length_scale is close to the specified upper bound "
+        "0.001. Increasing the bound and calling fit again may "
+        "find a better value."
+    )
+    with pytest.warns(ConvergenceWarning, match=warning_message):
+        gpr.fit(X, y)
 
     kernel_sum = (WhiteKernel(noise_level_bounds=[1e-5, 1e-3]) +
                   RBF(length_scale_bounds=[1e3, 1e5]))
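
test_gpr.py additionally uses re.escape because the expected error text embeds the kernel repr, which contains parentheses, and pytest.raises treats match as a regular expression. A small sketch of that idiom; the failing_fit() function and its ValueError are hypothetical stand-ins, not the estimator code itself:

import re

import pytest


def failing_fit():
    # Hypothetical stand-in for gpr.fit(X, y) when the kernel matrix is not
    # positive definite.
    raise ValueError(
        "The kernel, DotProduct(sigma_0=1), is not returning a "
        "positive definite matrix."
    )


def test_raises_idiom():
    message = (
        "The kernel, DotProduct(sigma_0=1), is not returning a "
        "positive definite matrix."
    )
    # Without re.escape, "(sigma_0=1)" would be parsed as a regex group and the
    # pattern would no longer match the raised message literally.
    with pytest.raises(ValueError, match=re.escape(message)):
        failing_fit()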

sklearn/gaussian_process/tests/test_kernels.py: 7 additions & 5 deletions

@@ -20,7 +20,6 @@
 from sklearn.utils._testing import (assert_almost_equal, assert_array_equal,
                                     assert_array_almost_equal,
                                     assert_allclose,
-                                    assert_raise_message,
                                     fails_if_pypy)
 
 
@@ -361,7 +360,10 @@ def test_repr_kernels(kernel):
 
 def test_rational_quadratic_kernel():
     kernel = RationalQuadratic(length_scale=[1., 1.])
-    assert_raise_message(AttributeError,
-                         "RationalQuadratic kernel only supports isotropic "
-                         "version, please use a single "
-                         "scalar for length_scale", kernel, X)
+    message = (
+        "RationalQuadratic kernel only supports isotropic "
+        "version, please use a single "
+        "scalar for length_scale"
+    )
+    with pytest.raises(AttributeError, match=message):
+        kernel(X)
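
No escaping is needed here because the expected message contains no regex metacharacters, so the plain string works as a match pattern. A short, self-contained reproduction of the behavior this test asserts; the toy X below is an assumption, the real test uses its module-level X:

import numpy as np
import pytest

from sklearn.gaussian_process.kernels import RationalQuadratic

# Any 2-D input works: the kernel rejects a vector-valued length_scale before
# computing any distances.
X = np.random.RandomState(0).normal(size=(5, 2))

kernel = RationalQuadratic(length_scale=[1.0, 1.0])
with pytest.raises(AttributeError, match="only supports isotropic"):
    kernel(X)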
