MAINT Clean deprecation of normalize in calibration_curve for 1.3 #25833

Merged
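
For anyone migrating off the removed parameter, the deprecation message itself points at the fix: normalize `y_prob` explicitly before calling `calibration_curve`. A minimal migration sketch; the labels and scores below are illustrative, not from this PR:

```python
import numpy as np

from sklearn.calibration import calibration_curve

# Illustrative labels and raw scores (e.g. `decision_function` output).
y_true = np.array([0, 0, 0, 1, 1, 1])
scores = np.array([-2.0, -1.5, -0.3, 0.4, 1.1, 2.5])

# Reproduce the removed `normalize=True` behavior explicitly: linearly map
# the smallest score onto 0 and the largest onto 1.
y_prob = (scores - scores.min()) / (scores.max() - scores.min())

prob_true, prob_pred = calibration_curve(y_true, y_prob, n_bins=2)
```

As the removed docstring recommends, a proper probability (a classifier's `predict_proba` positive class, or `decision_function` output calibrated with `CalibratedClassifierCV`) is preferable to min-max scaled scores.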
sklearn/calibration.py (25 changes: 0 additions, 25 deletions)
```diff
@@ -908,7 +908,6 @@ def calibration_curve(
     y_prob,
     *,
     pos_label=None,
-    normalize="deprecated",
     n_bins=5,
     strategy="uniform",
 ):
@@ -934,17 +933,6 @@
 
         .. versionadded:: 1.1
 
-    normalize : bool, default="deprecated"
-        Whether y_prob needs to be normalized into the [0, 1] interval, i.e.
-        is not a proper probability. If True, the smallest value in y_prob
-        is linearly mapped onto 0 and the largest one onto 1.
-
-        .. deprecated:: 1.1
-            The normalize argument is deprecated in v1.1 and will be removed in v1.3.
-            Explicitly normalizing `y_prob` will reproduce this behavior, but it is
-            recommended that a proper probability is used (i.e. a classifier's
-            `predict_proba` positive class).
-
     n_bins : int, default=5
         Number of bins to discretize the [0, 1] interval. A bigger number
         requires more data. Bins with no samples (i.e. without
@@ -992,19 +980,6 @@ def calibration_curve(
     check_consistent_length(y_true, y_prob)
     pos_label = _check_pos_label_consistency(pos_label, y_true)
 
-    # TODO(1.3): Remove normalize conditional block.
-    if normalize != "deprecated":
-        warnings.warn(
-            "The normalize argument is deprecated in v1.1 and will be removed in v1.3."
-            " Explicitly normalizing y_prob will reproduce this behavior, but it is"
-            " recommended that a proper probability is used (i.e. a classifier's"
-            " `predict_proba` positive class or `decision_function` output calibrated"
-            " with `CalibratedClassifierCV`).",
-            FutureWarning,
-        )
-        if normalize:  # Normalize predicted values into interval [0, 1]
-            y_prob = (y_prob - y_prob.min()) / (y_prob.max() - y_prob.min())
-
     if y_prob.min() < 0 or y_prob.max() > 1:
         raise ValueError("y_prob has values outside [0, 1].")
```
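
With the conditional block gone, out-of-range scores fail fast instead of being silently rescaled. A short sketch of the post-removal behavior; the input values are illustrative:

```python
import numpy as np

from sklearn.calibration import calibration_curve

y_true = np.array([0, 0, 1, 1])
scores = np.array([-1.0, 0.2, 0.8, 1.5])  # not valid probabilities

# The remaining check raises: "y_prob has values outside [0, 1]."
try:
    calibration_curve(y_true, scores, n_bins=2)
except ValueError as exc:
    print(exc)
```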
sklearn/tests/test_calibration.py (20 changes: 0 additions, 20 deletions)
```diff
@@ -401,26 +401,6 @@ def test_calibration_curve():
         calibration_curve(y_true2, y_pred2, strategy="percentile")
 
 
-# TODO(1.3): Remove this test.
-def test_calibration_curve_with_unnormalized_proba():
-    """Tests the `normalize` parameter of `calibration_curve`"""
-    y_true = np.array([0, 0, 0, 1, 1, 1])
-    y_pred = np.array([0.0, 0.1, 0.2, 0.8, 0.9, 1.0])
-
-    # Ensure `normalize` == False raises a FutureWarning.
-    with pytest.warns(FutureWarning):
-        calibration_curve(y_true, y_pred, n_bins=2, normalize=False)
-
-    # Ensure `normalize` == True raises a FutureWarning and behaves as expected.
-    with pytest.warns(FutureWarning):
-        prob_true_unnormalized, prob_pred_unnormalized = calibration_curve(
-            y_true, y_pred * 2, n_bins=2, normalize=True
-        )
-    prob_true, prob_pred = calibration_curve(y_true, y_pred, n_bins=2)
-    assert_almost_equal(prob_true, prob_true_unnormalized)
-    assert_almost_equal(prob_pred, prob_pred_unnormalized)
-
-
 @pytest.mark.parametrize("ensemble", [True, False])
 def test_calibration_nan_imputer(ensemble):
     """Test that calibration can accept nan"""
```
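
No replacement test is added in this diff. If a guard were still wanted, a hypothetical follow-up test (the name and assertion are assumptions, not part of this PR) could check that the old keyword is now rejected at the signature level:

```python
import numpy as np
import pytest

from sklearn.calibration import calibration_curve


def test_calibration_curve_normalize_removed():
    # Hypothetical follow-up, not part of this PR: with the parameter gone,
    # passing `normalize` should raise a plain TypeError.
    y_true = np.array([0, 0, 0, 1, 1, 1])
    y_prob = np.array([0.0, 0.1, 0.2, 0.8, 0.9, 1.0])
    with pytest.raises(TypeError, match="unexpected keyword argument"):
        calibration_curve(y_true, y_prob, n_bins=2, normalize=True)
```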