diff --git a/sklearn/calibration.py b/sklearn/calibration.py
index 7db64a89468d5..d54aa158f5328 100644
--- a/sklearn/calibration.py
+++ b/sklearn/calibration.py
@@ -908,7 +908,6 @@ def calibration_curve(
     y_prob,
     *,
     pos_label=None,
-    normalize="deprecated",
     n_bins=5,
     strategy="uniform",
 ):
@@ -934,17 +933,6 @@
 
         .. versionadded:: 1.1
 
-    normalize : bool, default="deprecated"
-        Whether y_prob needs to be normalized into the [0, 1] interval, i.e.
-        is not a proper probability. If True, the smallest value in y_prob
-        is linearly mapped onto 0 and the largest one onto 1.
-
-        .. deprecated:: 1.1
-            The normalize argument is deprecated in v1.1 and will be removed in v1.3.
-            Explicitly normalizing `y_prob` will reproduce this behavior, but it is
-            recommended that a proper probability is used (i.e. a classifier's
-            `predict_proba` positive class).
-
     n_bins : int, default=5
         Number of bins to discretize the [0, 1] interval. A bigger number
         requires more data. Bins with no samples (i.e. without
@@ -992,19 +980,6 @@
     check_consistent_length(y_true, y_prob)
     pos_label = _check_pos_label_consistency(pos_label, y_true)
 
-    # TODO(1.3): Remove normalize conditional block.
-    if normalize != "deprecated":
-        warnings.warn(
-            "The normalize argument is deprecated in v1.1 and will be removed in v1.3."
-            " Explicitly normalizing y_prob will reproduce this behavior, but it is"
-            " recommended that a proper probability is used (i.e. a classifier's"
-            " `predict_proba` positive class or `decision_function` output calibrated"
-            " with `CalibratedClassifierCV`).",
-            FutureWarning,
-        )
-        if normalize:  # Normalize predicted values into interval [0, 1]
-            y_prob = (y_prob - y_prob.min()) / (y_prob.max() - y_prob.min())
-
     if y_prob.min() < 0 or y_prob.max() > 1:
         raise ValueError("y_prob has values outside [0, 1].")
 
diff --git a/sklearn/tests/test_calibration.py b/sklearn/tests/test_calibration.py
index 72662ae221afa..01bdbd6566042 100644
--- a/sklearn/tests/test_calibration.py
+++ b/sklearn/tests/test_calibration.py
@@ -401,26 +401,6 @@ def test_calibration_curve():
     calibration_curve(y_true2, y_pred2, strategy="percentile")
 
 
-# TODO(1.3): Remove this test.
-def test_calibration_curve_with_unnormalized_proba():
-    """Tests the `normalize` parameter of `calibration_curve`"""
-    y_true = np.array([0, 0, 0, 1, 1, 1])
-    y_pred = np.array([0.0, 0.1, 0.2, 0.8, 0.9, 1.0])
-
-    # Ensure `normalize` == False raises a FutureWarning.
-    with pytest.warns(FutureWarning):
-        calibration_curve(y_true, y_pred, n_bins=2, normalize=False)
-
-    # Ensure `normalize` == True raises a FutureWarning and behaves as expected.
-    with pytest.warns(FutureWarning):
-        prob_true_unnormalized, prob_pred_unnormalized = calibration_curve(
-            y_true, y_pred * 2, n_bins=2, normalize=True
-        )
-    prob_true, prob_pred = calibration_curve(y_true, y_pred, n_bins=2)
-    assert_almost_equal(prob_true, prob_true_unnormalized)
-    assert_almost_equal(prob_pred, prob_pred_unnormalized)
-
-
 @pytest.mark.parametrize("ensemble", [True, False])
 def test_calibration_nan_imputer(ensemble):
     """Test that calibration can accept nan"""
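Migration note for callers still passing `normalize=True`: per the deprecation message removed above, a minimal sketch (with made-up scores; `y_score` is illustrative, not from this PR) of the equivalent explicit min-max scaling. The recommended fix remains passing a proper probability, i.e. a classifier's `predict_proba` positive-class column, or `decision_function` output calibrated with `CalibratedClassifierCV`.

```python
import numpy as np
from sklearn.calibration import calibration_curve

y_true = np.array([0, 0, 0, 1, 1, 1])
# Illustrative uncalibrated scores (e.g. `decision_function` output),
# not proper probabilities.
y_score = np.array([-2.0, -1.0, -0.5, 0.5, 1.0, 2.0])

# Min-max scale into [0, 1] by hand: this is what the removed
# `normalize=True` branch did internally.
y_prob = (y_score - y_score.min()) / (y_score.max() - y_score.min())

prob_true, prob_pred = calibration_curve(y_true, y_prob, n_bins=2)
```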