Commit d538225

Ali TBER authored and tber16_atl committed

Another syntax correction

1 parent 6e344a2
2 files changed: +10 -9 lines changed

sklearn/metrics/_ranking.py (4 additions, 4 deletions)
@@ -1005,13 +1005,13 @@ def roc_curve(
     tpr = tps / tps[-1]
 
     return fpr, tpr, thresholds
-
+
 
 @_deprecate_positional_args
 def cumulative_gain_curve(y_true, y_score, pos_label=None):
     """Compute Cumulative Gain for each ten percent of the sample
     Note: This implementation is restricted to the binary classification task.
-
+
     Parameters
     ----------
@@ -1035,10 +1035,10 @@ def cumulative_gain_curve(y_true, y_score, pos_label=None):
     Examples
     --------
     >>> import numpy as np
-    >>> from sklearn import metrics
+    >>> from sklearn.metrics import cumulative_gain_curve
     >>> y_true = [0, 1, 1, 0, 0, 0, 1, 1, 0, 0]
     >>> y_pred = [0.1, 0.8, 0.9, 0,3, 0.4, 0.6, 0.6, 0.6, 0.44]
-    >>> percentages, gains = metrics.cumulative_gain_curve(y_true, y_pred, pos_label=1)
+    >>> percentages, gains = cumulative_gain_curve(y_true, y_pred, pos_label=1)
     >>> percentages
     array([0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1. ])
     >>> gains
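For context, `cumulative_gain_curve` is the function this branch is adding; it is not part of a released scikit-learn API. Below is a rough, hypothetical NumPy sketch of the behaviour the docstring example implies (rank samples by descending score, then report the fraction of all positives captured within each decile of the sample). The helper name `cumulative_gain_sketch` and the exact decile handling are assumptions, not the branch's implementation.

    import numpy as np

    def cumulative_gain_sketch(y_true, y_score, pos_label=1):
        # Hypothetical sketch, not the implementation added in this branch.
        y_true = np.asarray(y_true) == pos_label      # binarize against pos_label
        order = np.argsort(y_score)[::-1]             # best-scored samples first
        captured = np.cumsum(y_true[order])           # positives seen after k samples

        percentages = np.linspace(0.0, 1.0, 11)       # 0%, 10%, ..., 100% of the sample
        cutoffs = np.rint(percentages * len(order)).astype(int)
        gains = np.zeros_like(percentages)
        gains[1:] = captured[cutoffs[1:] - 1] / captured[-1]
        return percentages, gains

With ten samples the returned percentages print as array([0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1. ]), which matches the expected output shown in the docstring example above.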

sklearn/metrics/ranking.py (6 additions, 5 deletions)
@@ -787,7 +787,7 @@ def roc_curve(y_true, y_score, pos_label=None, sample_weight=None,
 def cumulative_gain_curve(y_true, y_score, pos_label=None):
     """Compute Cumulative Gain for each ten percent of the sample
     Note: This implementation is restricted to the binary classification task.
-
+
     Parameters
     ----------
@@ -811,10 +811,10 @@ def cumulative_gain_curve(y_true, y_score, pos_label=None):
     Examples
     --------
     >>> import numpy as np
-    >>> from sklearn import metrics
+    >>> from sklearn.metrics import cumulative_gain_curve
     >>> y_true = [0, 1, 1, 0, 0, 0, 1, 1, 0, 0]
     >>> y_pred = [0.1, 0.8, 0.9, 0,3, 0.4, 0.6, 0.6, 0.6, 0.44]
-    >>> percentages, gains = metrics.cumulative_gain_curve(y_true, y_pred, pos_label=1)
+    >>> percentages, gains = cumulative_gain_curve(y_true, y_pred, pos_label=1)
     >>> percentages
     array([0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1. ])
     >>> gains
@@ -1061,8 +1061,9 @@ def label_ranking_loss(y_true, y_score, sample_weight=None):
         true_at_reversed_rank = np.bincount(
             unique_inverse[y_true.indices[start:stop]],
             minlength=len(unique_scores))
-        all_at_reversed_rank = np.bincount(unique_inverse,
-                                           minlength=len(unique_scores))
+        all_at_reversed_rank = np.bincount(
+            unique_inverse,
+            minlength=len(unique_scores))
         false_at_reversed_rank = all_at_reversed_rank - true_at_reversed_rank
 
         # if the scores are ordered, it's possible to count the number of
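The last hunk only re-wraps an existing `np.bincount` call across lines; its behaviour is unchanged. For readers unfamiliar with this counting step, the `minlength` argument is what pads the result so every unique score keeps a slot even when no sample falls into it. A small standalone illustration with made-up values:

    import numpy as np

    # Made-up rank indices standing in for `unique_inverse`.
    unique_inverse = np.array([0, 2, 2, 3])
    counts = np.bincount(unique_inverse, minlength=6)
    print(counts)  # [1 0 2 1 0 0] -- empty buckets are kept as zeros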
