8000 MAINT Parameters validation for sklearn.metrics.log_loss (#25577) · scikit-learn/scikit-learn@6f6b4bd · GitHub
[go: up one dir, main page]

Skip to content

Commit 6f6b4bd

Browse files
authored
MAINT Parameters validation for sklearn.metrics.log_loss (#25577)
1 parent 28f8c87 commit 6f6b4bd

File tree

2 files changed

+12
-1
lines changed

2 files changed

+12
-1
lines changed

sklearn/metrics/_classification.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@
4141
from ..utils.multiclass import type_of_target
4242
from ..utils.validation import _num_samples
4343
from ..utils.sparsefuncs import count_nonzero
44-
from ..utils._param_validation import StrOptions, Options, validate_params
44+
from ..utils._param_validation import StrOptions, Options, Interval, validate_params
4545
from ..exceptions import UndefinedMetricWarning
4646

4747
from ._base import _check_pos_label_consistency
@@ -2569,6 +2569,16 @@ def hamming_loss(y_true, y_pred, *, sample_weight=None):
25692569
raise ValueError("{0} is not supported".format(y_type))
25702570

25712571

2572+
@validate_params(
2573+
{
2574+
"y_true": ["array-like"],
2575+
"y_pred": ["array-like"],
2576+
"eps": [StrOptions({"auto"}), Interval(Real, 0, 1, closed="both")],
2577+
"normalize": ["boolean"],
2578+
"sample_weight": ["array-like", None],
2579+
"labels": ["array-like", None],
2580+
}
2581+
)
25722582
def log_loss(
25732583
y_true, y_pred, *, eps="auto", normalize=True, sample_weight=None, labels=None
25742584
):

sklearn/tests/test_public_functions.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -122,6 +122,7 @@ def _check_function_param_validation(
122122
"sklearn.metrics.det_curve",
123123
"sklearn.metrics.f1_score",
124124
"sklearn.metrics.hamming_loss",
125+
"sklearn.metrics.log_loss",
125126
"sklearn.metrics.mean_absolute_error",
126127
"sklearn.metrics.mean_squared_error",
127128
"sklearn.metrics.mean_tweedie_deviance",

0 commit comments

Comments (0)