diff --git a/sklearn/metrics/_classification.py b/sklearn/metrics/_classification.py index b4316053c0f74..e95ca48257aec 100644 --- a/sklearn/metrics/_classification.py +++ b/sklearn/metrics/_classification.py @@ -991,7 +991,7 @@ def f1_score( ): """Compute the F1 score, also known as balanced F-score or F-measure. - The F1 score can be interpreted as a weighted average of the precision and + The F1 score can be interpreted as the harmonic mean of the precision and recall, where an F1 score reaches its best value at 1 and worst score at 0. The relative contribution of precision and recall to the F1 score are equal. The formula for the F1 score is::