
Commit e947689

NicolasHug authored and glemaitre committed
MAINT Make files private for sklearn.metrics and sklearn.metrics.cluster (scikit-learn#15306)
1 parent f86e852 commit e947689

28 files changed: +118 -85 lines changed

.gitignore

Lines changed: 12 additions & 0 deletions
@@ -130,6 +130,18 @@ sklearn/svm/libsvm.py
 sklearn/svm/libsvm_sparse.py
 sklearn/svm/liblinear.py
 
+sklearn/metrics/cluster/bicluster.py
+sklearn/metrics/cluster/supervised.py
+sklearn/metrics/cluster/unsupervised.py
+sklearn/metrics/cluster/expected_mutual_info_fast.py
+
+sklearn/metrics/base.py
+sklearn/metrics/classification.py
+sklearn/metrics/regression.py
+sklearn/metrics/ranking.py
+sklearn/metrics/pairwise_fast.py
+sklearn/metrics/scorer.py
+
 sklearn/inspection/partial_dependence.py
 sklearn/inspection/permutation_importance.py
 

sklearn/_build_utils/deprecated_modules.py

Lines changed: 20 additions & 0 deletions
@@ -86,6 +86,26 @@
      'set_verbosity_wrap'),
     ('_liblinear', 'sklearn.svm.liblinear', 'sklearn.svm', 'train_wrap'),
 
+    ('_bicluster', 'sklearn.metrics.cluster.bicluster',
+     'sklearn.metrics.cluster', 'consensus_score'),
+    ('_supervised', 'sklearn.metrics.cluster.supervised',
+     'sklearn.metrics.cluster', 'entropy'),
+    ('_unsupervised', 'sklearn.metrics.cluster.unsupervised',
+     'sklearn.metrics.cluster', 'silhouette_score'),
+    ('_expected_mutual_info_fast',
+     'sklearn.metrics.cluster.expected_mutual_info_fast',
+     'sklearn.metrics.cluster', 'expected_mutual_information'),
+
+    ('_base', 'sklearn.metrics.base', 'sklearn.metrics', 'combinations'),
+    ('_classification', 'sklearn.metrics.classification', 'sklearn.metrics',
+     'accuracy_score'),
+    ('_regression', 'sklearn.metrics.regression', 'sklearn.metrics',
+     'max_error'),
+    ('_ranking', 'sklearn.metrics.ranking', 'sklearn.metrics', 'roc_curve'),
+    ('_pairwise_fast', 'sklearn.metrics.pairwise_fast', 'sklearn.metrics',
+     'np'),
+    ('_scorer', 'sklearn.metrics.scorer', 'sklearn.metrics', 'get_scorer'),
+
     ('_partial_dependence', 'sklearn.inspection.partial_dependence',
      'sklearn.inspection', 'partial_dependence'),
     ('_permutation_importance', 'sklearn.inspection.permutation_importance',
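Each registered tuple pairs the new private module name with its old public import path, the parent package, and one representative attribute. For illustration only (a hedged sketch, not the exact file scikit-learn's build step generates), a compatibility shim emitted for the '_scorer' entry could look roughly like this:

# Hypothetical generated shim at sklearn/metrics/scorer.py (illustrative only).
# It re-exports the private implementation and warns that the old path is deprecated.
import warnings

from ._scorer import *           # noqa: F401,F403
from ._scorer import get_scorer  # noqa: F401  -- the representative attribute above

warnings.warn(
    "sklearn.metrics.scorer is deprecated; import these names from "
    "sklearn.metrics instead.",
    DeprecationWarning,
)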

sklearn/base.py

Lines changed: 1 addition & 1 deletion
@@ -404,7 +404,7 @@ def score(self, X, y, sample_weight=None):
         """
 
         from .metrics import r2_score
-        from .metrics.regression import _check_reg_targets
+        from .metrics._regression import _check_reg_targets
         y_pred = self.predict(X)
         # XXX: Remove the check in 0.23
         y_type, _, _, _ = _check_reg_targets(y, y_pred, None)

sklearn/linear_model/ridge.py

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@
 from ..utils.validation import _check_sample_weight
 from ..preprocessing import LabelBinarizer
 from ..model_selection import GridSearchCV
-from ..metrics.scorer import check_scoring
+from ..metrics import check_scoring
 from ..exceptions import ConvergenceWarning
 from ..utils.sparsefuncs import mean_variance_axis
 
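This switches internal callers to the public import path; check_scoring itself is unchanged. A quick usage sketch (not part of the diff) of the import style this commit standardizes on:

# check_scoring is still exposed from the public sklearn.metrics namespace;
# only its implementation module moved from scorer.py to _scorer.py.
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import check_scoring

scorer = check_scoring(LogisticRegression(), scoring="accuracy")
# `scorer` is a callable used as scorer(fitted_estimator, X, y).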

sklearn/metrics/__init__.py

Lines changed: 43 additions & 43 deletions
@@ -4,36 +4,36 @@
 """
 
 
-from .ranking import auc
-from .ranking import average_precision_score
-from .ranking import coverage_error
-from .ranking import dcg_score
-from .ranking import label_ranking_average_precision_score
-from .ranking import label_ranking_loss
-from .ranking import ndcg_score
-from .ranking import precision_recall_curve
-from .ranking import roc_auc_score
-from .ranking import roc_curve
+from ._ranking import auc
+from ._ranking import average_precision_score
+from ._ranking import coverage_error
+from ._ranking import dcg_score
+from ._ranking import label_ranking_average_precision_score
+from ._ranking import label_ranking_loss
+from ._ranking import ndcg_score
+from ._ranking import precision_recall_curve
+from ._ranking import roc_auc_score
+from ._ranking import roc_curve
 
-from .classification import accuracy_score
-from .classification import balanced_accuracy_score
-from .classification import classification_report
-from .classification import cohen_kappa_score
-from .classification import confusion_matrix
-from .classification import f1_score
-from .classification import fbeta_score
-from .classification import hamming_loss
-from .classification import hinge_loss
-from .classification import jaccard_similarity_score
-from .classification import jaccard_score
-from .classification import log_loss
-from .classification import matthews_corrcoef
-from .classification import precision_recall_fscore_support
-from .classification import precision_score
-from .classification import recall_score
-from .classification import zero_one_loss
-from .classification import brier_score_loss
-from .classification import multilabel_confusion_matrix
+from ._classification import accuracy_score
+from ._classification import balanced_accuracy_score
+from ._classification import classification_report
+from ._classification import cohen_kappa_score
+from ._classification import confusion_matrix
+from ._classification import f1_score
+from ._classification import fbeta_score
+from ._classification import hamming_loss
+from ._classification import hinge_loss
+from ._classification import jaccard_similarity_score
+from ._classification import jaccard_score
+from ._classification import log_loss
+from ._classification import matthews_corrcoef
+from ._classification import precision_recall_fscore_support
+from ._classification import precision_score
+from ._classification import recall_score
+from ._classification import zero_one_loss
+from ._classification import brier_score_loss
+from ._classification import multilabel_confusion_matrix
 
 from . import cluster
 from .cluster import adjusted_mutual_info_score
@@ -60,22 +60,22 @@
 from .pairwise import pairwise_kernels
 from .pairwise import pairwise_distances_chunked
 
-from .regression import explained_variance_score
-from .regression import max_error
-from .regression import mean_absolute_error
-from .regression import mean_squared_error
-from .regression import mean_squared_log_error
-from .regression import median_absolute_error
-from .regression import r2_score
-from .regression import mean_tweedie_deviance
-from .regression import mean_poisson_deviance
-from .regression import mean_gamma_deviance
+from ._regression import explained_variance_score
+from ._regression import max_error
+from ._regression import mean_absolute_error
+from ._regression import mean_squared_error
+from ._regression import mean_squared_log_error
+from ._regression import median_absolute_error
+from ._regression import r2_score
+from ._regression import mean_tweedie_deviance
+from ._regression import mean_poisson_deviance
+from ._regression import mean_gamma_deviance
 
 
-from .scorer import check_scoring
-from .scorer import make_scorer
-from .scorer import SCORERS
-from .scorer import get_scorer
+from ._scorer import check_scoring
+from ._scorer import make_scorer
+from ._scorer import SCORERS
+from ._scorer import get_scorer
 
 from ._plot.roc_curve import plot_roc_curve
 from ._plot.roc_curve import RocCurveDisplay
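Since every renamed module only gains a leading underscore and __init__.py re-exports the same names, the public sklearn.metrics API is unchanged by this commit. A small sketch of what downstream code can keep doing:

# Downstream code using the documented public API is unaffected:
from sklearn.metrics import accuracy_score, roc_curve, r2_score, check_scoring

y_true, y_pred = [0, 1, 1, 0], [0, 1, 0, 0]
print(accuracy_score(y_true, y_pred))  # 0.75

# The old submodule paths (e.g. sklearn.metrics.classification) keep importing
# through the generated shims during the deprecation period, but they warn and
# should be replaced with the public imports above.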
File renamed without changes.

sklearn/metrics/ranking.py renamed to sklearn/metrics/_ranking.py

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@
 from ..preprocessing import label_binarize
 from ..preprocessing._label import _encode
 
-from .base import _average_binary_score, _average_multiclass_ovo_score
+from ._base import _average_binary_score, _average_multiclass_ovo_score
 
 
 def auc(x, y):
File renamed without changes.

0 commit comments