8000 MAINT Make files private for sklearn.metrics and sklearn.metrics.clus… · scikit-learn/scikit-learn@e947689 · GitHub
[go: up one dir, main page]

Skip to content

Commit e947689

Browse files
NicolasHug authored and glemaitre committed
MAINT Make files private for sklearn.metrics and sklearn.metrics.cluster (#15306)
1 parent f86e852 commit e947689

28 files changed

+118
-85
lines changed

.gitignore

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -130,6 +130,18 @@ sklearn/svm/libsvm.py
130130
sklearn/svm/libsvm_sparse.py
131131
sklearn/svm/liblinear.py
132132

133+
sklearn/metrics/cluster/bicluster.py
134+
sklearn/metrics/cluster/supervised.py
135+
sklearn/metrics/cluster/unsupervised.py
136+
sklearn/metrics/cluster/expected_mutual_info_fast.py
137+
138+
sklearn/metrics/base.py
139+
sklearn/metrics/classification.py
140+
sklearn/metrics/regression.py
141+
sklearn/metrics/ranking.py
142+
sklearn/metrics/pairwise_fast.py
143+
sklearn/metrics/scorer.py
144+
133145
sklearn/inspection/partial_dependence.py
134146
sklearn/inspection/permutation_importance.py
135147

sklearn/_build_utils/deprecated_modules.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,26 @@
8686
'set_verbosity_wrap'),
8787
('_liblinear', 'sklearn.svm.liblinear', 'sklearn.svm', 'train_wrap'),
8888

89+
('_bicluster', 'sklearn.metrics.cluster.bicluster',
90+
'sklearn.metrics.cluster', 'consensus_score'),
91+
('_supervised', 'sklearn.metrics.cluster.supervised',
92+
'sklearn.metrics.cluster', 'entropy'),
93+
('_unsupervised', 'sklearn.metrics.cluster.unsupervised',
94+
'sklearn.metrics.cluster', 'silhouette_score'),
95+
('_expected_mutual_info_fast',
96+
'sklearn.metrics.cluster.expected_mutual_info_fast',
97+
'sklearn.metrics.cluster', 'expected_mutual_information'),
98+
99+
('_base', 'sklearn.metrics.base', 'sklearn.metrics', 'combinations'),
100+
('_classification', 'sklearn.metrics.classification', 'sklearn.metrics',
101+
'accuracy_score'),
102+
('_regression', 'sklearn.metrics.regression', 'sklearn.metrics',
103+
'max_error'),
104+
('_ranking', 'sklearn.metrics.ranking', 'sklearn.metrics', 'roc_curve'),
105+
('_pairwise_fast', 'sklearn.metrics.pairwise_fast', 'sklearn.metrics',
106+
'np'),
107+
('_scorer', 'sklearn.metrics.scorer', 'sklearn.metrics', 'get_scorer'),
108+
89109
('_partial_dependence', 'sklearn.inspection.partial_dependence',
90110
'sklearn.inspection', 'partial_dependence'),
91111
('_permutation_importance', 'sklearn.inspection.permutation_importance',

sklearn/base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -404,7 +404,7 @@ def score(self, X, y, sample_weight=None):
404404
"""
405405

406406
from .metrics import r2_score
407-
from .metrics.regression import _check_reg_targets
407+
from .metrics._regression import _check_reg_targets
408408
y_pred = self.predict(X)
409409
# XXX: Remove the check in 0.23
410410
y_type, _, _, _ = _check_reg_targets(y, y_pred, None)

sklearn/linear_model/ridge.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@
3030
from ..utils.validation import _check_sample_weight
3131
from ..preprocessing import LabelBinarizer
3232
from ..model_selection import GridSearchCV
33-
from ..metrics.scorer import check_scoring
33+
from ..metrics import check_scoring
3434
from ..exceptions import ConvergenceWarning
3535
from ..utils.sparsefuncs import mean_variance_axis
3636

sklearn/metrics/__init__.py

Lines changed: 43 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -4,36 +4,36 @@
44
"""
55

66

7-
from .ranking import auc
8-
from .ranking import average_precision_score
9-
from .ranking import coverage_error
10-
from .ranking import dcg_score
11-
from .ranking import label_ranking_average_precision_score
12-
from .ranking import label_ranking_loss
13-
from .ranking import ndcg_score
14-
from .ranking import precision_recall_curve
15-
from .ranking import roc_auc_score
16-
from .ranking import roc_curve
7+
from ._ranking import auc
8+
from ._ranking import average_precision_score
9+
from ._ranking import coverage_error
10+
from ._ranking import dcg_score
11+
from ._ranking import label_ranking_average_precision_score
12+
from ._ranking import label_ranking_loss
13+
from ._ranking import ndcg_score
14+
from ._ranking import precision_recall_curve
15+
from ._ranking import roc_auc_score
16+
from ._ranking import roc_curve
1717

18-
from .classification import accuracy_score
19-
from .classification import balanced_accuracy_score
20-
from .classification import classification_report
21-
from .classification import cohen_kappa_score
22-
from .classification import confusion_matrix
23-
from .classification import f1_score
24-
from .classification import fbeta_score
25-
from .classification import hamming_loss
26-
from .classification import hinge_loss
27-
from .classification import jaccard_similarity_score
28-
from .classification import jaccard_score
29-
from .classification import log_loss
30-
from .classification import matthews_corrcoef
31-
from .classification import precision_recall_fscore_support
32-
from .classification import precision_score
33-
from .classification import recall_score
34-
from .classification import zero_one_loss
35-
from .classification import brier_score_loss
36-
from .classification import multilabel_confusion_matrix
18+
from ._classification import accuracy_score
19+
from ._classification import balanced_accuracy_score
20+
from ._classification import classification_report
21+
from ._classification import cohen_kappa_score
22+
from ._classification import confusion_matrix
23+
from ._classification import f1_score
24+
from ._classification import fbeta_score
25+
from ._classification import hamming_loss
26+
from ._classification import hinge_loss
27+
from ._classification import jaccard_similarity_score
28+
from ._classification import jaccard_score
29+
from ._classification import log_loss
30+
from ._classification import matthews_corrcoef
31+
from ._classification import precision_recall_fscore_support
32+
from ._classification import precision_score
33+
from ._classification import recall_score
34+
from ._classification import zero_one_loss
35+
from ._classification import brier_score_loss
36+
from ._classification import multilabel_confusion_matrix
3737

3838
from . import cluster
3939
from .cluster import adjusted_mutual_info_score
@@ -60,22 +60,22 @@
6060
from .pairwise import pairwise_kernels
6161
from .pairwise import pairwise_distances_chunked
6262

63-
from .regression import explained_variance_score
64-
from .regression import max_error
65-
from .regression import mean_absolute_error
66-
from .regression import mean_squared_error
67-
from .regression import mean_squared_log_error
68-
from .regression import median_absolute_error
69-
from .regression import r2_score
70-
from .regression import mean_tweedie_deviance
71-
from .regression import mean_poisson_deviance
72-
from .regression import mean_gamma_deviance
63+
from ._regression import explained_variance_score
64+
from ._regression import max_error
65+
from ._regression import mean_absolute_error
66+
from ._regression import mean_squared_error
67+
from ._regression import mean_squared_log_error
68+
from ._regression import median_absolute_error
69+
from ._regression import r2_score
70+
from ._regression import mean_tweedie_deviance
71+
from ._regression import mean_poisson_deviance
72+
from ._regression import mean_gamma_deviance
7373

7474

75-
from .scorer import check_scoring
76-
from .scorer import make_scorer
77-
from .scorer import SCORERS
78-
from .scorer import get_scorer
75+
from ._scorer import check_scoring
76+
from ._scorer import make_scorer
77+
from ._scorer import SCORERS
78+
from ._scorer import get_scorer
7979

8080
from ._plot.roc_curve import plot_roc_curve
8181
from ._plot.roc_curve import RocCurveDisplay
File renamed without changes.

sklearn/metrics/ranking.py renamed to sklearn/metrics/_ranking.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535
from ..preprocessing import label_binarize
3636
from ..preprocessing._label import _encode
3737

38-
from .base import _average_binary_score, _average_multiclass_ovo_score
38+
from ._base import _average_binary_score, _average_multiclass_ovo_score
3939

4040

4141
def auc(x, y):
File renamed without changes.

sklearn/metrics/scorer.py renamed to sklearn/metrics/_scorer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -406,7 +406,7 @@ def check_scoring(estimator, scoring=None, allow_none=False):
406406
module = getattr(scoring, '__module__', None)
407407
if hasattr(module, 'startswith') and \
408408
module.startswith('sklearn.metrics.') and \
409-
not module.startswith('sklearn.metrics.scorer') and \
409+
not module.startswith('sklearn.metrics._scorer') and \
410410
not module.startswith('sklearn.metrics.tests.'):
411411
raise ValueError('scoring value %r looks like it is a metric '
412412
'function rather than a scorer. A scorer should '

sklearn/metrics/cluster/__init__.py

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -5,24 +5,24 @@
55
- supervised, which uses a ground truth class values for each sample.
66
- unsupervised, which does not and measures the 'quality' of the model itself.
77
"""
8-
from .supervised import adjusted_mutual_info_score
9-
from .supervised import normalized_mutual_info_score
10-
from .supervised import adjusted_rand_score
11-
from .supervised import completeness_score
12-
from .supervised import contingency_matrix
13-
from .supervised import expected_mutual_information
14-
from .supervised import homogeneity_completeness_v_measure
15-
from .supervised import homogeneity_score
16-
from .supervised import mutual_info_score
17-
from .supervised import v_measure_score
18-
from .supervised import fowlkes_mallows_score
19-
from .supervised import entropy
20-
from .unsupervised import silhouette_samples
21-
from .unsupervised import silhouette_score
22-
from .unsupervised import calinski_harabasz_score
23-
from .unsupervised import calinski_harabaz_score
24-
from .unsupervised import davies_bouldin_score
25-
from .bicluster import consensus_score
8+
from ._supervised import adjusted_mutual_info_score
9+
from ._supervised import normalized_mutual_info_score
10+
from ._supervised import adjusted_rand_score
11+
from ._supervised import completeness_score
12+
from ._supervised import contingency_matrix
13+
from ._supervised import expected_mutual_information
14+
from ._supervised import homogeneity_completeness_v_measure
15+
from ._supervised import homogeneity_score
16+
from ._supervised import mutual_info_score
17+
from ._supervised import v_measure_score
18+
from ._supervised import fowlkes_mallows_score
19+
from ._supervised import entropy
20+
from ._unsupervised import silhouette_samples
21+
from ._unsupervised import silhouette_score
22+
from ._unsupervised import calinski_harabasz_score
23+
from ._unsupervised import calinski_harabaz_score
24+
from ._unsupervised import davies_bouldin_score
25+
from ._bicluster import consensus_score
2626

2727
__all__ = ["adjusted_mutual_info_score", "normalized_mutual_info_score",
2828
"adjusted_rand_score", "completeness_score", "contingency_matrix",

sklearn/metrics/cluster/supervised.py renamed to sklearn/metrics/cluster/_supervised.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
import numpy as np
2121
from scipy import sparse as sp
2222

23-
from .expected_mutual_info_fast import expected_mutual_information
23+
from ._expected_mutual_info_fast import expected_mutual_information
2424
from ...utils.validation import check_array, check_consistent_length
2525
from ...utils.fixes import comb, _astype_copy_false
2626

sklearn/metrics/cluster/setup.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,8 @@ def configuration(parent_package="", top_path=None):
99
libraries = []
1010
if os.name == 'posix':
1111
libraries.append('m')
12-
config.add_extension("expected_mutual_info_fast",
13-
sources=["expected_mutual_info_fast.pyx"],
12+
config.add_extension("_expected_mutual_info_fast",
13+
sources=["_expected_mutual_info_fast.pyx"],
1414
include_dirs=[numpy.get_include()],
1515
libraries=libraries)
1616

sklearn/metrics/cluster/tests/test_bicluster.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
from sklearn.utils.testing import assert_almost_equal
66

7-
from sklearn.metrics.cluster.bicluster import _jaccard
7+
from sklearn.metrics.cluster._bicluster import _jaccard
88
from sklearn.metrics import consensus_score
99

1010

sklearn/metrics/cluster/tests/test_supervised.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from sklearn.metrics.cluster import mutual_info_score
1414
from sklearn.metrics.cluster import normalized_mutual_info_score
1515
from sklearn.metrics.cluster import v_measure_score
16-
from sklearn.metrics.cluster.supervised import _generalized_average
16+
from sklearn.metrics.cluster._supervised import _generalized_average
1717

1818
from sklearn.utils import assert_all_finite
1919
from sklearn.utils.testing import (

sklearn/metrics/pairwise.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
from ..preprocessing import normalize
3030
from ..utils._mask import _get_mask
3131

32-
from .pairwise_fast import _chi2_kernel_fast, _sparse_manhattan
32+
from ._pairwise_fast import _chi2_kernel_fast, _sparse_manhattan
3333
from ..exceptions import DataConversionWarning
3434

3535

sklearn/metrics/setup.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@ def configuration(parent_package="", top_path=None):
1414
config.add_subpackage('_plot.tests')
1515
config.add_subpackage('cluster')
1616

17-
config.add_extension("pairwise_fast",
18-
sources=["pairwise_fast.pyx"],
17+
config.add_extension("_pairwise_fast",
18+
sources=["_pairwise_fast.pyx"],
1919
libraries=libraries)
2020

2121
config.add_subpackage('tests')

sklearn/metrics/tests/test_classification.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@
4545
from sklearn.metrics import brier_score_loss
4646
from sklearn.metrics import multilabel_confusion_matrix
4747

48-
from sklearn.metrics.classification import _check_targets
48+
from sklearn.metrics._classification import _check_targets
4949
from sklearn.exceptions import UndefinedMetricWarning
5050

5151
from scipy.spatial.distance import hamming as sp_hamming

sklearn/metrics/tests/test_common.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@
5757
from sklearn.metrics import ndcg_score
5858
from sklearn.metrics import dcg_score
5959

60-
from sklearn.metrics.base import _average_binary_score
60+
from sklearn.metrics._base import _average_binary_score
6161

6262

6363
# Note toward developers about metric testing

sklearn/metrics/tests/test_ranking.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,8 @@
2626
from sklearn.metrics import label_ranking_loss
2727
from sklearn.metrics import roc_auc_score
2828
from sklearn.metrics import roc_curve
29-
from sklearn.metrics.ranking import _ndcg_sample_scores, _dcg_sample_scores
30-
from sklearn.metrics.ranking import ndcg_score, dcg_score
29+
from sklearn.metrics._ranking import _ndcg_sample_scores, _dcg_sample_scores
30+
from sklearn.metrics import ndcg_score, dcg_score
3131

3232
from sklearn.exceptions import UndefinedMetricWarning
3333

sklearn/metrics/tests/test_regression.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
from sklearn.metrics import r2_score
1818
from sklearn.metrics import mean_tweedie_deviance
1919

20-
from sklearn.metrics.regression import _check_reg_targets
20+
from sklearn.metrics._regression import _check_reg_targets
2121

2222
from ...exceptions import UndefinedMetricWarning
2323

sklearn/metrics/tests/test_score_objects.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,10 +19,11 @@
1919
log_loss, precision_score, recall_score,
2020
jaccard_score)
2121
from sklearn.metrics import cluster as cluster_module
22-
from sklearn.metrics.scorer import (check_scoring, _PredictScorer,
23-
_passthrough_scorer, _MultimetricScorer)
22+
from sklearn.metrics import check_scoring
23+
from sklearn.metrics._scorer import (_PredictScorer, _passthrough_scorer,
24+
_MultimetricScorer,
25+
_check_multimetric_scoring)
2426
from sklearn.metrics import accuracy_score
25-
from sklearn.metrics.scorer import _check_multimetric_scoring
2627
from sklearn.metrics import make_scorer, get_scorer, SCORERS
2728
from sklearn.neighbors import KNeighborsClassifier
2829
from sklearn.svm import LinearSVC

sklearn/model_selection/_search.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -35,8 +35,8 @@
3535
from ..utils.random import sample_without_replacement
3636
from ..utils.validation import indexable, check_is_fitted
3737
from ..utils.metaestimators import if_delegate_has_method
38-
from ..metrics.scorer import _check_multimetric_scoring
39-
from ..metrics.scorer import check_scoring
38+
from ..metrics._scorer import _check_multimetric_scoring
39+
from ..metrics import check_scoring
4040

4141

4242
__all__ = ['GridSearchCV', 'ParameterGrid', 'fit_grid_point',

sklearn/model_selection/_validation.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@
2525
_message_with_time)
2626
from ..utils.validation import _is_arraylike, _num_samples
2727
from ..utils.metaestimators import _safe_split
28-
from ..metrics.scorer import (check_scoring, _check_multimetric_scoring,
29-
_MultimetricScorer)
28+
from ..metrics import check_scoring
29+
from ..metrics._scorer import _check_multimetric_scoring, _MultimetricScorer
3030
from ..exceptions import FitFailedWarning
3131
from ._split import check_cv
3232
from ..preprocessing import LabelEncoder

0 commit comments

Comments (0)