10000 MNT Apply ruff/flake8-implicit-str-concat rules (ISC) (#30695) · scikit-learn/scikit-learn@5cdbbf1 · GitHub
[go: up one dir, main page]

Skip to content

Commit 5cdbbf1

Browse files
MNT Apply ruff/flake8-implicit-str-concat rules (ISC) (#30695)
Co-authored-by: Yao Xiao <108576690+Charlie-XIAO@users.noreply.github.com>
1 parent efc355e commit 5cdbbf1

File tree

26 files changed

+58
-62
lines changed

26 files changed

+58
-62
lines changed

build_tools/get_comment.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -56,9 +56,8 @@ def get_step_message(log, start, end, title, message, details):
5656
return ""
5757
res = (
5858
"-----------------------------------------------\n"
59-
+ f"### {title}\n\n"
60-
+ message
61-
+ "\n\n"
59+
f"### {title}\n\n"
60+
f"{message}\n\n"
6261
)
6362
if details:
6463
res += (

examples/classification/plot_lda.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -98,10 +98,10 @@ def generate_data(n_samples, n_features):
9898
plt.legend(loc="lower left")
9999
plt.ylim((0.65, 1.0))
100100
plt.suptitle(
101-
"LDA (Linear Discriminant Analysis) vs. "
102-
+ "\n"
103-
+ "LDA with Ledoit Wolf vs. "
104-
+ "\n"
105-
+ "LDA with OAS (1 discriminative feature)"
101+
"LDA (Linear Discriminant Analysis) vs."
102+
"\n"
103+
"LDA with Ledoit Wolf vs."
104+
"\n"
105+
"LDA with OAS (1 discriminative feature)"
106106
)
107107
plt.show()

examples/cluster/plot_cluster_comparison.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -224,14 +224,14 @@
224224
warnings.filterwarnings(
225225
"ignore",
226226
message="the number of connected components of the "
227-
+ "connectivity matrix is [0-9]{1,2}"
228-
+ " > 1. Completing it to avoid stopping the tree early.",
227+
"connectivity matrix is [0-9]{1,2}"
228+
" > 1. Completing it to avoid stopping the tree early.",
229229
category=UserWarning,
230230
)
231231
warnings.filterwarnings(
232232
"ignore",
233233
message="Graph is not fully connected, spectral embedding"
234-
+ " may not work as expected.",
234+
" may not work as expected.",
235235
category=UserWarning,
236236
)
237237
algorithm.fit(X)

examples/cluster/plot_linkage_comparison.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -123,8 +123,8 @@
123123
warnings.filterwarnings(
124124
"ignore",
125125
message="the number of connected components of the "
126-
+ "connectivity matrix is [0-9]{1,2}"
127-
+ " > 1. Completing it to avoid stopping the tree early.",
126+
"connectivity matrix is [0-9]{1,2}"
127+
" > 1. Completing it to avoid stopping the tree early.",
128128
category=UserWarning,
129129
)
130130
algorithm.fit(X)

examples/kernel_approximation/plot_scalable_poly_kernels.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -143,7 +143,7 @@
143143
}
144144
print(
145145
f"Linear SVM score on {n_components} PolynomialCountSketch "
146-
+ f"features: {ps_lsvm_score:.2f}%"
146+
f"features: {ps_lsvm_score:.2f}%"
147147
)
148148

149149
# %%

examples/mixture/plot_concentration_prior.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ def plot_results(ax1, ax2, estimator, X, y, title, plot_title=False):
103103
# mean_precision_prior= 0.8 to minimize the influence of the prior
104104
estimators = [
105105
(
106-
"Finite mixture with a Dirichlet distribution\nprior and " r"$\gamma_0=$",
106+
"Finite mixture with a Dirichlet distribution\n" r"prior and $\gamma_0=$",
107107
BayesianGaussianMixture(
108108
weight_concentration_prior_type="dirichlet_distribution",
109109
n_components=2 * n_components,
@@ -116,7 +116,7 @@ def plot_results(ax1, ax2, estimator, X, y, title, plot_title=False):
116116
[0.001, 1, 1000],
117117
),
118118
(
119-
"Infinite mixture with a Dirichlet process\n prior and" r"$\gamma_0=$",
119+
"Infinite mixture with a Dirichlet process\n" r"prior and $\gamma_0=$",
120120
BayesianGaussianMixture(
121121
weight_concentration_prior_type="dirichlet_process",
122122
n_components=2 * n_components,

maint_tools/sort_whats_new.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ def entry_sort_key(s):
2323

2424
for entry in re.split("\n(?=- )", text.strip()):
2525
modules = re.findall(
26-
r":(?:func|meth|mod|class):" r"`(?:[^<`]*<|~)?(?:sklearn.)?([a-z]\w+)", entry
26+
r":(?:func|meth|mod|class):`(?:[^<`]*<|~)?(?:sklearn.)?([a-z]\w+)", entry
2727
)
2828
modules = set(modules)
2929
if len(modules) > 1:

sklearn/base.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -110,8 +110,8 @@ def _clone_parametrized(estimator, *, safe=True):
110110
if isinstance(estimator, type):
111111
raise TypeError(
112112
"Cannot clone object. "
113-
+ "You should provide an instance of "
114-
+ "scikit-learn estimator instead of a class."
113+
"You should provide an instance of "
114+
"scikit-learn estimator instead of a class."
115115
)
116116
else:
117117
raise TypeError(

sklearn/compose/tests/test_target.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ def test_transform_target_regressor_error():
3636
)
3737
with pytest.raises(
3838
TypeError,
39-
match=r"fit\(\) got an unexpected " "keyword argument 'sample_weight'",
39+
match=r"fit\(\) got an unexpected keyword argument 'sample_weight'",
4040
):
4141
regr.fit(X, y, sample_weight=sample_weight)
4242

sklearn/datasets/_twenty_newsgroups.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,7 @@ def strip_newsgroup_header(text):
115115

116116

117117
_QUOTE_RE = re.compile(
118-
r"(writes in|writes:|wrote:|says:|said:" r"|^In article|^Quoted from|^\||^>)"
118+
r"(writes in|writes:|wrote:|says:|said:|^In article|^Quoted from|^\||^>)"
119119
)
120120

121121

0 commit comments

Comments (0)