insert versionadded versionchanged directives in docstrings for 0.18 · scikit-learn/scikit-learn@803d68d · GitHub
[go: up one dir, main page]

Skip to content

Commit 803d68d

Browse files
committed
insert versionadded versionchanged directives in docstrings for 0.18
indicate where exception classes were moved from
1 parent af41bfd commit 803d68d

File tree

15 files changed

+161
-8
lines changed

15 files changed

+161
-8
lines changed

sklearn/datasets/kddcup99.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -156,6 +156,8 @@ def fetch_kddcup99(subset=None, shuffle=False, random_state=None,
156156
.. [2] A Geometric Framework for Unsupervised Anomaly Detection: Detecting
157157
Intrusions in Unlabeled Data (2002) by Eleazar Eskin, Andrew Arnold,
158158
Michael Prerau, Leonid Portnoy, Sal Stolfo
159+
160+
.. versionadded:: 0.18
159161
"""
160162
kddcup99 = _fetch_brute_kddcup99(shuffle=shuffle, percent10=percent10,
161163
download_if_missing=download_if_missing)

sklearn/decomposition/kernel_pca.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -78,15 +78,21 @@ class KernelPCA(BaseEstimator, TransformerMixin):
7878
A pseudo random number generator used for the initialization of the
7979
residuals when eigen_solver == 'arpack'.
8080
81+
.. versionadded:: 0.18
82+
8183
n_jobs : int, default=1
8284
The number of parallel jobs to run.
8385
If `-1`, then the number of jobs is set to the number of CPU cores.
8486
87+
.. versionadded:: 0.18
88+
8589
copy_X : boolean, default=True
8690
If True, input X is copied and stored by the model in the `X_fit_`
8791
attribute. If no further changes will be done to X, setting
8892
`copy_X=False` saves memory by storing a reference.
8993
94+
.. versionadded:: 0.18
95+
9096
Attributes
9197
----------
9298
lambdas_ : array, (n_components,)

sklearn/decomposition/pca.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -194,6 +194,8 @@ class PCA(_BasePCA):
194194
explained_variance_ : array, [n_components]
195195
The amount of variance explained by each of the selected components.
196196
197+
.. versionadded:: 0.18
198+
197199
explained_variance_ratio_ : array, [n_components]
198200
Percentage of variance explained by each of the selected components.
199201

sklearn/ensemble/forest.py

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -197,6 +197,8 @@ def decision_path(self, X):
197197
n_nodes_ptr : array of size (n_estimators + 1, )
198198
The columns from indicator[n_nodes_ptr[i]:n_nodes_ptr[i+1]]
199199
gives the indicator value for the i-th estimator.
200+
201+
.. versionadded:: 0.18
200202
"""
201203
X = self._validate_X_predict(X)
202204
indicators = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
@@ -786,6 +788,9 @@ class RandomForestClassifier(ForestClassifier):
786788
`ceil(min_samples_split * n_samples)` are the minimum
787789
number of samples for each split.
788790
791+
.. versionchanged:: 0.18
792+
Added float values for percentages.
793+
789794
min_samples_leaf : int, float, optional (default=1)
790795
The minimum number of samples required to be at a leaf node:
791796
@@ -794,6 +799,9 @@ class RandomForestClassifier(ForestClassifier):
794799
`ceil(min_samples_leaf * n_samples)` are the minimum
795800
number of samples for each node.
796801
802+
.. versionchanged:: 0.18
803+
Added float values for percentages.
804+
797805
min_weight_fraction_leaf : float, optional (default=0.)
798806
The minimum weighted fraction of the input samples required to be at a
799807
leaf node.
@@ -991,6 +999,9 @@ class RandomForestRegressor(ForestRegressor):
991999
`ceil(min_samples_split * n_samples)` are the minimum
9921000
number of samples for each split.
9931001
1002+
.. versionchanged:: 0.18
1003+
Added float values for percentages.
1004+
9941005
min_samples_leaf : int, float, optional (default=1)
9951006
The minimum number of samples required to be at a leaf node:
9961007
@@ -999,6 +1010,9 @@ class RandomForestRegressor(ForestRegressor):
9991010
`ceil(min_samples_leaf * n_samples)` are the minimum
10001011
number of samples for each node.
10011012
1013+
.. versionchanged:: 0.18
1014+
Added float values for percentages.
1015+
10021016
min_weight_fraction_leaf : float, optional (default=0.)
10031017
The minimum weighted fraction of the input samples required to be at a
10041018
leaf node.
@@ -1156,6 +1170,9 @@ class ExtraTreesClassifier(ForestClassifier):
11561170
`ceil(min_samples_split * n_samples)` are the minimum
11571171
number of samples for each split.
11581172
1173+
.. versionchanged:: 0.18
1174+
Added float values for percentages.
1175+
11591176
min_samples_leaf : int, float, optional (default=1)
11601177
The minimum number of samples required to be at a leaf node:
11611178
@@ -1164,6 +1181,9 @@ class ExtraTreesClassifier(ForestClassifier):
11641181
`ceil(min_samples_leaf * n_samples)` are the minimum
11651182
number of samples for each node.
11661183
1184+
.. versionchanged:: 0.18
1185+
Added float values for percentages.
1186+
11671187
min_weight_fraction_leaf : float, optional (default=0.)
11681188
The minimum weighted fraction of the input samples required to be at a
11691189
leaf node.
@@ -1360,6 +1380,9 @@ class ExtraTreesRegressor(ForestRegressor):
13601380
`ceil(min_samples_split * n_samples)` are the minimum
13611381
number of samples for each split.
13621382
1383+
.. versionchanged:: 0.18
1384+
Added float values for percentages.
1385+
13631386
min_samples_leaf : int, float, optional (default=1)
13641387
The minimum number of samples required to be at a leaf node:
13651388
@@ -1368,6 +1391,9 @@ class ExtraTreesRegressor(ForestRegressor):
13681391
`ceil(min_samples_leaf * n_samples)` are the minimum
13691392
number of samples for each node.
13701393
1394+
.. versionchanged:: 0.18
1395+
Added float values for percentages.
1396+
13711397
min_weight_fraction_leaf : float, optional (default=0.)
13721398
The minimum weighted fraction of the input samples required to be at a
13731399
leaf node.
@@ -1511,6 +1537,9 @@ class RandomTreesEmbedding(BaseForest):
15111537
`ceil(min_samples_split * n_samples)` is the minimum
15121538
number of samples for each split.
15131539
1540+
.. versionchanged:: 0.18
1541+
Added float values for percentages.
1542+
15141543
min_samples_leaf : int, float, optional (default=1)
15151544
The minimum number of samples required to be at a leaf node:
15161545
@@ -1519,6 +1548,9 @@ class RandomTreesEmbedding(BaseForest):
15191548
`ceil(min_samples_leaf * n_samples)` is the minimum
15201549
number of samples for each node.
15211550
1551+
.. versionchanged:: 0.18
1552+
Added float values for percentages.
1553+
15221554
min_weight_fraction_leaf : float, optional (default=0.)
15231555
The minimum weighted fraction of the input samples required to be at a
15241556
leaf node.

sklearn/ensemble/gradient_boosting.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1315,6 +1315,9 @@ class GradientBoostingClassifier(BaseGradientBoosting, ClassifierMixin):
13151315
`ceil(min_samples_split * n_samples)` are the minimum
13161316
number of samples for each split.
13171317
1318+
.. versionchanged:: 0.18
1319+
Added float values for percentages.
1320+
13181321
min_samples_leaf : int, float, optional (default=1)
13191322
The minimum number of samples required to be at a leaf node:
13201323
@@ -1323,6 +1326,8 @@ class GradientBoostingClassifier(BaseGradientBoosting, ClassifierMixin):
13231326
`ceil(min_samples_leaf * n_samples)` are the minimum
13241327
number of samples for each node.
13251328
1329+
.. versionchanged:: 0.18
1330+
Added float values for percentages.
13261331
13271332
min_weight_fraction_leaf : float, optional (default=0.)
13281333
The minimum weighted fraction of the input samples required to be at a
@@ -1678,6 +1683,9 @@ class GradientBoostingRegressor(BaseGradientBoosting, RegressorMixin):
16781683
`ceil(min_samples_split * n_samples)` are the minimum
16791684
number of samples for each split.
16801685
1686+
.. versionchanged:: 0.18
1687+
Added float values for percentages.
1688+
16811689
min_samples_leaf : int, float, optional (default=1)
16821690
The minimum number of samples required to be at a leaf node:
16831691
@@ -1686,6 +1694,9 @@ class GradientBoostingRegressor(BaseGradientBoosting, RegressorMixin):
16861694
`ceil(min_samples_leaf * n_samples)` are the minimum
16871695
number of samples for each node.
16881696
1697+
.. versionchanged:: 0.18
1698+
Added float values for percentages.
1699+
16891700
min_weight_fraction_leaf : float, optional (default=0.)
16901701
The minimum weighted fraction of the input samples required to be at a
16911702
leaf node.

sklearn/ensemble/iforest.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -105,6 +105,8 @@ class IsolationForest(BaseBagging):
105105
.. [2] Liu, Fei Tony, Ting, Kai Ming and Zhou, Zhi-Hua. "Isolation-based
106106
anomaly detection." ACM Transactions on Knowledge Discovery from
107107
Data (TKDD) 6.1 (2012): 3.
108+
109+
.. versionadded:: 0.18
108110
"""
109111

110112
def __init__(self,

sklearn/exceptions.py

Lines changed: 32 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,15 +30,26 @@ class NotFittedError(ValueError, AttributeError):
3030
... print(repr(e))
3131
... # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
3232
NotFittedError('This LinearSVC instance is not fitted yet',)
33+
34+
.. versionchanged:: 0.18
35+
Moved from sklearn.utils.validation.
3336
"""
3437

3538

3639
class ChangedBehaviorWarning(UserWarning):
37-
"""Warning class used to notify the user of any change in the behavior."""
40+
"""Warning class used to notify the user of any change in the behavior.
41+
42+
.. versionchanged:: 0.18
43+
Moved from sklearn.base.
44+
"""
3845

3946

4047
class ConvergenceWarning(UserWarning):
41-
"""Custom warning to capture convergence problems"""
48+
"""Custom warning to capture convergence problems
49+
50+
.. versionchanged:: 0.18
51+
Moved from sklearn.utils.
52+
"""
4253

4354

4455
class DataConversionWarning(UserWarning):
@@ -53,6 +64,9 @@ class DataConversionWarning(UserWarning):
5364
- requests a non-copying operation, but a copy is required to meet the
5465
implementation's data-type expectations;
5566
- passes an input whose shape can be interpreted ambiguously.
67+
68+
.. versionchanged:: 0.18
69+
Moved from sklearn.utils.validation.
5670
"""
5771

5872

@@ -64,6 +78,9 @@ class DataDimensionalityWarning(UserWarning):
6478
projection space, is higher than the number of features, which quantifies
6579
the dimensionality of the original source space, to imply that the
6680
dimensionality of the problem will not be reduced.
81+
82+
.. versionchanged:: 0.18
83+
Moved from sklearn.utils.
6784
"""
6885

6986

@@ -73,6 +90,8 @@ class EfficiencyWarning(UserWarning):
7390
This warning notifies the user that the efficiency may not be optimal due
7491
to some reason which may be included as a part of the warning message.
7592
This may be subclassed into a more specific Warning class.
93+
94+
.. versionadded:: 0.18
7695
"""
7796

7897

@@ -102,6 +121,9 @@ class FitFailedWarning(RuntimeWarning):
102121
FitFailedWarning("Classifier fit failed. The score on this train-test
103122
partition for these parameters will be set to 0.000000. Details:
104123
\\nValueError('Penalty term must be positive; got (C=-2)',)",)
124+
125+
.. versionchanged:: 0.18
126+
Moved from sklearn.cross_validation.
105127
"""
106128

107129

@@ -110,8 +132,15 @@ class NonBLASDotWarning(EfficiencyWarning):
110132
111133
This warning is used to notify the user that BLAS was not used for dot
112134
operation and hence the efficiency may be affected.
135+
136+
.. versionchanged:: 0.18
137+
Moved from sklearn.utils.validation, extends EfficiencyWarning.
113138
"""
114139

115140

116141
class UndefinedMetricWarning(UserWarning):
117-
"""Warning used when the metric is invalid"""
142+
"""Warning used when the metric is invalid
143+
144+
.. versionchanged:: 0.18
145+
Moved from sklearn.base.
146+
"""

sklearn/gaussian_process/gpc.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -138,6 +138,8 @@ def optimizer(obj_func, initial_theta, bounds):
138138
139139
log_marginal_likelihood_value_: float
140140
The log-marginal-likelihood of ``self.kernel_.theta``
141+
142+
.. versionadded:: 0.18
141143
"""
142144
def __init__(self, kernel=None, optimizer="fmin_l_bfgs_b",
143145
n_restarts_optimizer=0, max_iter_predict=100,
@@ -546,6 +548,8 @@ def optimizer(obj_func, initial_theta, bounds):
546548
547549
n_classes_ : int
548550
The number of classes in the training data
551+
552+
.. versionadded:: 0.18
549553
"""
550554
def __init__(self, kernel=None, optimizer="fmin_l_bfgs_b",
551555
n_restarts_optimizer=0, max_iter_predict=100,

sklearn/gaussian_process/gpr.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -125,6 +125,8 @@ def optimizer(obj_func, initial_theta, bounds):
125125
126126
log_marginal_likelihood_value_: float
127127
The log-marginal-likelihood of ``self.kernel_.theta``
128+
129+
.. versionadded:: 0.18
128130
"""
129131
def __init__(self, kernel=None, alpha=1e-10,
130132
optimizer="fmin_l_bfgs_b", n_restarts_optimizer=0,

0 commit comments

Comments
 (0)
0