From a9fc2ae4bd6bfa7715ea631be2a2cc57ad8dca26 Mon Sep 17 00:00:00 2001
From: baam25simo
Date: Wed, 8 Sep 2021 19:04:53 +0200
Subject: [PATCH 1/6] `GaussianProcessRegressor` removed from `DOCSTRING_IGNORE_LIST` in test_docstrings.py

---
 maint_tools/test_docstrings.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/maint_tools/test_docstrings.py b/maint_tools/test_docstrings.py
index 612817e23f6b9..ff8e1d29636a2 100644
--- a/maint_tools/test_docstrings.py
+++ b/maint_tools/test_docstrings.py
@@ -9,7 +9,6 @@
 
 # List of modules ignored when checking for numpydoc validation.
 DOCSTRING_IGNORE_LIST = [
-    "GaussianProcessRegressor",
     "GaussianRandomProjection",
     "GridSearchCV",
     "HalvingGridSearchCV",

From 5e094130afa307bf6723bcc739417413cafc29d2 Mon Sep 17 00:00:00 2001
From: baam25simo
Date: Wed, 8 Sep 2021 19:12:42 +0200
Subject: [PATCH 2/6] In `GaussianProcessRegressor`: "See Also" section added.

---
 sklearn/gaussian_process/_gpr.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/sklearn/gaussian_process/_gpr.py b/sklearn/gaussian_process/_gpr.py
index 8b503358423f4..d18b5b8cbe6b2 100644
--- a/sklearn/gaussian_process/_gpr.py
+++ b/sklearn/gaussian_process/_gpr.py
@@ -82,7 +82,7 @@ def optimizer(obj_func, initial_theta, bounds):
 
         Per default, the L-BFGS-B algorithm from `scipy.optimize.minimize`
         is used. If None is passed, the kernel's parameters are kept fixed.
-        Available internal optimizers are: `{'fmin_l_bfgs_b'}`
+        Available internal optimizers are: `{'fmin_l_bfgs_b'}`.
 
     n_restarts_optimizer : int, default=0
         The number of restarts of the optimizer for finding the kernel's
@@ -146,6 +146,11 @@
 
         .. versionadded:: 1.0
 
+    See Also
+    --------
+    GaussianProcessClassifier : Gaussian process classification (GPC)
+        based on Laplace approximation.
+
     References
     ----------
     .. [1] `Rasmussen, Carl Edward.

From 8ba21d9dde4c7c96ffc33b8368143bf81995e801 Mon Sep 17 00:00:00 2001
From: baam25simo
Date: Wed, 8 Sep 2021 19:14:40 +0200
Subject: [PATCH 3/6] In `GaussianProcessRegressor.fit`: `self` description added.

---
 sklearn/gaussian_process/_gpr.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sklearn/gaussian_process/_gpr.py b/sklearn/gaussian_process/_gpr.py
index d18b5b8cbe6b2..e9b5e24551a15 100644
--- a/sklearn/gaussian_process/_gpr.py
+++ b/sklearn/gaussian_process/_gpr.py
@@ -205,7 +205,8 @@ def fit(self, X, y):
 
         Returns
         -------
-        self : returns an instance of self.
+        self : object
+            GaussianProcessRegressor class instance.
         """
         if self.kernel is None:  # Use an RBF kernel as default
             self.kernel_ = C(1.0, constant_value_bounds="fixed") * RBF(

From 15c73a0bd67ffdc9e298206acb9e39cdf6968d6f Mon Sep 17 00:00:00 2001
From: baam25simo
Date: Wed, 8 Sep 2021 19:17:18 +0200
Subject: [PATCH 4/6] In `GaussianProcessRegressor.log_marginal_likelihood`: summary fixed.

---
 sklearn/gaussian_process/_gpr.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sklearn/gaussian_process/_gpr.py b/sklearn/gaussian_process/_gpr.py
index e9b5e24551a15..938d0685cef04 100644
--- a/sklearn/gaussian_process/_gpr.py
+++ b/sklearn/gaussian_process/_gpr.py
@@ -470,7 +470,7 @@ def sample_y(self, X, n_samples=1, random_state=0):
     def log_marginal_likelihood(
         self, theta=None, eval_gradient=False, clone_kernel=True
     ):
-        """Returns log-marginal likelihood of theta for training data.
+        """Return log-marginal likelihood of theta for training data.
 
         Parameters
         ----------

From 5da2ad378d817f08369eafd28ed49577e64a0c51 Mon Sep 17 00:00:00 2001
From: baam25simo
Date: Wed, 8 Sep 2021 19:17:53 +0200
Subject: [PATCH 5/6] In `GaussianProcessRegressor.predict`: summary fixed.

---
 sklearn/gaussian_process/_gpr.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sklearn/gaussian_process/_gpr.py b/sklearn/gaussian_process/_gpr.py
index 938d0685cef04..17ac810e0be0b 100644
--- a/sklearn/gaussian_process/_gpr.py
+++ b/sklearn/gaussian_process/_gpr.py
@@ -324,7 +324,7 @@ def obj_func(theta, eval_gradient=True):
         return self
 
     def predict(self, X, return_std=False, return_cov=False):
-        """Predict using the Gaussian process regression model
+        """Predict using the Gaussian process regression model.
 
         We can also predict based on an unfitted model by using the GP prior.
         In addition to the mean of the predictive distribution, optionally also

From bc24e6e78e88ef84a98c617cee4f11a5de92b8b2 Mon Sep 17 00:00:00 2001
From: baam25simo
Date: Wed, 8 Sep 2021 19:19:09 +0200
Subject: [PATCH 6/6] In `GaussianProcessRegressor.sample_y`: `n_sample` description fixed.

---
 sklearn/gaussian_process/_gpr.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sklearn/gaussian_process/_gpr.py b/sklearn/gaussian_process/_gpr.py
index 17ac810e0be0b..cf9a349f1b074 100644
--- a/sklearn/gaussian_process/_gpr.py
+++ b/sklearn/gaussian_process/_gpr.py
@@ -439,7 +439,7 @@ def sample_y(self, X, n_samples=1, random_state=0):
             Query points where the GP is evaluated.
 
         n_samples : int, default=1
-            Number of samples drawn from the Gaussian process per query point
+            Number of samples drawn from the Gaussian process per query point.
 
         random_state : int, RandomState instance or None, default=0
             Determines random number generation to randomly draw samples.
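
For context, a minimal usage sketch of the `GaussianProcessRegressor` API whose docstrings these patches touch: `fit` returning the estimator instance, `predict` with `return_std`, `sample_y`, `log_marginal_likelihood`, and a callable matching the documented `optimizer` signature. The toy data, kernel choice, and the `my_optimizer` wrapper around `scipy.optimize.minimize` are illustrative assumptions, not code from the patches.

    import numpy as np
    from scipy.optimize import minimize

    from sklearn.gaussian_process import GaussianProcessRegressor
    from sklearn.gaussian_process.kernels import RBF, ConstantKernel as C


    def my_optimizer(obj_func, initial_theta, bounds):
        # Callable with the documented `optimizer` signature. obj_func takes
        # theta and, by default, also returns the gradient, hence jac=True.
        res = minimize(obj_func, initial_theta, method="L-BFGS-B",
                       jac=True, bounds=bounds)
        return res.x, res.fun


    # Toy 1-D data and kernel (illustrative assumptions).
    rng = np.random.RandomState(0)
    X = rng.uniform(0, 5, 20)[:, np.newaxis]
    y = np.sin(X).ravel() + rng.normal(0, 0.1, X.shape[0])
    kernel = C(1.0) * RBF(length_scale=1.0)

    gpr = GaussianProcessRegressor(kernel=kernel, optimizer=my_optimizer,
                                   random_state=0)
    gpr = gpr.fit(X, y)  # fit returns the GaussianProcessRegressor instance

    X_test = np.linspace(0, 5, 10)[:, np.newaxis]
    y_mean, y_std = gpr.predict(X_test, return_std=True)  # predictive mean, std
    y_samples = gpr.sample_y(X_test, n_samples=3, random_state=0)  # GP draws at X_test
    lml = gpr.log_marginal_likelihood(gpr.kernel_.theta)  # LML of fitted theta

Passing `return_cov=True` instead of `return_std=True` makes `predict` return the full predictive covariance matrix over the query points.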