From 087fea65a2386d7c82466a868cd811f3d36a183e Mon Sep 17 00:00:00 2001 From: craetona Date: Thu, 2 May 2024 22:18:49 -0700 Subject: [PATCH 1/2] Add example link to kNN Regressor --- sklearn/neighbors/_regression.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/sklearn/neighbors/_regression.py b/sklearn/neighbors/_regression.py index 2897c1ce409e8..343f333ef4e63 100644 --- a/sklearn/neighbors/_regression.py +++ b/sklearn/neighbors/_regression.py @@ -49,6 +49,11 @@ class KNeighborsRegressor(KNeighborsMixin, RegressorMixin, NeighborsBase): Uniform weights are used by default. + Refer to the example entitled + :ref:`sphx_glr_auto_examples_neighbors_plot_regression.py` + showing the impact of the 'weights' parameter on the decisison + boundary. + algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, default='auto' Algorithm used to compute the nearest neighbors: From d9b38ea2ce4c7a1fcd107986d3d4bd82aef36bda Mon Sep 17 00:00:00 2001 From: adrinjalali Date: Tue, 11 Jun 2024 14:15:44 +0200 Subject: [PATCH 2/2] DOC improve example --- examples/neighbors/plot_regression.py | 27 ++++++++++++++------------- sklearn/neighbors/_regression.py | 7 +++---- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/examples/neighbors/plot_regression.py b/examples/neighbors/plot_regression.py index d5ceba8a34860..431540e81761b 100644 --- a/examples/neighbors/plot_regression.py +++ b/examples/neighbors/plot_regression.py @@ -6,27 +6,26 @@ Demonstrate the resolution of a regression problem using a k-Nearest Neighbor and the interpolation of the target using both barycenter and constant weights. - """ -# Author: Alexandre Gramfort -# Fabian Pedregosa -# -# License: BSD 3 clause (C) INRIA - +# Authors: The scikit-learn developers +# SPDX-License-Identifier: BSD-3-Clause # %% # Generate sample data # -------------------- +# Here we generate a few data points to use to train the model. 
We also generate +# data in the whole range of the training data to visualize how the model would +# react in that whole region. import matplotlib.pyplot as plt import numpy as np from sklearn import neighbors -np.random.seed(0) -X = np.sort(5 * np.random.rand(40, 1), axis=0) -T = np.linspace(0, 5, 500)[:, np.newaxis] -y = np.sin(X).ravel() +rng = np.random.RandomState(0) +X_train = np.sort(5 * rng.rand(40, 1), axis=0) +X_test = np.linspace(0, 5, 500)[:, np.newaxis] +y = np.sin(X_train).ravel() # Add noise to targets y[::5] += 1 * (0.5 - np.random.rand(8)) @@ -34,15 +33,17 @@ # %% # Fit regression model # -------------------- +# Here we train a model and visualize how `uniform` and `distance` +# weights in prediction affect predicted values. n_neighbors = 5 for i, weights in enumerate(["uniform", "distance"]): knn = neighbors.KNeighborsRegressor(n_neighbors, weights=weights) - y_ = knn.fit(X, y).predict(T) + y_ = knn.fit(X_train, y).predict(X_test) plt.subplot(2, 1, i + 1) - plt.scatter(X, y, color="darkorange", label="data") - plt.plot(T, y_, color="navy", label="prediction") + plt.scatter(X_train, y, color="darkorange", label="data") + plt.plot(X_test, y_, color="navy", label="prediction") plt.axis("tight") plt.legend() plt.title("KNeighborsRegressor (k = %i, weights = '%s')" % (n_neighbors, weights)) diff --git a/sklearn/neighbors/_regression.py b/sklearn/neighbors/_regression.py index 343f333ef4e63..48a51ace630e6 100644 --- a/sklearn/neighbors/_regression.py +++ b/sklearn/neighbors/_regression.py @@ -49,10 +49,9 @@ class KNeighborsRegressor(KNeighborsMixin, RegressorMixin, NeighborsBase): Uniform weights are used by default. - Refer to the example entitled - :ref:`sphx_glr_auto_examples_neighbors_plot_regression.py` - showing the impact of the 'weights' parameter on the decisison - boundary. + See the following example for a demonstration of the impact of + different weighting schemes on predictions: + :ref:`sphx_glr_auto_examples_neighbors_plot_regression.py`. 
algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, default='auto' Algorithm used to compute the nearest neighbors: