@@ -1304,21 +1304,6 @@ class NuSVR(RegressorMixin, BaseLibSVM):
1304
1304
support_vectors_ : ndarray of shape (n_SV, n_features)
1305
1305
Support vectors.
1306
1306
1307
- Examples
1308
- --------
1309
- >>> from sklearn.svm import NuSVR
1310
- >>> from sklearn.pipeline import make_pipeline
1311
- >>> from sklearn.preprocessing import StandardScaler
1312
- >>> import numpy as np
1313
- >>> n_samples, n_features = 10, 5
1314
- >>> np.random.seed(0)
1315
- >>> y = np.random.randn(n_samples)
1316
- >>> X = np.random.randn(n_samples, n_features)
1317
- >>> regr = make_pipeline(StandardScaler(), NuSVR(C=1.0, nu=0.1))
1318
- >>> regr.fit(X, y)
1319
- Pipeline(steps=[('standardscaler', StandardScaler()),
1320
- ('nusvr', NuSVR(nu=0.1))])
1321
-
1322
1307
See Also
1323
1308
--------
1324
1309
NuSVC : Support Vector Machine for classification implemented with libsvm
@@ -1335,6 +1320,21 @@ class NuSVR(RegressorMixin, BaseLibSVM):
1335
1320
.. [2] `Platt, John (1999). "Probabilistic outputs for support vector
1336
1321
machines and comparison to regularized likelihood methods."
1337
1322
<http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.41.1639>`_
1323
+
1324
+ Examples
1325
+ --------
1326
+ >>> from sklearn.svm import NuSVR
1327
+ >>> from sklearn.pipeline import make_pipeline
1328
+ >>> from sklearn.preprocessing import StandardScaler
1329
+ >>> import numpy as np
1330
+ >>> n_samples, n_features = 10, 5
1331
+ >>> np.random.seed(0)
1332
+ >>> y = np.random.randn(n_samples)
1333
+ >>> X = np.random.randn(n_samples, n_features)
1334
+ >>> regr = make_pipeline(StandardScaler(), NuSVR(C=1.0, nu=0.1))
1335
+ >>> regr.fit(X, y)
1336
+ Pipeline(steps=[('standardscaler', StandardScaler()),
1337
+ ('nusvr', NuSVR(nu=0.1))])
1338
1338
"""
1339
1339
1340
1340
_impl = "nu_svr"
0 commit comments