|
28 | 28 |
|
29 | 29 | # Author: Yoshihiro Uchida <nimbus1after2a1sun7shower@gmail.com>
|
30 | 30 |
|
| 31 | +# %% |
| 32 | +# Generate sinusoidal data with noise |
| 33 | +# ----------------------------------- |
31 | 34 | import numpy as np
|
32 |
| -import matplotlib.pyplot as plt |
33 |
| - |
34 |
| -from sklearn.linear_model import BayesianRidge |
35 | 35 |
|
36 | 36 |
|
def func(x):
    """Ground-truth signal: return sin(2*pi*x) for input(s) ``x``.

    Works elementwise on scalars or NumPy arrays, since it only uses
    ``np.sin`` and scalar multiplication.
    """
    angular_freq = 2 * np.pi  # one full period over the unit interval
    return np.sin(angular_freq * x)
|
39 | 39 |
|
40 | 40 |
|
41 |
| -# ############################################################################# |
42 |
| -# Generate sinusoidal data with noise |
# Build a small deterministic synthetic dataset: noisy samples of the
# true signal func(x) = sin(2*pi*x) on the unit interval, plus a dense
# grid of test points for plotting/prediction.
size = 25  # number of training samples
rng = np.random.RandomState(1234)  # fixed seed so the example is reproducible
x_train = rng.uniform(0.0, 1.0, size)  # random sample locations in [0, 1]
# Targets are the true signal plus Gaussian noise (std dev 0.1).
y_train = func(x_train) + rng.normal(scale=0.1, size=size)
x_test = np.linspace(0.0, 1.0, 100)  # evenly spaced evaluation grid
|
48 | 46 |
|
49 | 47 |
|
50 |
| -# ############################################################################# |
| 48 | +# %% |
51 | 49 | # Fit by cubic polynomial
|
| 50 | +# ----------------------- |
| 51 | +from sklearn.linear_model import BayesianRidge |
| 52 | + |
# Expand the 1-D inputs into polynomial features and configure the model.
n_order = 3  # cubic polynomial
# Vandermonde matrices with columns [1, x, x**2, x**3] (increasing powers),
# i.e. the design matrices for a degree-3 polynomial fit.
X_train = np.vander(x_train, n_order + 1, increasing=True)
X_test = np.vander(x_test, n_order + 1, increasing=True)
# fit_intercept=False because the constant term is already the first
# Vandermonde column; compute_score=True records the log marginal
# likelihood at each iteration for later inspection.
reg = BayesianRidge(tol=1e-6, fit_intercept=False, compute_score=True)
55 | 57 |
|
56 |
| -# ############################################################################# |
| 58 | +# %% |
57 | 59 | # Plot the true and predicted curves with log marginal likelihood (L)
|
58 |
| -reg = BayesianRidge(tol=1e-6, fit_intercept=False, compute_score=True) |
| 60 | +# ------------------------------------------------------------------- |
| 61 | +import matplotlib.pyplot as plt |
| 62 | + |
59 | 63 | fig, axes = plt.subplots(1, 2, figsize=(8, 4))
|
60 | 64 | for i, ax in enumerate(axes):
|
61 | 65 | # Bayesian ridge regression with different initial value pairs
|
|
0 commit comments