 @testset "regression.jl" begin
 
-# number of input features
-N = 200
-# homoscedastic observation noise
-σ = 0.3
+    # number of input features
+    N = 200
+    # homoscedastic observation noise
+    σ = 0.3
 
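Note: the file itself contains no `using` statements; names such as `ESSModel`, `pairwise`, and `MvNormal` are imported by the test suite's shared setup. A plausible standalone preamble, hypothetical since the actual setup lives in the package's runtests.jl, would be:

using EllipticalSliceSampling  # ESSModel, ESS
using AbstractMCMC: sample     # assumed source of `sample`; it may also be re-exported
using Distributions            # MvNormal, logpdf
using Distances                # SqEuclidean, pairwise
using LinearAlgebra            # Symmetric, I
using Statistics               # mean
using Test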
-# experiment of Murray et al.
-@testset "GP regression" begin
-    # added to the diagonal of the covariance matrix due to numerical issues of
-    # the Cholesky decomposition
-    jitter = 1e-9
+    # experiment of Murray et al.
+    @testset "GP regression" begin
+        # added to the diagonal of the covariance matrix due to numerical issues of
+        # the Cholesky decomposition
+        jitter = 1e-9
 
-    # for different dimensions of input features
-    for d in 1:10
-        # sample input features
-        inputs = rand(d, N)
+        # for different dimensions of input features
+        for d in 1:10
+            # sample input features
+            inputs = rand(d, N)
 
-        # define covariance matrix of latent variable
-        # add noise to the diagonal due to numerical issues
-        prior_Σ = Symmetric(exp.((-0.5) .* pairwise(SqEuclidean(), inputs; dims = 2))) +
-            jitter * I
-        prior = MvNormal(prior_Σ)
+            # define covariance matrix of latent variable
+            # add noise to the diagonal due to numerical issues
+            prior_Σ =
+                Symmetric(exp.((-0.5) .* pairwise(SqEuclidean(), inputs; dims=2))) +
+                jitter * I
+            prior = MvNormal(prior_Σ)
 
-        # sample noisy observations
-        observations = rand(prior) .+ σ .* randn(N)
+            # sample noisy observations
+            observations = rand(prior) .+ σ .* randn(N)
 
-        # define log likelihood function
-        ℓ(f) = let observations = observations, σ = σ
-            logpdf(MvNormal(f, σ), observations)
-        end
+            # define log likelihood function
+            ℓ(f) =
+                let observations = observations, σ = σ
+                    logpdf(MvNormal(f, σ), observations)
+                end
 
-        # run elliptical slice sampler for 100 000 time steps
-        samples = sample(ESSModel(prior, ℓ), ESS(), 100_000; progress = false)
+            # run elliptical slice sampler for 100 000 time steps
+            samples = sample(ESSModel(prior, ℓ), ESS(), 100_000; progress=false)
 
-        # compute analytical posterior of GP
-        posterior_Σ = prior_Σ * (I - (prior_Σ + σ^2 * I) \ prior_Σ)
-        posterior_μ = posterior_Σ * observations / σ^2
+            # compute analytical posterior of GP
+            posterior_Σ = prior_Σ * (I - (prior_Σ + σ^2 * I) \ prior_Σ)
+            posterior_μ = posterior_Σ * observations / σ^2
 
-        # compare with empirical estimates
-        @test mean(samples) ≈ posterior_μ rtol = 0.05
+            # compare with empirical estimates
+            @test mean(samples) ≈ posterior_μ rtol = 0.05
+        end
     end
-end
 
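For reference, the analytical posterior checked in this testset is standard Gaussian conjugacy: with prior $f \sim \mathcal{N}(0, \Sigma)$ and observations $y = f + \varepsilon$, $\varepsilon \sim \mathcal{N}(0, \sigma^2 I)$, the posterior over $f$ is Gaussian with

$$\Sigma_{\mathrm{post}} = \left(\Sigma^{-1} + \sigma^{-2} I\right)^{-1} = \Sigma \bigl(I - (\Sigma + \sigma^2 I)^{-1} \Sigma\bigr), \qquad \mu_{\mathrm{post}} = \sigma^{-2} \, \Sigma_{\mathrm{post}} \, y,$$

which is exactly what `posterior_Σ` and `posterior_μ` compute; the backslash solve `(prior_Σ + σ^2 * I) \ prior_Σ` avoids forming the matrix inverse explicitly.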
-# extreme case with independent observations
-@testset "Independent components" begin
-    # define distribution of latent variables
-    prior = MvNormal(N, 1)
+    # extreme case with independent observations
+    @testset "Independent components" begin
+        # define distribution of latent variables
+        prior = MvNormal(N, 1)
 
-    # sample noisy observations
-    observations = rand(prior) .+ σ .* randn(N)
+        # sample noisy observations
+        observations = rand(prior) .+ σ .* randn(N)
 
-    # define log likelihood function
-    ℓ(f) = let observations = observations, σ = σ
-        logpdf(MvNormal(f, σ), observations)
-    end
+        # define log likelihood function
+        ℓ(f) =
+            let observations = observations, σ = σ
+                logpdf(MvNormal(f, σ), observations)
+            end
 
-    # run elliptical slice sampling for 100 000 time steps
-    samples = sample(ESSModel(prior, ℓ), ESS(), 100_000; progress = false)
+        # run elliptical slice sampling for 100 000 time steps
+        samples = sample(ESSModel(prior, ℓ), ESS(), 100_000; progress=false)
 
-    # compute analytical posterior
-    posterior_μ = observations / (1 + σ^2)
+        # compute analytical posterior
+        posterior_μ = observations / (1 + σ^2)
 
-    # compare with empirical estimates
-    @test mean(samples) ≈ posterior_μ rtol = 0.02
-end
+        # compare with empirical estimates
+        @test mean(samples) ≈ posterior_μ rtol = 0.02
+    end
 end
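In this second testset the prior covariance is $\Sigma = I$, so the conjugacy formulas above collapse to

$$\Sigma_{\mathrm{post}} = \frac{\sigma^2}{1 + \sigma^2} \, I, \qquad \mu_{\mathrm{post}} = \frac{y}{1 + \sigma^2},$$

matching `posterior_μ = observations / (1 + σ^2)`.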