Moritz Schauer / Feb 05 2020
Remix of Probabilistic programming by Moritz Schauer
Inference with Turing V. 3
Simple hierarchical model
# One-time package installation (assumes `Pkg` is already in scope).
Pkg.add("Turing")
Pkg.add("StatsPlots")
Pkg.add("Distributions")
134.1s
Julia
using Turing
using StatsPlots
# Simple hierarchical (state-space) model: a latent random walk X1 → X2 → X3
# observed through noisy measurements y1, y2, y3.
# BUG FIX: the definition was a plain function; Turing's `~` syntax only works
# inside the `@model` macro (binary `~` is a MethodError otherwise).
@model icecore2(y1, y2, y3, σ, s, s0) = begin
    # NOTE(review): `s` is both a model argument (passed as 5.0 by the caller)
    # and appears on the left of `~`; in Turing an argument with a concrete
    # value is treated as observed, not sampled. Looks like a leftover from an
    # earlier version where `s` was inferred — confirm intent.
    s ~ Normal(0.0, 1.0)
    X1 ~ Normal(0.0, s0)   # diffuse prior on the first state
    X2 ~ Normal(X1, s)     # random-walk transitions with std dev s
    X3 ~ Normal(X2, s)
    y1 ~ Normal(X1, σ)     # observation noise std dev σ
    y2 ~ Normal(X2, σ)
    y3 ~ Normal(X3, 2σ)    # third observation is twice as noisy
end;
σ, s = 5.0, 5.0   # observation noise and state-transition std devs
s0 = 100.0        # diffuse prior std dev for the first latent state
y = [-30.3, -43.0, -40.0]
# Run sampler, collect results
# NOTE(review): HMC(n_iters, ϵ, n_leapfrog) is the old Turing-v0.x signature;
# newer Turing passes the iteration count to `sample` instead — confirm version.
chn = sample(icecore2(y[1], y[2], y[3], σ, s, s0), HMC(100000, 0.1, 5));
chn;
# Thin each latent-state chain to every 10th draw, then report the posterior
# mean ± standard deviation of X1, X2 and X3.
X1chain = chn[:X1].value[1:10:end]
X2chain = chn[:X2].value[1:10:end]
X3chain = chn[:X3].value[1:10:end]
using Statistics
for (label, draws) in (("X1", X1chain), ("X2", X2chain), ("X3", X3chain))
    println(label, " =", mean(draws), " +- ", std(draws))
end
21.2s
Julia
#plot(Chains(chn[:X1]))
# Trace plots of the thinned X1 and X2 posterior draws.
plot(chn[:X1].value[1:10:end], label="X1")
plot!(chn[:X2].value[1:10:end], label="X2")
1.1s
Julia
# Posterior means of the three latent states, overlaid on the data.
# BUG FIX: the assignment target was the garbled token `0μ` (a stray `0`,
# presumably a pasted cell label), which is a syntax error in Julia.
μ = [mean(X1chain), mean(X2chain), mean(X3chain)]
plot(y, label="y")
plot!(μ, label="μ")
1.6s
Julia
# Posterior density plots for X1 and X2 (chains thinned to every 10th draw).
X1chain = chn[:X1].value[1:10:end]
# BUG FIX: this line read chn[:X1] — a copy-paste error, so the "X2" density
# panel was actually showing X1's draws.
X2chain = chn[:X2].value[1:10:end]
plot(plot(density(X1chain), title="X1"),
     plot(density(X2chain), title="X2"))
#plot(plot(density(Chains(chn[:X1])), title="X1"),
#     plot(density(Chains(chn[:X2])), title="X2"))
1.2s
Julia
Shift+Enter to run
Julia
Markov Chain
using Distributions, Plots
# Simulate n steps of the AR(1) Markov chain X_{t+1} = η X_t + Z_t with
# Z_t ~ N(0, 1), started at 0. X[i] records the state *before* the i-th update.
n = 5000
P = Normal(0.0, 1.0)
η = 0.8
h(x, z) = η * x + z
X = zeros(n)
let state = 0.0
    for i in eachindex(X)
        X[i] = state
        state = h(state, rand(P))
    end
end
0.3s
Julia
# Trace of the simulated AR(1) path.
plot(X)
0.6s
Julia
# Empirical std dev vs. the theoretical stationary value sqrt(1/(1-η²)) for η = 0.8.
std(X), sqrt(1/(1- 0.8^2))
0.2s
Julia
(1.65201, 1.66667)
ODE example
# Broadcast Pkg.add over both package names (one-time installation).
Pkg.add.(["Distributions", "Plots"]);
16.5s
Julia
using Distributions, Plots
# Monte Carlo ensemble for the linear decay ODE du/dt = -Θ u, discretized with
# explicit Euler: u_i = (1 - Θ dt) u_{i-1}. m trajectories, each with a random
# decay rate Θ_j and a random initial condition, over n time steps of size dt.
n = 100    # number of time steps
m = 100    # number of trajectories
dt = 0.01
Θ = rand(Normal(0.5, sqrt(0.1)), m)
X = zeros(n, m)   # rows = time steps, columns = trajectories
# BUG FIX: the initial-condition row X[1, :] has m entries (one per
# trajectory); the original drew `rand(..., n)` and only ran because n == m.
X[1, :] = rand(Normal(5, sqrt(3)), m)
t = [0.0]
let X = X
    for i in 2:n
        push!(t, t[end] + dt)
        for j in 1:m
            X[i, j] = (1-Θ[j]*dt)*X[i-1, j]
        end
    end
end
plot(plot(t, X, legend=false, label="u", color="light blue"),
     histogram(X[end,:], label="u(1)", color="light blue"))
1.1s
Julia
Filtering the time series
using Turing
# Gaussian random-walk state-space model for filtering a time series y:
# X[1] ~ N(0, s0), X[i] ~ N(X[i-1], s), and y[i] ~ N(X[i], σ).
# BUG FIX: the definition lacked `@model`; Turing's `~` syntax only works
# inside the macro (binary `~` is a MethodError in plain Julia).
@model icecore(y, σ, s, s0) = begin
    # Get observation length.
    N = length(y)
    # State sequence. Typed container so Turing can track the sampled values
    # (an untyped Vector would be Vector{Any}).
    X = Vector{Real}(undef, N)
    X[1] ~ Normal(0.0, s0)
    for i = 2:N
        X[i] ~ Normal(X[i-1], s)
    end
    # Observe each point of the input.
    for i = 1:N
        y[i] ~ Normal(X[i], σ)
    end
end;
0.5s
Julia
y = [-30.0, -40.0, -20.0, -35.0]   # observations to filter
σ, s = 5.0, 5.0                    # observation and transition noise std devs
s0 = 100.0                         # diffuse prior std dev for X[1]
# BUG FIX: the model was invoked with the scalar `1.5` instead of the data
# vector `y` defined just above, so the intended observations were never used.
# NOTE(review): HMC(n_iters, ϵ, n_leapfrog, :X) is the old Turing-v0.x
# signature — confirm against the installed Turing version.
chn = sample(icecore(y, σ, s, s0), HMC(1000, 0.1, 5, :X));
3.2s
Julia
# Raw posterior draws for X as the underlying 3-D array
# (iterations × variables × chains).
chn[:X].value.data
2.1s
Julia
1000×1×1 Array{Union{Missing, Float64},3}:
[:, :, 1] =
2.04545
1.92587
1.50201
1.85594
1.09108
1.38515
1.68215
0.583994
-0.0306877
0.103294
⋮
13.2335
12.89
13.4515
13.6189
12.7772
12.5271
13.079
12.3667
13.1263
Shift+Enter to run
Julia
Example
# Install and load Distributions for the Metropolis–Hastings example below.
import Pkg; Pkg.add("Distributions")
using Distributions
142.1s
Julia
# Unnormalized target density in x: InverseGamma(alpha, beta) prior on x times
# an Exponential(lambda) likelihood for the residual y - r*x.
f(y, x, alpha, beta, r, lambda) = pdf(InverseGamma(alpha, beta), x)*pdf(Exponential(lambda), y-r*x)
alpha, beta, r, lambda = 2.0, 1.0, .5, 1.0
sigma = 0.1   # random-walk step size (on the log scale)
y = 1.0
# Multiplicative (log-normal) random-walk proposal and its transition density.
nextx(x) = x*exp(sigma*randn())
q(x, xnext) = pdf(LogNormal(log(x), sigma), xnext)
p(x) = f(y, x, alpha, beta, r, lambda)
x = 0.2       # initial state
n = 1000 # number of Metropolis–Hastings steps
chain = zeros(n)
for i in 1:n
    global x
    xprime = nextx(x)
    # Metropolis–Hastings acceptance probability.
    # BUG FIX: this was assigned to `alpha`, clobbering the InverseGamma shape
    # parameter that p(x) reads (in REPL soft scope the assignment hits the
    # global), corrupting the target density after the first iteration.
    a = min( (p(xprime)*q(xprime, x) ) / ( (p(x)*q(x, xprime) ) ) , 1.0)
    z = rand() # Uniform[0, 1]
    if z < a
        x = xprime
    else
        # proposal rejected: x does not change
    end
    chain[i] = x
end
using Plots
plot(chain)
92.2s
Julia
# Posterior mean and std dev of the MH chain (mean/std come from Statistics,
# loaded earlier — confirm it is in scope in this session).
mean(chain) , std(chain)
1.3s
Julia
(0.853125, 0.522669)
Shift+Enter to run
Julia