# Moritz Schauer / Jul 30 2019
# Remix of the introductory Turing.jl `gdemo` tutorial

# Probabilistic programming

#=Pkg.add("Turing")
Pkg.add("Distributions")=#
using Turing
using StatsPlots

# Define a simple Normal model with unknown mean and variance.
# Define a simple Normal model with unknown mean and variance:
# the variance s gets an InverseGamma prior, the mean m a Normal prior
# scaled by s, and the two observations x, y are conditionally iid
# Normal(m, sqrt(s)).  x and y enter as data via the model arguments.
@model gdemo(x, y) = begin
s ~ InverseGamma(2.0, 3.0)  # prior on the variance s
m ~ Normal(0.0, sqrt(s))    # prior on the mean m, coupled to s
x ~ Normal(m, sqrt(s))      # likelihood of the first observation
y ~ Normal(m, sqrt(s))      # likelihood of the second observation
end

#  Run sampler, collect results
# NOTE(review): HMC(n_samples, stepsize, n_leapfrog) is the pre-0.7 Turing
# constructor; current Turing expects HMC(stepsize, n_leapfrog) with the
# sample count passed to `sample` — confirm against the installed version.
chn = sample(gdemo(1.5, 2), HMC(1000, 0.1, 5));
# Trace/density plot of the posterior draws of s.
plot(Chains(chn[:s]))
# Side-by-side posterior densities of s and m.
plot(plot(density(Chains(chn[:s])), title="s"),
plot(density(Chains(chn[:m])), title="m"))
using Distributions, Plots
# Simulate n steps of the AR(1) process  x_{k+1} = η x_k + z_k,
# with iid innovations z_k ~ N(0, 1), started at x = 0.
n = 5000                 # number of recorded samples
P = Normal(0.0, 1.0)     # innovation distribution
η = 0.8                  # autoregression coefficient (|η| < 1 ⇒ stationary)
f(x, z) = η * x + z      # one-step transition

X = zeros(n)
let x = 0.0
    for i in 1:n
        X[i] = x         # record the state before stepping
        z = rand(P)
        x = f(x, z)
    end
end
plot(X)
# Empirical vs. theoretical stationary standard deviation, sqrt(1/(1-η²)).
# FIX: use η instead of the hard-coded 0.8 so the check tracks the parameter.
std(X), sqrt(1 / (1 - η^2))
# One-time setup — commented out to match the file's convention (see top of
# file) and because unconditional Pkg.add errors on Julia ≥ 0.7 without
# `using Pkg` and re-resolves packages on every run:
#=Pkg.add.(["Distributions", "Plots"]);=#
using Distributions, Plots
# Euler-discretised ensemble of the random-decay ODE  du/dt = -θ u,
# with a random rate θ_j and a random initial value per trajectory.
n = 100     # number of time steps per trajectory
m = 100     # number of trajectories
dt = 0.01   # Euler step size
Θ = rand(Normal(0.5, sqrt(0.1)), m)   # decay rate θ_j for each trajectory
X = zeros(n, m)                       # X[i, j] = trajectory j at step i
# FIX: the initial row holds m entries (one per trajectory); the original
# drew n values and only worked because n == m happened to hold.
X[1, :] = rand(Normal(5, sqrt(3)), m)
t = [0.0]
let X = X
    for i in 2:n
        push!(t, t[end] + dt)
        for j in 1:m
            # explicit Euler step: u_i = (1 - θ dt) u_{i-1}
            X[i, j] = (1 - Θ[j]*dt) * X[i-1, j]
        end
    end
end
# Left: all trajectories over time; right: histogram of the final values.
plot(plot(t, X, legend=false, label="u", color="light blue"),
     histogram(X[end, :], label="u(1)", color="light blue"))