Github: https://github.com/quangtiencs/bayesian-cognitive-modeling-with-turing.jl
using DynamicPPL, Turing
using StatsPlots, Random
using LaTeXStrings
using CSV
using DataFrames
using SpecialFunctions
# Output format used for all plots generated below.
format=:png
$$ \mu \sim \text{Gaussian}(0, \sqrt{1000}) $$ $$ \sigma \sim \text{Uniform} (0, 10) $$ $$ x_{i} \sim \text{Gaussian} (\mu, \sigma) $$
# Four observations of the quantity being measured.
x = [1.1, 1.9, 2.3, 1.8]

# Infer the mean and standard deviation of a Gaussian from the data `x`.
@model function GaussianModel(x)
    # Vague Gaussian prior on the mean; uniform prior on the standard deviation.
    mu ~ Normal(0, sqrt(1000))
    sigma ~ Uniform(0, 10.0)
    # Each observation is an independent draw from the same Gaussian.
    for idx in eachindex(x)
        x[idx] ~ Normal(mu, sigma)
    end
end
# Number of posterior draws to request from the sampler.
iterations=10_000
# NUTS with 2000 adaptation steps and a 0.9 target acceptance rate;
# keep every 10th draw to reduce autocorrelation.
chain = sample(GaussianModel(x), NUTS(2000, 0.9), iterations, thinning=10)
# Trace and marginal-density plots for mu and sigma.
plot(chain, alpha=0.5, size=(700, 400), fmt=format)
chain
Iterations = 2001:10:101991
Number of chains = 1
Samples per chain = 10000
Wall duration = 6.8 seconds
Compute duration = 6.8 seconds
parameters = mu, sigma
internals = lp, n_steps, is_accept, acceptance_rate, log_density,
hamiltonian_energy, hamiltonian_energy_error,
max_hamiltonian_energy_error, tree_depth, numerical_error,
step_size, nom_step_size
Summary Statistics
parameters mean std naive_se mcse ess rhat ⋯
Symbol Float64 Float64 Float64 Float64 Float64 Float64 ⋯
mu 1.7676 0.6697 0.0067 0.0072 8799.3432 1.0000 ⋯
sigma 0.9946 0.8889 0.0089 0.0093 8529.8922 1.0000 ⋯
1 column omitted
Quantiles
parameters 2.5% 25.0% 50.0% 75.0% 97.5%
Symbol Float64 Float64 Float64 Float64 Float64
mu 0.4649 1.5347 1.7764 2.0204 2.9320
sigma 0.3130 0.5140 0.7272 1.1275 3.4459
# Joint posterior of (mu, sigma) with marginal KDEs on the side panels.
p = marginalkde(chain[:mu], chain[:sigma], size=(500, 500), fmt=format)
# Overlay the raw posterior draws on the central (joint) panel.
scatter!(p.subplots[2], chain[:mu], chain[:sigma], alpha=0.1, markersize=1)
# Autocorrelation of the (thinned) chains.
autocorplot(chain, fmt=format)
$$ \mu \sim \text{Gaussian}(0, \sqrt{1000}) $$
$$ \lambda_{i} \sim \text{Gamma} (k=0.01, \theta = 1 / 0.01) $$
$$ \sigma_{i} = 1/\sqrt{\lambda_{i}} $$
$$ x_{i} \sim \text{Gaussian} (\mu, \sigma_{i}) $$
# Measurements of the same quantity reported by seven scientists of very
# different reliability (one wildly off, most in close agreement).
x = [-27.020, 3.570, 8.191, 9.898, 9.603, 9.945, 10.056]

# Shared mean, but a separate precision (hence standard deviation) per scientist.
@model function SvenScientists(x)
    # Vague Gaussian prior on the shared mean.
    mu ~ Normal(0, sqrt(1000))
    # Gamma here is parameterized by shape α and scale θ — not shape/rate.
    # NOTE(review): the book's WinBUGS model uses dgamma(.001, .001);
    # confirm that shape 0.01 / rate 0.01 is the intended choice here.
    # One precision per observation (generalizes the previously hard-coded 7).
    lambda ~ filldist(Gamma(0.01, 1 / 0.01), length(x))
    # Convert precisions to standard deviations.
    sigma = 1 ./ (sqrt.(lambda))
    # Each scientist's report carries its own noise level.
    for i in eachindex(x)
        x[i] ~ Normal(mu, sigma[i])
    end
end
# Number of posterior draws to request.
iterations=20_000
# NOTE(review): `burnin` may be ignored by recent Turing/AbstractMCMC
# versions (the supported keyword there is `discard_initial`); NUTS()
# also runs its own adaptation phase. Confirm against the installed version.
burnin=4_000
chain = sample(SvenScientists(x), NUTS(), iterations, burnin=burnin)
# Trace and density plots for mu and the per-scientist lambdas.
plot(chain, alpha=0.5, size=(700, 1500), fmt=format)
chain
chain
Iterations = 2001:10:101991
Number of chains = 1
Samples per chain = 10000
Wall duration = 6.8 seconds
Compute duration = 6.8 seconds
parameters = mu, sigma
internals = lp, n_steps, is_accept, acceptance_rate, log_density,
hamiltonian_energy, hamiltonian_energy_error,
max_hamiltonian_energy_error, tree_depth, numerical_error,
step_size, nom_step_size
Summary Statistics
parameters mean std naive_se mcse ess rhat ⋯
Symbol Float64 Float64 Float64 Float64 Float64 Float64 ⋯
mu 1.7676 0.6697 0.0067 0.0072 8799.3432 1.0000 ⋯
sigma 0.9946 0.8889 0.0089 0.0093 8529.8922 1.0000 ⋯
1 column omitted
Quantiles
parameters 2.5% 25.0% 50.0% 75.0% 97.5%
Symbol Float64 Float64 Float64 Float64 Float64
mu 0.4649 1.5347 1.7764 2.0204 2.9320
sigma 0.3130 0.5140 0.7272 1.1275 3.4459
?Turing.Gamma
Gamma(α,θ)
The Gamma distribution with shape parameter α
and scale θ
has probability density function
$$ f(x; \alpha, \theta) = \frac{x^{\alpha-1} e^{-x/\theta}}{\Gamma(\alpha) \theta^\alpha}, \quad x > 0 $$
$$ \mu_{i} \sim \text{Uniform}(0, 300) $$ $$ \sigma \sim \text{Uniform} (0, 100) $$ $$ x_{ij} \sim \text{Gaussian} (\mu_{i}, \sigma) $$
# Three repeated IQ measurements for each of three people.
x = [[90, 95, 100], [105, 110, 115], [150, 155, 160]]

# One latent IQ per person, with measurement noise shared across people.
@model function RepeatedMeasurementIQ(x)
    # One mean per person (generalizes the previously hard-coded 3 groups).
    mu_i ~ filldist(Uniform(0, 300.), length(x))
    # Common measurement standard deviation.
    sigma ~ Uniform(0, 100.)
    # Every repeated measurement of person i is centered on that person's mean.
    for i in eachindex(x)
        for j in eachindex(x[i])
            x[i][j] ~ Normal(mu_i[i], sigma)
        end
    end
end
# Number of posterior draws to request.
iterations=20_000
# NOTE(review): `burnin` may be ignored by recent Turing versions
# (`discard_initial` is the supported keyword); confirm.
burnin=4_000
chain = sample(RepeatedMeasurementIQ(x), NUTS(), iterations, burnin=burnin)
# Trace and density plots for the three means and sigma.
plot(chain, fmt=format, alpha=0.5, size=(700, 800))
# Overlay the three posterior IQ densities in one panel.
density(chain[:"mu_i[1]"], size=(800, 150))
density!(chain[:"mu_i[2]"])
density!(chain[:"mu_i[3]"])
chain
chain
Iterations = 1001:1:21000
Number of chains = 1
Samples per chain = 20000
Wall duration = 2.71 seconds
Compute duration = 2.71 seconds
parameters = mu_i[1], mu_i[2], mu_i[3], sigma
internals = lp, n_steps, is_accept, acceptance_rate, log_density,
hamiltonian_energy, hamiltonian_energy_error,
max_hamiltonian_energy_error, tree_depth, numerical_error,
step_size, nom_step_size
Summary Statistics
parameters mean std naive_se mcse ess rhat ⋯
Symbol Float64 Float64 Float64 Float64 Float64 Float64 ⋯
mu_i[1] 95.0648 4.1115 0.0291 0.0309 15506.3153 1.0000 ⋯
mu_i[2] 109.9797 4.0659 0.0288 0.0317 12564.5068 1.0000 ⋯
mu_i[3] 155.0112 4.0802 0.0289 0.0344 14992.6288 1.0000 ⋯
sigma 6.5279 2.6715 0.0189 0.0326 6467.7532 1.0001 ⋯
1 column omitted
Quantiles
parameters 2.5% 25.0% 50.0% 75.0% 97.5%
Symbol Float64 Float64 Float64 Float64 Float64
mu_i[1] 87.0204 92.7278 95.0066 97.3270 103.3776
mu_i[2] 101.8947 107.6774 109.9983 112.2714 117.9291
mu_i[3] 146.7623 152.6509 155.0004 157.3264 163.2994
sigma 3.4400 4.7723 5.9069 7.5275 13.3027