In [1]:
using Random, Distributions, DataFrames, Turing, StatsPlots, Plots

Bayesian designs for Turing library

In [7]:
## This first macro is similar to that of the first tutorial

#=  Define a macro function for Turing.jl
    For how to use this model, please refer to the first tutorial of this course
    Inside this function, we define priors and sampling distributions 
=#

@model function binaryTrial(y::Vector{Int64}, n::Int64, a::Int64, b::Int64) 
    m ~ Beta(a, b)
    y[1] ~ Binomial(n, m)
end

## Simulate sampled data
# n = number of patients in a single arm
# p = TRUE probability of success rate given this treatment
N = 20
p = 0.6
d = Binomial(N, p)
## One trial
data = rand(d, 1)

## Complete the function by passing the data into it. Now the object "model" contains all model specifications
## a = 1, b = 1 for a flat prior
model = binaryTrial(data, N, 1, 1)

## Run MCMC
chain = sample(model, NUTS(), 1000)

## Plot posterior
histogram(chain[:m], bins=30, alpha=0.4, label="A", normalize = true) 
plot!(Beta(data[1] + 1, N - data[1] + 1), labels = "Beta($(data[1]+1), $(N - data[1] + 1))")
┌ Info: Found initial step size
│   ϵ = 1.6
└ @ Turing.Inference /Users/caesar/.julia/packages/Turing/rl6ku/src/inference/hmc.jl:188
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
Sampling: 100%|█████████████████████████████████████████| Time: 0:00:00
Out[7]:

Binary endpoint, subject level data

In [31]:
@model function binaryTrial(y::Vector{Int64}, a::Int64, b::Int64) 
    # Prior model for p
    p ~ Beta(a, b)
    
    n = size(y)[1]
    for i in 1:n 
        y[i] ~ Binomial(1, p)
    end  

end

## Simulate sampled data
# n = number of patients in a single arm
# p = TRUE probability of success rate given this treatment
N = 10
p = 0.6
d = Binomial(1, p)
## One trial as a vector of Bernoulli 0 and 1's
Random.seed!(5)
data = rand(d, N)

## Complete the function by passing the data into it. Now the object "model" contains all model specifications
## a = 1, b = 1 for a flat prior
model = binaryTrial(data, 1, 1)

## Run MCMC
chain = sample(model, NUTS(), 1000)

## Plot posterior
## With the seed we set, data has 8 success out of 10 data points, 
## so the posterior is heavy around 0.8
histogram(chain[:p], bins=30, alpha=0.4, label="A", normalize = true)
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Info: Found initial step size
│   ϵ = 3.2
└ @ Turing.Inference /Users/caesar/.julia/packages/Turing/rl6ku/src/inference/hmc.jl:188
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
┌ Warning: The current proposal will be rejected due to numerical error(s).
│   isfinite.((θ, r, ℓπ, ℓκ)) = (true, false, false, false)
└ @ AdvancedHMC /Users/caesar/.julia/packages/AdvancedHMC/kB7Xa/src/hamiltonian.jl:47
Out[31]:

Continuous endpoint of Gaussian mixture model

In [5]:
# fieldnames(typeof(chain))
@model function continuousTrial(y::Vector{Float64}, a::Int64, b::Int64, K::Int64, ::Type{T} = Float64) where {T}
    n = size(y)[1]
    
    # Prior for population standard deviation, sigma
     ~ InverseGamma(a, b)
    
    mu_group_raw ~ filldist(Normal(0, sqrt(10)), K)
    
    ## ensure ordering to prevent label switching
    mu_group = sort(mu_group_raw)
    
    #lambda = Vector{T}(undef, K)
    #for i = 1:K
    #    lambda[i] ~ Dirichlet(ones(K)/K)
    #end
    lambda ~ Dirichlet(ones(K)/K)
    
    mu = Vector{Any}(undef, n)
    group = Vector{Any}(undef, n)
    for i in 1:n
        ## Categorical should be a vector p specifying prob for each category
        ## If an integer is specified, then they're given equal probs. 
        group[i] ~ Categorical(lambda)
        mu[i] = mu_group[group[i]]
        y[i] ~ Normal(mu[i], sqrt())  
    end
    
end

## Assume two groups
K=2
## c(rnorm(500, 0, 1), rnorm(500, 5, 1)),
data = vcat(rand(Normal(0, 1), 10), rand(Normal(5, 1), 10))

## Complete the function by passing the data into it. Now the object "model" contains all model specifications
## a = 1, b = 1 for a flat prior
model = continuousTrial(data, 2, 3, K)

## Run MCMC
chain = sample(model, NUTS(), 200)

Ordinal endpoints

Time-to-event endpoint, blinded data

In [19]:
# fieldnames(typeof(chain))
@model function ordinalTrial(event::Vector{Int64}, time::Vector{Float64}, eta::Float64,
                                median_ctrl_lwd::Float64, median_ctrl_upd::Float64, ::Type{T} = Float64) where {T}
    n = size(event)[1]
    
    # Prior for hazard ratio
    hr ~ Uniform(hr_lbd, hr_upd)
    medianC ~ Uniform(median_ctrl_lwd, median_ctrl_upd)
    
    medianT = medianC / hr
    lambdaT = -log(1-0.5)/medianT
    lambdaC = log(2) / medianC
    
    for i in 1:n
        ## weibull likelihood for control arm
        like_ctrl[i] = exp(event[i] * (log(alpha) + alpha * log(lambdaC) + (alpha-1)*log(time[i])) - (lambdaC * time[i])^alpha)
        ## exponential likelihood for treatment arm
        like_trt[i] <- exp( -1 * time[i] * lambdaT + log(lambdaT) * event[i])

        ## definie log-likelihood using zero tricks
        phi[i] <- 100000 - log( eta*like_ctrl[i] + (1-eta)*like_trt[i] )
        zeros[i] ~ dpois(phi[i])
    end
    
end

## Assume two groups
K=2
## c(rnorm(500, 0, 1), rnorm(500, 5, 1)),

n = 400
rratio = 1
eta = rratio/(1+rratio)
## shape α=1 and unit scale
time = vcat(rand(Weibull(1,1), 198), rand(Exponential(1), 202))
event = rand(Binomial(1, 0.5), n)

## Complete the function by passing the data into it. Now the object "model" contains all model specifications
## a = 1, b = 1 for a flat prior
model = ordinalTrial(event, time, eta, 10.0, 40.0)

## Run MCMC
chain = sample(model, NUTS(100, 0.65), 200)
UndefVarError: hr_lbd not defined

Stacktrace:
  [1] ordinalTrial(__model__::DynamicPPL.Model{typeof(ordinalTrial), (:event, :time, :eta, :median_ctrl_lwd, :median_ctrl_upd, :T), (:T,), (), Tuple{Vector{Int64}, Vector{Float64}, Float64, Float64, Float64, DataType}, Tuple{DataType}, DynamicPPL.DefaultContext}, __varinfo__::DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, __context__::DynamicPPL.SamplingContext{DynamicPPL.SampleFromUniform, DynamicPPL.DefaultContext, Random._GLOBAL_RNG}, event::Vector{Int64}, time::Vector{Float64}, eta::Float64, median_ctrl_lwd::Float64, median_ctrl_upd::Float64, ::Type{Float64})
    @ Main ./In[19]:7
  [2] macro expansion
    @ ~/.julia/packages/DynamicPPL/qCW6L/src/model.jl:489 [inlined]
  [3] _evaluate!!(model::DynamicPPL.Model{typeof(ordinalTrial), (:event, :time, :eta, :median_ctrl_lwd, :median_ctrl_upd, :T), (:T,), (), Tuple{Vector{Int64}, Vector{Float64}, Float64, Float64, Float64, DataType}, Tuple{DataType}, DynamicPPL.DefaultContext}, varinfo::DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, context::DynamicPPL.SamplingContext{DynamicPPL.SampleFromUniform, DynamicPPL.DefaultContext, Random._GLOBAL_RNG})
    @ DynamicPPL ~/.julia/packages/DynamicPPL/qCW6L/src/model.jl:472
  [4] evaluate_threadunsafe!!
    @ ~/.julia/packages/DynamicPPL/qCW6L/src/model.jl:447 [inlined]
  [5] evaluate!!
    @ ~/.julia/packages/DynamicPPL/qCW6L/src/model.jl:404 [inlined]
  [6] evaluate!!
    @ ~/.julia/packages/DynamicPPL/qCW6L/src/model.jl:415 [inlined]
  [7] Model
    @ ~/.julia/packages/DynamicPPL/qCW6L/src/model.jl:377 [inlined]
  [8] VarInfo
    @ ~/.julia/packages/DynamicPPL/qCW6L/src/varinfo.jl:127 [inlined]
  [9] VarInfo
    @ ~/.julia/packages/DynamicPPL/qCW6L/src/varinfo.jl:126 [inlined]
 [10] step(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(ordinalTrial), (:event, :time, :eta, :median_ctrl_lwd, :median_ctrl_upd, :T), (:T,), (), Tuple{Vector{Int64}, Vector{Float64}, Float64, Float64, Float64, DataType}, Tuple{DataType}, DynamicPPL.DefaultContext}, spl::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{40}, (), AdvancedHMC.DiagEuclideanMetric}}; resume_from::Nothing, kwargs::Base.Pairs{Symbol, Int64, Tuple{Symbol}, NamedTuple{(:nadapts,), Tuple{Int64}}})
    @ DynamicPPL ~/.julia/packages/DynamicPPL/qCW6L/src/sampler.jl:81
 [11] macro expansion
    @ ~/.julia/packages/AbstractMCMC/6aLyN/src/sample.jl:124 [inlined]
 [12] macro expansion
    @ ~/.julia/packages/ProgressLogging/6KXlp/src/ProgressLogging.jl:328 [inlined]
 [13] (::AbstractMCMC.var"#21#22"{Bool, String, Nothing, Int64, Int64, Base.Pairs{Symbol, Int64, Tuple{Symbol}, NamedTuple{(:nadapts,), Tuple{Int64}}}, Random._GLOBAL_RNG, DynamicPPL.Model{typeof(ordinalTrial), (:event, :time, :eta, :median_ctrl_lwd, :median_ctrl_upd, :T), (:T,), (), Tuple{Vector{Int64}, Vector{Float64}, Float64, Float64, Float64, DataType}, Tuple{DataType}, DynamicPPL.DefaultContext}, DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{40}, (), AdvancedHMC.DiagEuclideanMetric}}, Int64, Int64})()
    @ AbstractMCMC ~/.julia/packages/AbstractMCMC/6aLyN/src/logging.jl:11
 [14] with_logstate(f::Function, logstate::Any)
    @ Base.CoreLogging ./logging.jl:511
 [15] with_logger(f::Function, logger::LoggingExtras.TeeLogger{Tuple{LoggingExtras.EarlyFilteredLogger{ConsoleProgressMonitor.ProgressLogger, AbstractMCMC.var"#1#3"{Module}}, LoggingExtras.EarlyFilteredLogger{Base.CoreLogging.SimpleLogger, AbstractMCMC.var"#2#4"{Module}}}})
    @ Base.CoreLogging ./logging.jl:623
 [16] with_progresslogger(f::Function, _module::Module, logger::Base.CoreLogging.SimpleLogger)
    @ AbstractMCMC ~/.julia/packages/AbstractMCMC/6aLyN/src/logging.jl:34
 [17] macro expansion
    @ ~/.julia/packages/AbstractMCMC/6aLyN/src/logging.jl:10 [inlined]
 [18] mcmcsample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(ordinalTrial), (:event, :time, :eta, :median_ctrl_lwd, :median_ctrl_upd, :T), (:T,), (), Tuple{Vector{Int64}, Vector{Float64}, Float64, Float64, Float64, DataType}, Tuple{DataType}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{40}, (), AdvancedHMC.DiagEuclideanMetric}}, N::Int64; progress::Bool, progressname::String, callback::Nothing, discard_initial::Int64, thinning::Int64, chain_type::Type, kwargs::Base.Pairs{Symbol, Int64, Tuple{Symbol}, NamedTuple{(:nadapts,), Tuple{Int64}}})
    @ AbstractMCMC ~/.julia/packages/AbstractMCMC/6aLyN/src/sample.jl:115
 [19] sample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(ordinalTrial), (:event, :time, :eta, :median_ctrl_lwd, :median_ctrl_upd, :T), (:T,), (), Tuple{Vector{Int64}, Vector{Float64}, Float64, Float64, Float64, DataType}, Tuple{DataType}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{40}, (), AdvancedHMC.DiagEuclideanMetric}}, N::Int64; chain_type::Type, resume_from::Nothing, progress::Bool, nadapts::Int64, discard_adapt::Bool, discard_initial::Int64, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ Turing.Inference ~/.julia/packages/Turing/rl6ku/src/inference/hmc.jl:133
 [20] sample
    @ ~/.julia/packages/Turing/rl6ku/src/inference/hmc.jl:116 [inlined]
 [21] #sample#2
    @ ~/.julia/packages/Turing/rl6ku/src/inference/Inference.jl:145 [inlined]
 [22] sample
    @ ~/.julia/packages/Turing/rl6ku/src/inference/Inference.jl:145 [inlined]
 [23] #sample#1
    @ ~/.julia/packages/Turing/rl6ku/src/inference/Inference.jl:135 [inlined]
 [24] sample(model::DynamicPPL.Model{typeof(ordinalTrial), (:event, :time, :eta, :median_ctrl_lwd, :median_ctrl_upd, :T), (:T,), (), Tuple{Vector{Int64}, Vector{Float64}, Float64, Float64, Float64, DataType}, Tuple{DataType}, DynamicPPL.DefaultContext}, alg::NUTS{Turing.Essential.ForwardDiffAD{40}, (), AdvancedHMC.DiagEuclideanMetric}, N::Int64)
    @ Turing.Inference ~/.julia/packages/Turing/rl6ku/src/inference/Inference.jl:135
 [25] top-level scope
    @ In[19]:45
 [26] eval
    @ ./boot.jl:373 [inlined]
 [27] include_string(mapexpr::typeof(REPL.softscope), mod::Module, code::String, filename::String)
    @ Base ./loading.jl:1196
In [16]:
1:5
Out[16]:
1:5
In [61]:
??NUTS
search: NUTS ANUTS DynamicNUTS ncodeunits InteractiveUtils

Out[61]:
NUTS(n_adapts::Int, δ::Float64; max_depth::Int=5, Δ_max::Float64=1000.0, init_ϵ::Float64=0.0)

No-U-Turn Sampler (NUTS) sampler.

Usage:

NUTS()            # Use default NUTS configuration.
NUTS(1000, 0.65)  # Use 1000 adaption steps, and target accept ratio 0.65.

Arguments:

  • n_adapts::Int : The number of samples to use with adaptation.
  • δ::Float64 : Target acceptance rate for dual averaging.
  • max_depth::Int : Maximum doubling tree depth.
  • Δ_max::Float64 : Maximum divergence during doubling tree.
  • init_ϵ::Float64 : Inital step size; 0 means automatically searching using a heuristic procedure.
In [5]:
Categorical(5)
Out[5]:
Categorical{Float64, Vector{Float64}}(
support: Base.OneTo(5)
p: [0.2, 0.2, 0.2, 0.2, 0.2]
)