Tags: julia, probabilistic-programming

Changing Sampled Parameter Type in Turing.jl Model


This is my Julia code to simulate data and sample from a Turing.jl model:

using LinearAlgebra, Distributions, StatsBase
using Turing, FillArrays, DynamicHMC, LabelledArrays
using NNlib, GLM
using CSV, DataFrames

function generate_hmnl_data(R::Int=100, S::Int=30, C::Int=3,
    Theta::Array{Float64, 2}=ones(2, 4),
    Sigma::Array{Float64, 2}=Matrix(Diagonal(fill(0.1, 4))))
    K = size(Theta, 2)
    G = size(Theta, 1)
    Y = Array{Int64}(undef, R, S)
    X = randn(R, S, C, K)
    Z = Array{Float64}(undef, G, R)
    Z[1, :] .= 1
    if G > 1
        Z[2:G, :] = randn(G - 1, R)
    end
    Beta = Array{Float64}(undef, K, R)
    for r in 1:R
        println(Z[:, r])
        println(Theta)
        Beta[:, r] = rand(MvNormal(Theta' * Z[:, r], Sigma))
        for s in 1:S
            Y[r, s] = sample(1:C, Weights(exp.(X[r, s, :, :] * Beta[:, r])))
        end
    end
    return (R=R, S=S, C=C, K=K, G=G, Y=Y, X=X, Z=Z,
    beta_true=Beta, Theta_true=Theta, Sigma_true=Sigma)
end
d1 = generate_hmnl_data()

@model function hmnl(G::Int, Y::Matrix{Int64}, X::Array{Float64}, Z::Matrix{Float64})
    R, S, C, K = size(X)
    Theta = zeros(K, G)
    for k in 1:K
        for g in 1:G
            Theta[k, g] ~ Normal(0, 10)
        end
    end
    Sigma ~ InverseWishart(K, diagm(ones(K)))
    Beta = zeros(K, R)
    println(eltype(Beta))
    for r in 1:R
        Beta[:, r] ~ MvNormal(Theta * Z[:, r], Sigma)
        println(typeof(Beta[:, r]))
        for s in 1:S
            beta_r = copy(Beta[:, r])
            beta_r = convert(Vector{Float64}, beta_r)
            ut_rs = X[r, s, :, :] * beta_r 
            v = softmax(ut_rs)
            Y[r, s] ~ Categorical(v)
        end
    end
end

sampler = HMC(0.05, 10)
test_mod = hmnl(d1.G, d1.Y, d1.X, d1.Z)
chains = sample(test_mod, sampler, 1_000)

I get this error when I try to sample from the model:

MethodError: no method matching float(::Type{Any})

The sampling statement Beta[:, r] ~ MvNormal(Theta * Z[:, r], Sigma) changes Beta[:, r] to a Vector{Any}.
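
The float(::Type{Any}) part of the message is reproducible on its own: Base has no float method for the type Any, and that is presumably what gets hit downstream once the container's element type has widened to Any:

float(Int)   # Float64
float(Any)   # ERROR: MethodError: no method matching float(::Type{Any})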

I have tried converting the slice explicitly:

beta_r = copy(Beta[:, r])
beta_r = convert(Vector{Float64}, beta_r)
ut_rs = X[r, s, :, :] * beta_r

But then I get this error instead:

ERROR: TypeError: in typeassert, expected Float64, got a value of type ForwardDiff.Dual{Nothing, Float64, 12}
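
This looks like a general ForwardDiff constraint rather than anything specific to my model: while HMC computes gradients, the model runs with ForwardDiff.Dual numbers in place of Float64, and forcing them into Float64 storage fails. A toy reproduction with ForwardDiff alone (the function f and buffer buf here are just for illustration):

using ForwardDiff

buf = zeros(2)           # Float64 storage, like Beta in the model
function f(v)
    buf .= v             # under ForwardDiff.gradient, v holds Dual numbers
    return sum(abs2, buf)
end
ForwardDiff.gradient(f, [1.0, 2.0])  # fails: a Dual cannot be stored in a Float64 array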

So the conversion is clashing with Turing's automatic differentiation. I'm new to Turing and can't figure out the right way to do this.


Solution

  • I'm reposting an answer from Tor Fjelde (https://github.com/torfjelde) which I received on GitHub. For Turing to work, you need to ensure that the types in your model can be inferred, and I wasn't doing that; see https://turing.ml/v0.22/docs/using-turing/performancetips#ensure-that-types-in-your-model-can-be-inferred. A short usage note follows the function below.

    This function worked:

    @model function hmnl(G::Int, Y::Matrix{Int64}, X::Array{Float64}, Z::Matrix{Float64}, ::Type{T} = Float64) where {T}
        # T is Float64 in ordinary evaluation but becomes a ForwardDiff.Dual type
        # while HMC computes gradients, so the buffers below keep a concrete eltype
        R, S, C, K = size(X)
        Theta = zeros(T, K, G)
        for k in 1:K
            for g in 1:G
                Theta[k, g] ~ Normal(0, 10)
            end
        end
        Sigma ~ InverseWishart(K, diagm(ones(K)))
        Beta = zeros(T, K, R)
        println(eltype(Beta))  # prints T: a Dual type during gradient evaluation
        for r in 1:R
            Beta[:, r] ~ MvNormal(Theta * Z[:, r], Sigma)
            println(typeof(Beta[:, r]))  # now Vector{T} rather than Vector{Any}
            for s in 1:S
                ut_rs = X[r, s, :, :] * Beta[:, r]
                v = softmax(ut_rs)
                Y[r, s] ~ Categorical(v)
            end
        end
    end
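
    With the type parameter in place, the original sampling calls from the question run unchanged; T defaults to Float64 and is swapped for a dual-number type only while gradients are being taken:

    test_mod = hmnl(d1.G, d1.Y, d1.X, d1.Z)
    chains = sample(test_mod, HMC(0.05, 10), 1_000)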