
Getting an error while translating an OpenBUGS model into JAGS


I want to estimate the betas for a Cox regression model using OpenBUGS code. I modified the example code: the example has only one beta, while I need a varying number of betas, depending on the model I feed it.

This is my OpenBUGS model; it runs as expected:

bugsmodel <- function(){
# Set up data
for(i in 1:N) {
  for(j in 1:T) {
    Y[i,j] <- step(obs.t[i] - t[j] + eps)
    dN[i, j] <- Y[i, j] * step(t[j + 1] - obs.t[i] - eps) * fail[i]
  }
}
# Model
for(i in 1:N){
  betax[i,1] <- 0
  for(k in 2:p+1){
    betax[i,k] <- betax[i,k-1] + beta[k-1]*x[i,k-1]
  }
}
for(j in 1:T) {
  for(i in 1:N) {
    dN[i, j] ~ dpois(Idt[i, j]) # Likelihood
    Idt[i, j] <- Y[i, j] * exp(betax[i,p+1]) * dL0[j] # Intensity
  }
  dL0[j] ~ dgamma(mu[j], c)
  mu[j] <- dL0.star[j] * c # prior mean hazard
}
c <- 0.001
r <- 0.1
for (j in 1 : T) {
  dL0.star[j] <- r * (t[j + 1] - t[j])
} 
for(k in 1:p){
  beta[k] ~ dnorm(0.0,0.000001)
}
}

However, after I modified its syntax to run in JAGS, it gives me an error about redefining a node:

model_jags <- "model{
  # Set up data
for(i in 1:N) {
  for(j in 1:T) {
  Y[i,j] <- step(obs.t[i] - t[j] + eps)
  dN[i, j] <- Y[i, j] * step(t[j + 1] - obs.t[i] - eps) * fail[i]
  }
}
# Model
for(i in 1:N){
  betax[i,1] <- 0
  for(k in 2:p+1){
    betax[i,k] <- betax[i,k-1] + beta[k-1]*x[i,k-1]
  }
}
for(j in 1:T) {
  for(i in 1:N) {
  dN[i, j] ~ dpois(Idt[i, j]) # Likelihood
  Idt[i, j] <- Y[i, j] * exp(betax[i,p+1]) * dL0[j] # Intensity
  }
  dL0[j] ~ dgamma(mu[j], c)
  mu[j] <- dL0.star[j] * c # prior mean hazard
}
c <- 0.001
r <- 0.1
for (j in 1 : T) {
  dL0.star[j] <- r * (t[j + 1] - t[j])
} 
for(k in 1:p){
  beta[k] ~ dnorm(0.0,0.000001)
}
}"

Testing code:

n = 100
round=2
x1 = rbinom(n,size=1,prob=0.5)
x2 = rbinom(n,size=1,prob=0.5)
x3 = rbinom(n,size=1,prob=0.5)
x = t(rbind(x1,x2,x3))
censortime = runif(n,0,1)
survtime= rexp(n,rate=exp(x1+2*x2+3*x3))
survtime = round(survtime,digits=round)
event = as.numeric(censortime>survtime)
y = survtime; 
y[event==0] = censortime[event==0]
t=sort(unique(y[event==1]))
t=c(t,max(censortime))
bigt=length(t)-1
#####################################
model=c(1,1,1)
x <- x[,model==1]
p <- sum(model) # number of betas: covariates flagged with 1 in 'model'
params <- c("beta","dL0")
data <- list(x=x,obs.t=y,t=t,T=bigt,N=n,fail=event,eps=1E-10,p=p)
inits <-  function(){list( beta = rep(0,p), dL0 = rep(0.0001,bigt))}

library(rjags)

jags <- jags.model(textConnection(model_jags),
                   data = data,
                   n.chains = 1,
                   n.adapt = 100)

Solution

  • You need two modifications to your model code:

    1) The data transformation at the top should be done in a separate data{} block in JAGS. Inside model{}, dN[i,j] is defined once with <- and then also given a distribution with ~ dpois(...), and JAGS does not allow a node to be defined twice; that is what produces the error about redefinition of node dN.

    2) The loop index:

    for(k in 2:p+1){
    

    is, due to operator precedence (: binds more tightly than +), the same as:

    for(k in (2:p)+1){
    

    But I guess it should be:

    for(k in 2:(p+1)){
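
    A quick way to check that precedence at the R prompt, using p = 3 as in your testing code (the model parser resolves 2:p+1 the same way):

    p <- 3
    2:p + 1     # parsed as (2:p) + 1, giving 3 4: the k = 2 step (beta[1]*x[i,1]) is skipped
    2:(p + 1)   # gives 2 3 4, the intended index range

    With 2:(p+1), betax[i,p+1] accumulates all p beta terms, which is what the intensity term exp(betax[i,p+1]) expects.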
    

    With these two modifications, the following model code works for me with your testing code:

    model_jags <- "
    data{
      # Set up data
    for(i in 1:N) {
      for(j in 1:T) {
      Y[i,j] <- step(obs.t[i] - t[j] + eps)
      dN[i, j] <- Y[i, j] * step(t[j + 1] - obs.t[i] - eps) * fail[i]
      }
    }
    }
    
    # Model
    model{
    for(i in 1:N){
      betax[i,1] <- 0
      for(k in 2:(p+1)){
        betax[i,k] <- betax[i,k-1] + beta[k-1]*x[i,k-1]
      }
    }
    for(j in 1:T) {
      for(i in 1:N) {
      dN[i, j] ~ dpois(Idt[i, j]) # Likelihood
      Idt[i, j] <- Y[i, j] * exp(betax[i,p+1]) * dL0[j] # Intensity
      }
      dL0[j] ~ dgamma(mu[j], c)
      mu[j] <- dL0.star[j] * c # prior mean hazard
    }
    c <- 0.001
    r <- 0.1
    for (j in 1 : T) {
      dL0.star[j] <- r * (t[j + 1] - t[j])
    } 
    for(k in 1:p){
      beta[k] ~ dnorm(0.0,0.000001)
    }
    }"
    

    Hope that helps,

    Matt