# Run a model to estimate the intercept (c) and slope (m) of the linear
# relationship y = m*x + c, assuming normally distributed observation errors for y:
library(runjags)

# Simulate the data
X <- 1:100
Y <- rnorm(length(X), 2*X + 10, 1)
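# As a quick sanity check (not part of the original example), an ordinary
# least-squares fit should roughly recover the true intercept (10) and slope (2):
ols <- lm(Y ~ X)
coef(ols)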
# Model in the JAGS format
model <- "model {
for(i in 1 : N){
Y[i] ~ dnorm(true.y[i], precision);
true.y[i] <- (m * X[i]) + c;
}
m ~ dunif(-1000,1000);
c ~ dunif(-1000,1000);
precision ~ dexp(1);
}"
# Use dump.format to convert the data and initial values into the R dump format
data <- dump.format(list(X=X, Y=Y, N=length(X)))
inits1 <- dump.format(list(m=1, c=1, precision=1))
inits2 <- dump.format(list(m=0.1, c=10, precision=1))
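# dump.format() returns a character string in the R dump format; printing one
# of the strings shows what will be passed to JAGS:
cat(inits1)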
# Run the model
results <- run.jags(model=model, monitor=c("m", "c", "precision"),
                    data=data, n.chains=2, inits=c(inits1, inits2))
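# If convergence is poor (e.g. a high psrf / Gelman-Rubin value in the
# summary), the sampler can be continued from its current state; a minimal
# sketch using extend.jags():
results.extended <- extend.jags(results, sample=5000)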
# Analyse the results
summary(results)
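# Trace and density plots for visual convergence checking (plot methods are
# provided by runjags):
plot(results)

# The monitored precision can be converted back to a residual standard
# deviation on the pooled posterior samples; combine.mcmc() is from runjags:
posterior <- combine.mcmc(results)
summary(1 / sqrt(posterior[, "precision"]))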