# Run a model to estimate the intercept and slope of the linear relationship
# y = m*x + c, assuming normally distributed observation errors for y:
library(runjags)
# Simulate the data
x <- 1:100
y <- rnorm(length(x), 2*x + 10, 1)
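# Optional sanity check (not part of the original example): an ordinary least
# squares fit of the simulated data should recover a slope near 2 and an
# intercept near 10
summary(lm(y ~ x))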
# Model in the JAGS format
model <- "model {
    for(i in 1:n) {
        y[i] ~ dnorm(true.y[i], precision);
        true.y[i] <- (m * x[i]) + c;
    }
    m ~ dunif(-1000, 1000);
    c ~ dunif(-1000, 1000);
    precision ~ dexp(1);
}"
# Use dump.format to convert the data and initial values into character
# strings in the R dump format
data <- dump.format(c("x", "y", "n"), list(x, y, length(x)))
inits1 <- dump.format(c("m", "c", "precision"), list(1, 1, 1))
inits2 <- dump.format(c("m", "c", "precision"), list(0.1, 10, 1))
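# Optionally inspect the generated dump-format text (an added check, assuming
# dump.format returns plain character strings as described in its help page)
cat(data)
cat(inits1)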
# Run the model
results <- run.jags(data=data, model=model, inits=c(inits1,
inits2), monitor=c("m", "c", "precision"))
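# Convergence of the two chains could be checked with the coda package; this
# is a sketch added here under the assumption that each element of results is
# a matrix of samples coercible to an mcmc object, which may differ by
# runjags version:
# library(coda)
# gelman.diag(mcmc.list(mcmc(results[[1]]), mcmc(results[[2]])))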
# Analyse the results: pool the two chains for each monitored variable
# (columns follow the order of the monitor argument: m, c, precision)
m <- summary(c(results[[1]][,1], results[[2]][,1]))   # slope
c <- summary(c(results[[1]][,2], results[[2]][,2]))   # intercept
p <- summary(c(results[[1]][,3], results[[2]][,3]))   # precision
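# The data were simulated with slope 2, intercept 10 and observation sd 1
# (i.e. precision 1), so the posterior summaries for m, c and precision
# should be centred close to those values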