Metropolis(bayesianSetup, settings = list(startValue = NULL, optimize = T,
  proposalGenerator = NULL, consoleUpdates = 100, burnin = 0, thin = 1,
  parallel = NULL, adapt = T, adaptationInterval = 500,
  adaptationNotBefore = 3000, DRlevels = 1, proposalScaling = NULL,
  adaptationDepth = NULL, temperingFunction = NULL, gibbsProbabilities = NULL,
  message = TRUE))
bayesianSetup: either an object of class BayesianSetup created with createBayesianSetup (recommended), or a log target function.

settings: list of settings for the sampler; the possible entries are shown in the usage above. An optional proposal generator object, created with createProposalGenerator, can be supplied via proposalGenerator.

The Metropolis function is the main function for all Metropolis-based samplers in this package. To call one of the derivatives of the basic Metropolis-Hastings MCMC, you can either use the corresponding function (e.g. AM for an adaptive Metropolis sampler) or use the parameters to adapt the basic Metropolis-Hastings. The advantage of the latter is that you can easily combine different properties (e.g. adaptive sampling and delayed rejection sampling) without changing the function.
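As a rough sketch (not part of the original help text), the settings listed in the usage above can be combined to obtain a DRAM-style sampler, i.e. covariance adaptation plus a delayed-rejection stage; the iteration count and adaptation values below are illustrative only.

library(BayesianTools)

# Illustrative target: a two-dimensional standard-normal log-density
ll = function(x) sum(dnorm(x, log = TRUE))
setup = createBayesianSetup(ll, lower = c(-10, -10), upper = c(10, 10))

# Combine adaptive sampling and delayed rejection purely via settings
settings = list(iterations = 10000,         # illustrative run length
                adapt = TRUE,               # switch on covariance adaptation
                adaptationNotBefore = 3000, # start adapting after 3000 iterations
                adaptationInterval = 500,   # re-adapt every 500 iterations
                DRlevels = 2)               # two proposal levels = delayed rejection

out = runMCMC(bayesianSetup = setup, sampler = "Metropolis", settings = settings)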
References:

Haario, Heikki, et al. (2006). DRAM: efficient adaptive MCMC. Statistics and Computing 16(4), 339-354.

Hastings, W. K. (1970). Monte Carlo sampling methods using Markov chains and their applications. Biometrika 57(1), 97-109.

Green, Peter J., and Antonietta Mira (2001). Delayed rejection in reversible jump Metropolis-Hastings. Biometrika, 1035-1053.

Metropolis, N., A. W. Rosenbluth, M. N. Rosenbluth, A. H. Teller, and E. Teller (1953). Equation of state calculations by fast computing machines. The Journal of Chemical Physics 21(6), 1087-1092.
# Running the Metropolis sampler via runMCMC, with a proposal covariance
# estimated from samples of the prior (can be useful for complicated priors)
library(BayesianTools)

ll = function(x) sum(dnorm(x, log = TRUE))
setup = createBayesianSetup(ll, lower = c(-10,-10), upper = c(10,10))

# Draw from the prior and use these samples to tune the proposal covariance
samples = setup$prior$sampler(1000)
generator = createProposalGenerator(diag(1, setup$numPars))
generator = updateProposalGenerator(generator, samples, manualScaleAdjustment = 1, message = TRUE)

# Pass the tuned proposal generator to the Metropolis sampler
settings = list(proposalGenerator = generator, optimize = FALSE, iterations = 500)
out = runMCMC(bayesianSetup = setup, sampler = "Metropolis", settings = settings)
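A short usage note (added here, not from the original example): the returned sampler object can be inspected with the package's generic helpers.

summary(out)               # numerical summary of the marginal posteriors
plot(out)                  # trace plots and marginal densities
posterior = getSample(out) # extract the sample matrix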