library(sns)
library(MfUSampler)
library(RegressionFactory) # provides regfac.expand.1par and fbase1.geometric.logit
# use the expander framework and base distributions from RegressionFactory
# to define the log-likelihood function for geometric regression
loglike.geometric <- function(beta, X, y, fgh) {
  regfac.expand.1par(beta, X, y, fbase1.geometric.logit, fgh)
}
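# note (added for clarity, per the RegressionFactory expander convention):
# fgh = 0 returns the log-likelihood value only, fgh = 1 also returns the
# gradient (element g), and fgh = 2 additionally returns the Hessian (element h)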
# generate data for geometric regression
N <- 1000
K <- 5
X <- matrix(runif(N * K, min = -0.5, max = +0.5), ncol = K)
beta <- runif(K, min = -0.5, max = +0.5)
y <- rgeom(N, prob = 1 / (1 + exp(-X %*% beta)))
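# illustrative sanity check (not part of the original example): evaluate the
# expanded log-likelihood at the true coefficients and inspect its structure
ll.chk <- loglike.geometric(beta, X, y, fgh = 2)
str(ll.chk)  # expect elements f (scalar), g (length K), h (K x K)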
# MCMC sampling of the log-likelihood, using three different samplers
nsmp <- 100
# Slice Sampler (the default univariate sampler in MfU.Sample)
beta.smp <- array(NA, dim = c(nsmp, K))
beta.tmp <- rep(0, K)
for (n in 1:nsmp) {
  # fgh = 0: the slice sampler only needs log-density evaluations
  beta.tmp <- MfU.Sample(beta.tmp
    , f = loglike.geometric, X = X, y = y, fgh = 0)
  beta.smp[n, ] <- beta.tmp
}
beta.slice <- colMeans(beta.smp[(nsmp/2+1):nsmp,])
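# optional visual check (illustrative, not in the original example): trace plot
# of the first coefficient from the slice sampler, with the true value as a
# dashed reference line
plot(beta.smp[, 1], type = "l", xlab = "iteration", ylab = "beta[1] (slice)")
abline(h = beta[1], lty = 2)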
# Adaptive Rejection Sampler
beta.smp <- array(NA, dim = c(nsmp, K))
beta.tmp <- rep(0, K)
for (n in 1:nsmp) {
  # for ARS, f must return the log-density when grad is FALSE
  # and its gradient when grad is TRUE
  beta.tmp <- MfU.Sample(beta.tmp, uni.sampler = "ars"
    , f = function(beta, X, y, grad) {
        if (grad)
          loglike.geometric(beta, X, y, fgh = 1)$g
        else
          loglike.geometric(beta, X, y, fgh = 0)
      }
    , X = X, y = y)
  beta.smp[n, ] <- beta.tmp
}
beta.ars <- colMeans(beta.smp[(nsmp/2+1):nsmp,])
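# illustrative extra (not in the original example): posterior standard
# deviations from the second half of the ARS draws
apply(beta.smp[(nsmp/2 + 1):nsmp, ], 2, sd)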
# SNS (Stochastic Newton Sampler)
beta.smp <- array(NA, dim = c(nsmp, K))
beta.tmp <- rep(0, K)
for (n in 1:nsmp) {
  # fgh = 2: SNS needs the gradient and Hessian; run in non-stochastic
  # (Newton-Raphson) mode for the first quarter of iterations via rnd = FALSE
  beta.tmp <- sns(beta.tmp, fghEval = loglike.geometric, X = X, y = y
    , fgh = 2, rnd = (n > nsmp/4))
  beta.smp[n, ] <- beta.tmp
}
beta.sns <- colMeans(beta.smp[(nsmp/2+1):nsmp,])
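# illustrative trace (not in the original example): first coefficient from SNS;
# the first quarter of iterations (rnd = FALSE) takes deterministic
# Newton-Raphson steps toward the mode before sampling begins
plot(beta.smp[, 1], type = "l", xlab = "iteration", ylab = "beta[1] (SNS)")
abline(v = nsmp/4, lty = 3)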
# compare posterior means (from the second half of the draws) with the true coefficients
cbind(beta, beta.sns, beta.slice, beta.ars)
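# illustrative summary (not in the original example): mean absolute error of
# each sampler's posterior-mean estimate relative to the true coefficients
sapply(list(sns = beta.sns, slice = beta.slice, ars = beta.ars),
       function(est) mean(abs(est - beta)))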