## random data points that come from a normal distribution
observed.data<-rnorm(15, 2, 4)

## Number of steps for MCMC
mcmcL<-50100

## Vectors to save the estimates of mu and sigma and P(M|D)
mu<-numeric(mcmcL)
sigma<-numeric(mcmcL)
pr.DataModel<-numeric(mcmcL)

## random starting value for mu from a uniform between -25 and 25;
## we could use almost anything here
mu[1]<-runif(1,-25,25)
sigma[1]<-runif(1,0.000001,25)

## initial probability: first term = likelihood, second and third
## terms = priors; we assume a normal prior on mu with mean 0 and
## sigma 1000, which is an uninformative prior, and an uninformative
## gamma prior for the sigma parameter
sigprior<-log(dgamma(sigma[1],shape=0.0001,scale=0.0001))
if (sigprior==-Inf){
  ## replace -Inf with the smallest positive finite double so the log posterior stays finite
  sigprior<-.Machine$double.xmin
}
pr.DataModel[1]<-sum(log(dnorm(observed.data, mu[1], sigma[1]))) +
  log(dnorm(mu[1], 0, 1000)) + sigprior

## alternative using a uniform prior for sigma
## pr.DataModel[1]<-sum(log(dnorm(observed.data, mu[1], sigma[1]))) +
##   log(dnorm(mu[1], 0, 1000)) + log(dunif(sigma[1], 0, 1000))

## Enter MCMC
for(i in 2:mcmcL){
  ## proposal of a candidate value for mu: a normal with mean = 0 and sigma = 10;
  ## and for sigma: a gamma with shape = 1 and rate = 0.5
  mu[i]<-rnorm(1, 0, 10)
  sigma[i]<-rgamma(1, 1, 0.5)

  ## calculate the probability of the data given the candidate value, mu[i],
  ## still using the old value of sigma
  sigprior<-log(dgamma(sigma[i-1],shape=0.0001,scale=0.0001))
  if (sigprior==-Inf){
    sigprior<-.Machine$double.xmin
  }
  pr.mu_cand<-sum(log(dnorm(observed.data, mu[i], sigma[i-1]))) +
    log(dnorm(mu[i], 0, 1000)) + sigprior

  ## grab the probability of the data given mu[i-1] (the previous value in the chain)
  pr.mu_old<-sum(log(dnorm(observed.data, mu[i-1], sigma[i-1]))) +
    log(dnorm(mu[i-1], 0, 1000)) + sigprior

  ## calculate the probability of drawing mu[i] and mu[i-1] from the
  ## candidate-generating (jump) function
  pr.sam_cand<-log(dnorm(mu[i], 0, 10))
  pr.sam_old<-log(dnorm(mu[i-1], 0, 10))

  ## calculate the Metropolis-Hastings ratio
  ## (use a difference in logs rather than a ratio to maintain numerical precision)
  mh<-(pr.mu_cand - pr.sam_cand) - (pr.mu_old - pr.sam_old)

  ## sample U from a random uniform (with defaults min = 0 and max = 1) and
  ## accept or reject the proposed value of mu:
  ## reject the candidate value mu[i] if the logical test is true
  if(log(runif(1)) > mh) mu[i]<-mu[i-1]

  ## Now do the same for sigma
  ### ------------------------------------------------
  ## calculate the probability of the data given the candidate value, sigma[i],
  ## still using the old value of mu
  sigprior<-log(dgamma(sigma[i],shape=0.0001,scale=0.0001))
  if (sigprior==-Inf){
    sigprior<-.Machine$double.xmin
  }
  pr.sig_cand<-sum(log(dnorm(observed.data, mu[i-1], sigma[i]))) +
    log(dnorm(mu[i-1], 0, 1000)) + sigprior

  ## grab the probability of the data given sigma[i-1] (the previous value in the chain)
  pr.sig_old<-pr.mu_old

  ## calculate the probability of drawing sigma[i] and sigma[i-1] from the
  ## candidate-generating (jump) function
  pr.sam_cand<-log(dgamma(sigma[i], 1, 0.5))
  pr.sam_old<-log(dgamma(sigma[i-1], 1, 0.5))

  ## calculate the Metropolis-Hastings ratio
  ## (use a difference in logs rather than a ratio to maintain numerical precision)
  mh<-(pr.sig_cand - pr.sam_cand) - (pr.sig_old - pr.sam_old)

  ## sample U from a random uniform (with defaults min = 0 and max = 1) and
  ## accept or reject the proposed value of sigma:
  ## reject the candidate value sigma[i] if the logical test is true
  if(log(runif(1)) > mh) sigma[i]<-sigma[i-1]
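  ## (added note, not in the original script) the two accept/reject steps above
  ## implement the Metropolis-Hastings rule on the log scale: a candidate is
  ## accepted with probability min(1, exp(mh)), i.e. whenever log(U) <= mh for
  ## U ~ Uniform(0,1). Because both proposals are drawn independently of the
  ## current state (an "independence sampler"), the Hastings correction is the
  ## ratio of proposal densities, which is why pr.sam_cand and pr.sam_old are
  ## subtracted from the candidate and old log posteriors, respectively.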
  ## Calculate p(M|D) with the current parameters and save it
  sigprior<-log(dgamma(sigma[i],shape=0.0001,scale=0.0001))
  if (sigprior==-Inf){
    sigprior<-.Machine$double.xmin
  }
  pr.DataModel[i]<-sum(log(dnorm(observed.data, mu[i], sigma[i]))) +
    log(dnorm(mu[i], 0, 1000)) + sigprior
}

### make some plots ----------------------------------
## evaluation of mixing in the chain
par(mar=c(4,4,1,0.5), mfrow=c(2,1))
plot(1:mcmcL, mu, type="l", ylab=expression(hat(mu)), xlab="MCMC steps")
plot(1:mcmcL, sigma, type="l", ylab=expression(hat(sigma)), xlab="MCMC steps")

## joint probability of mu and sigma
## library(MASS)
## persp(kde2d(mu,sigma))
library(MASS)
f<-kde2d(mu[101:mcmcL], sigma[101:mcmcL])
# x11()
persp(f$x, f$y, f$z, col="blue", theta=35, shade=0.3, phi=25, xlab="mu", ylab="sigma",
      zlab="probability", ticktype="detailed")
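## ---- added sketch (not in the original script): posterior summaries ----
## A minimal example of how the chain could be summarised, assuming the first
## 100 steps are discarded as burn-in (mirroring the 101:mcmcL window used for
## the density plot above); the burn-in length and the object names mu.post and
## sigma.post are assumptions for illustration, not part of the original code.
burnin<-100
mu.post<-mu[(burnin+1):mcmcL]
sigma.post<-sigma[(burnin+1):mcmcL]
## posterior means and 95% equal-tailed credible intervals
mean(mu.post)
quantile(mu.post, c(0.025, 0.975))
mean(sigma.post)
quantile(sigma.post, c(0.025, 0.975))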