# Try to find a non-negative vector m so that:
#   1. sqrt(m[1]^2 + m[2]^2) is close to 5
#   2. sqrt(m[1] * m[2])     is close to 3.5
#   3. 2 * m[1] + m[2]       is close to 10
# Observed data vector that the three modeled quantities should match:
data <- c(5, 3.5, 10)
# Define log-likelihood as -0.5 * sum of squared differences between
# modeled and true data
# Log-likelihood: -0.5 * sum of squared misfit between the modeled
# quantities and the observed data.
# model: length-2 parameter vector; data: length-3 observation vector.
# Returns a single (non-positive) log-likelihood value.
loglikelihood <- function(model, data) {
  predicted <- c(
    sqrt(sum(model^2)),      # Euclidean norm: sqrt(m1^2 + m2^2)
    sqrt(abs(prod(model))),  # geometric-mean term; abs() guards negative products
    sum(model * c(2, 1))     # linear combination: 2*m1 + m2
  )
  misfit <- predicted - data
  -0.5 * sum(misfit^2)
}
# A proposed model is generated by randomly picking a model parameter
# and perturbing it by a random number distributed normally according to sigma
# Propose a new model by perturbing ONE randomly chosen parameter with
# Gaussian noise of standard deviation sigma[w].
# x:     current model parameter vector.
# sigma: per-parameter proposal standard deviations (same length as x).
# Returns the perturbed model vector (same length as x).
generate <- function(x, sigma) {
  # sample.int() picks each index with equal probability and, unlike
  # ceiling(runif(1) * length(x)), can never produce the invalid index 0
  # (runif() may return its boundary values), which would silently skip
  # the perturbation.
  w <- sample.int(length(x), 1)
  x[w] <- x[w] + rnorm(1, 0, sigma[w])
  x
}
# Proposal distribution is defined as multivariate normal, with mean
# zero and standard deviations sigma:
# Log-density (up to an additive constant) of the proposal distribution:
# multivariate normal with mean zero and independent components whose
# standard deviations are given by sigma.
# x1, x2: the two model vectors being compared; sigma: proposal std devs.
logproposal <- function(x1, x2, sigma) {
  step <- x1 - x2
  # The 1e-12 offset protects against division by zero for zero entries
  # of sigma.
  -0.5 * sum(step^2 / (sigma + 1e-12)^2)
}
# logprior reflects prior knowledge that the answer is non-negative
# Improper log-prior encoding the constraint m >= 0: zero (no penalty)
# inside the non-negative orthant, -Inf (impossible) outside it.
logprior <- function(m) {
  if (any(m < 0)) {
    return(-Inf)
  }
  0
}
# Proposal standard deviations and starting model for the sampler.
sigma <- c(0.1, 0.1)
m1 <- c(0, 0)
# Run the Metropolis MCMC sampler (Metropolis() is supplied by the
# package this example belongs to, not defined in this script).
x <- Metropolis(loglikelihood, sigma, m1, niter = 5000, gen = generate,
                logproposal = logproposal, logprior = logprior,
                burn = 0, save_int = 1, data = data)
# The acceptance ratios reported above are high, which means the values
# in sigma are too small. The chain is roughly "optimally tuned" when
# sigma yields acceptance ratios of about 0.2 to 0.5.
# Scatter plot of sampled models: parameter 1 on x, parameter 2 on y.
# The initial walk away from m1 = (0, 0) toward the high-likelihood
# region can be discarded by setting 'burn' to a higher value.
plot(x$m[, 1], x$m[, 2], pch = ".", col = rainbow(nrow(x$m)))
# 2 x 2 panel of marginal histograms and pairwise scatter plots of the
# posterior samples. Note the strong covariance between the parameters!
par(mfrow = c(2, 2))
hist(x$m[, 1])
plot(x$m[, 2], x$m[, 1], pch = ".")
plot(x$m[, 1], x$m[, 2], pch = ".")
hist(x$m[, 2])
# Run the code above in your browser using DataLab