xs <- c(-3, 2)

### The likelihood

c_loglik <- function(theta){
  sum(dcauchy(x = xs, location = theta, log = TRUE))
}
c_loglik <- Vectorize(c_loglik)

curve(c_loglik, -abs(2 * min(xs)), abs(2 * max(xs)),
      xlab = expression(theta),
      lwd = 3,
      ylab = "Log-likelihood",
      main = "Cauchy example -- BC exercise 1.28")
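
## (Sketch, not part of the original script) With two observations this far
## apart, the Cauchy log-likelihood is bimodal, with one local maximum near
## each observation; optimize() over each half of the range recovers them.
optimize(c_loglik, interval = c(min(xs), mean(xs)), maximum = TRUE)
optimize(c_loglik, interval = c(mean(xs), max(xs)), maximum = TRUE)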

### Bayesian inference

#### Quadrature
Mu <- 0
Sigma <- 1

c_logposterior_kernel <- function(theta){
  ## log-prior plus log-likelihood, both on the log scale
  dnorm(x = theta, mean = Mu, sd = Sigma, log = TRUE) +
    c_loglik(theta)
}
c_logposterior_kernel <- Vectorize(c_logposterior_kernel)

## marginal likelihood via quadrature
m_of_x <- integrate(function(t) exp(c_logposterior_kernel(t)),
                    -Inf, Inf)

c_posterior <- function(theta){
  exp(c_logposterior_kernel(theta) - log(m_of_x$value))
}
c_posterior <- Vectorize(c_posterior)
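
## (Check, not in the original script) If the quadrature normalisation worked,
## the posterior should integrate to approximately 1.
integrate(c_posterior, -Inf, Inf)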

minT <- -abs(4 * min(xs))
maxT <- abs(4 * max(xs))

curve(c_posterior, minT, maxT,
      lwd = 4,
      xlab = expression(theta),
      ylab = "Posterior density",
      main = "Cauchy example -- BC exercise 1.28")

integrand <- function(t){
  t * c_posterior(t)
}

posterior.mean.quadrature <- integrate(integrand,
                                       -Inf, Inf,
                                       subdivisions = 1E5)
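
## (Sketch, not in the original script) The same quadrature approach gives a
## posterior standard deviation, reusing the mean computed just above.
second.moment.quadrature <- integrate(function(t) t^2 * c_posterior(t),
                                      -Inf, Inf,
                                      subdivisions = 1E5)
posterior.sd.quadrature <- sqrt(second.moment.quadrature$value -
                                  posterior.mean.quadrature$value^2)
posterior.sd.quadrature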

#### MCMC

library(cmdstanr)

c_model <- cmdstan_model("stan/cauchy.stan")
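
## NOTE (assumption; the .stan file is not shown here): stan/cauchy.stan is
## expected to declare data (n, x, prior_loc, prior_scale), a single real
## parameter theta with a normal(prior_loc, prior_scale) prior, and
## x ~ cauchy(theta, 1) as the likelihood, matching the quadrature model above.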

s.data <- list(
  n = length(xs),
  x = xs,
  prior_loc = Mu,
  prior_scale = Sigma
)

mcmc.samples <- c_model$sample(data = s.data,
                               max_treedepth = 13,
                               adapt_delta = .999,
                               chains = 10,
                               parallel_chains = 10,
                               metric = "diag_e")
mcmc.samples

theta.draws <- mcmc.samples$draws("theta")

#### Importance sampling
## Example 6.2.2, Eq (6.2.6)
library(matrixStats)
M <- length(theta.draws)
prior.draws <- rnorm(n = M, mean = Mu, sd = Sigma)
logWs <- sapply(prior.draws, function(t){
  sum(dcauchy(x = xs, location = t, log = TRUE))
})
logZW <- logSumExp(logWs)
weights <- exp(logWs - logZW)
m_is <- sum(weights * prior.draws)
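
## (Sketch, not in the original script) Kish's effective sample size of the
## normalised weights, 1 / sum(w^2); values much smaller than M indicate that
## the N(Mu, Sigma) prior is a poor proposal for this posterior.
ess.is <- 1 / sum(weights^2)
ess.is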

IS.draws <- sample(x = prior.draws, size = M, replace = TRUE,
                   prob = weights)

hist(theta.draws, probability = TRUE,
     xlim = c(minT, maxT),
     xlab = expression(theta))
curve(c_posterior, lwd = 3, add = TRUE)
lines(density(IS.draws), lwd = 3, lty = 2, col = 2)
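
## (Sketch, not in the original script) A legend distinguishing the quadrature
## posterior from the importance-sampling density estimate overlaid on the
## MCMC histogram.
legend("topright", legend = c("quadrature", "importance sampling"),
       lwd = 3, lty = c(1, 2), col = c(1, 2), bty = "n")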

##### Posterior mean estimates
mean(theta.draws)          ## MCMC (HMC)
posterior.mean.quadrature  ## quadrature
m_is                       ## importance sampling
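
## With a well-mixed sampler and a reasonable number of prior draws, all three
## estimates should agree up to Monte Carlo / quadrature error.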