@@ -78,15 +78,15 @@ \section*{Bayesian model choice}
7878where the $w_i$ are the \textbf{prior probabilities} for each model.
7979\end{frame}
8080% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
81- \begin{frame}{An intuitive predictive}
82- A nice consequence of the formulation we just saw is that the predictive distribution looks quite intuitive:
83- \begin{align}
84- \nonumber
85- p(\tilde{x} \mid \boldsymbol{x}) &= \sum_{j} w_j \frac{1}{m_j(\boldsymbol{x})}\int_{\boldsymbol{\Theta}_j} f_j(\tilde{x} \mid t_j) f_j(\boldsymbol{x}\mid t_j)\pi_j(t_j)\, dt_j,\\
86- \label{eq:predictive_1}
87- &= \sum_{j} \pr(\mathcal{M}_j \mid \boldsymbol{x}) p_j(\tilde{x} \mid \boldsymbol{x}).
88- \end{align}
89- \end{frame}
81+ % \begin{frame}{An intuitive predictive}
82+ % A nice consequence of the formulation we just saw is that the predictive distribution looks quite intuitive:
83+ % \begin{align}
84+ % \nonumber
85+ % p(\tilde{x} \mid \boldsymbol{x}) &= \sum_{j} w_j \frac{1}{m_j(\boldsymbol{x})}\int_{\boldsymbol{\Theta}_j} f_j(\tilde{x} \mid t_j) f_j(\boldsymbol{x}\mid t_j)\pi_j(t_j)\,dt_j,\\
86+ % \label{eq:predictive_1}
87+ % &= \sum_{j} \pr(\mathcal{M}_j \mid \boldsymbol{x}) p_j(\tilde{x} \mid \boldsymbol{x}).
88+ % \end{align}
89+ % \end{frame}
9090% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
9191\begin{frame}{Hello, my old friend}
9292Here, Bayes factors also play a central role:
@@ -110,7 +110,7 @@ \section*{Bayesian model choice}
110110 \label{eq:predictive_2}
111111 &= \sum_j w_j^\prime \int_{\boldsymbol{\Theta}_j} f_j(\tilde{x} \mid t_j) p(t_j \mid \boldsymbol{x})\, dt_j.
112112\end{align}
113- which is another version of the expression in (\ref{eq:predictive_1}).
113+ % which is another version of the expression in (\ref{eq:predictive_1}).
114114\end{frame}
115115% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
116116\begin{frame}{Model checking}
0 commit comments