% \documentclass[serif]{beamer} % Serif for Computer Modern math font.
\documentclass[serif, handout]{beamer} % Handout to ignore pause statements.
\hypersetup{colorlinks,linkcolor=,urlcolor=red}
\usefonttheme{serif} % Looks like Computer Modern for non-math text -- nice!
\setbeamertemplate{navigation symbols}{} % Suppress navigation symbols
% \usetheme{Berlin} % Displays sections on top
\usetheme{Frankfurt} % Displays section titles on top: Fairly thin but still swallows some material at bottom of crowded slides
%\usetheme{Berkeley}
\usepackage[english]{babel}
\usepackage{amsmath} % for binom
\usepackage{amsfonts} % for \mathbb{R} The set of reals
% \usepackage{graphicx} % To include pdf files!
% \definecolor{links}{HTML}{2A1B81}
% \definecolor{links}{red}
\setbeamertemplate{footline}[frame number]

\mode<presentation>

\title{Conditional Probability\footnote{
This slide show is an open-source document. See last slide for copyright information.}}
\subtitle{STA 256: Fall 2018}
\date{} % To suppress date

\begin{document}

\begin{frame}
\titlepage
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Conditional Probability: The idea}
\pause
%\framesubtitle{}
\begin{itemize}
\item If event $B$ has occurred, maybe the probability of $A$ is different from the probability of $A$ overall. \pause
\item Maybe the chances of an auto insurance claim are different depending on the type of car. \pause
\item We will talk about the \emph{conditional} probability of an insurance claim \emph{given} that the car is a Dodge Charger.
\end{itemize}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Restrict the sample space}
%\framesubtitle{}
To condition on the event $B$, make $B$ the new, restricted sample space.
\pause
\begin{center}
\vspace{2mm}
\includegraphics[width=2in]{Venn2}
\pause
\vspace{2mm}
\end{center}
Express the probability of $A$ as a fraction of the probability of $B$\pause, provided the probability of $B$ is not zero.
\pause
\begin{displaymath}
P(A|B) = \frac{P(A\cap B)}{P(B)}
\end{displaymath}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Definition: The probability of $A$ given $B$}
%\framesubtitle{}
\begin{center}
{\Large
If $P(B)>0$, \pause
$\displaystyle P(A|B) = \frac{P(A\cap B)}{P(B)}$
} % End size
\vspace{2mm} \pause
\includegraphics[width=3in]{HIV}
\pause
\vspace{2mm} \pause
$P(A|F) = \frac{P(A\cap F)}{P(F)} \pause = \frac{0.01}{0.50} \pause = 0.02$
\end{center}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Multiplication Law}
\pause
\framesubtitle{$P(A|B) = \frac{P(A\cap B)}{P(B)} \Rightarrow P(A\cap B) = P(A|B)P(B)$}
\pause
{\Large
\begin{displaymath}
P(A\cap B) = P(A|B)P(B)
\end{displaymath} \pause
} % End size
\vspace{10mm}
Useful for sequential experiments. \pause
A jar contains 15 red balls and 5 blue balls. What is the probability of randomly drawing a red ball and then a blue ball?
\pause \vspace{3mm}
$P(R_1 \cap B_2) \pause = P(R_1)P(B_2|R_1) \pause
                 = \frac{15}{20} \, \frac{5}{19} \pause
                 = \frac{15}{76} \pause \approx 0.197$
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
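% The frame below is an editorial sketch, not part of the original deck. It
% applies the Multiplication Law once more to the same jar of 15 red and 5 blue
% balls; the frame title and wording are assumptions, but the arithmetic
% follows directly from the setup on the previous slide.
\begin{frame}
\frametitle{Multiplication Law: One more sequential draw}
\framesubtitle{Same jar: 15 red balls and 5 blue balls}
\pause
What is the probability of randomly drawing two red balls in a row?
\pause \vspace{3mm}
$P(R_1 \cap R_2) \pause = P(R_1)P(R_2|R_1) \pause
                 = \frac{15}{20} \, \frac{14}{19} \pause
                 = \frac{21}{38} \pause \approx 0.553$
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%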
\begin{frame}
\frametitle{Make a Tree}
\framesubtitle{Justified by the multiplication principle}
%\begin{center}
\includegraphics[width=4.55in]{tree}
%\end{center}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Trees}
%\framesubtitle{}
\begin{itemize}
\item Can be extended to more than two stages. \pause
\item Are best for \emph{small} sequential experiments. \pause
\item Can allow you to sidestep two important theorems if the problem is set up nicely for you. \pause
\begin{itemize}
\item The Law of Total Probability \pause
\item Bayes' Theorem
\end{itemize}
\end{itemize}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Law of Total Probability}
\pause
%\framesubtitle{}
Partition $\Omega$ into $B_1, B_2, \ldots, B_n$, disjoint, with $P(B_k)>0$ for all $k$.
\pause \vspace{3mm}
\begin{columns}
\column{0.5\textwidth}
\begin{center}
\includegraphics[width=2in]{TotalProb}
\end{center}
\pause
\column{0.5\textwidth}
$A = \cup_{k=1}^n(A\cap B_k)$, disjoint
\begin{eqnarray*}
P(A) & = & \sum_{k=1}^n P(A \cap B_k) \\ \pause
     & = & \sum_{k=1}^n P(A|B_k)P(B_k)
\end{eqnarray*}
\end{columns}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Example}
\framesubtitle{Law of Total Probability: $P(A) = \sum_{k=1}^n P(A|B_k)P(B_k)$}
In the 2 coins and one die example, \pause we got $P(2) = \frac{7}{18}$ from a tree.
\pause
\begin{eqnarray*}
P(2) & = & P(2|\mbox{Coin 1}) \, P(\mbox{Coin 1}) \\
     & + & P(2|\mbox{Coin 2}) \, P(\mbox{Coin 2}) \\
     & + & P(2|\mbox{Die}) \, P(\mbox{Die}) \\ \pause
     & = & \frac{1}{2}\cdot\frac{1}{3} + \frac{1}{2}\cdot\frac{1}{3} + \frac{1}{6}\cdot\frac{1}{3} \\ \pause
     & = & \frac{7}{18}
\end{eqnarray*}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Law of Total Probability}
\framesubtitle{A general statement}
\pause
Let $\Omega = \cup_{k=1}^\infty B_k$, disjoint\pause, with $P(B_k)>0$ for all $k$. Then
\pause \vspace{5mm}
{\Large
\begin{displaymath}
P(A) = \sum_{k=1}^\infty P(A|B_k)P(B_k)
\end{displaymath} \pause
} % End size
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Bayes' Theorem: The idea}
%\framesubtitle{}
{\Large
Bayes' Theorem allows you to turn conditional probability around and obtain $P(B|A)$ from $P(A|B)$.
} % End size
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Thomas Bayes (1701--1761)}
\framesubtitle{Image from Wikipedia}
\begin{center}
\includegraphics[width=2.8in]{Thomas_Bayes}
\end{center}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Bayes' Theorem}
\framesubtitle{One version of many}
\pause
Let $\Omega = \cup_{k=1}^\infty B_k$, disjoint\pause, with $P(B_k)>0$ for all $k$. Then
\pause \vspace{3mm}
{\Large
\begin{displaymath}
P(B_j|A) = \frac{P(A|B_j)P(B_j)}{\sum_{k=1}^\infty P(A|B_k)P(B_k)}
\end{displaymath} \pause
} % End size
\vspace{2mm}
An important special case is \pause
%\vspace{2mm}
{\Large
\begin{displaymath}
P(B|A) = \frac{P(A|B)P(B)}{P(A|B)P(B) + P(A|B^c)P(B^c)}
\end{displaymath}
} % End size
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
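% The frame below is an editorial sketch, not part of the original deck. It
% works one instance of Bayes' Theorem using only numbers already given in the
% 2 coins and one die example; the frame title and wording are assumptions.
\begin{frame}
\frametitle{Example}
\framesubtitle{Bayes' Theorem: $P(B_j|A) = \frac{P(A|B_j)P(B_j)}{\sum_k P(A|B_k)P(B_k)}$}
\pause
In the 2 coins and one die example, suppose we observe a 2. What is the probability that the die was used? \pause
The denominator is $P(2) = \frac{7}{18}$, already obtained from the Law of Total Probability.
\pause
\begin{eqnarray*}
P(\mbox{Die}|2) & = & \frac{P(2|\mbox{Die}) \, P(\mbox{Die})}{P(2)} \\ \pause
                & = & \frac{\frac{1}{6}\cdot\frac{1}{3}}{\frac{7}{18}} \\ \pause
                & = & \frac{1}{7}
\end{eqnarray*}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%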
\begin{frame}
\frametitle{Copyright Information}

This slide show was prepared by \href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of Statistical Sciences, University of Toronto. It is licensed under a \href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}{Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part of it as you like and share the result freely. The \LaTeX~source code is available from the course website:

\vspace{5mm}

\href{http://www.utstat.toronto.edu/~brunner/oldclass/256f18}
{\small\texttt{http://www.utstat.toronto.edu/$^\sim$brunner/oldclass/256f18}}

\end{frame}

\end{document}