% \documentclass[serif]{beamer} % Serif for Computer Modern math font.
\documentclass[serif, handout]{beamer} % Handout to ignore pause statements
\hypersetup{colorlinks,linkcolor=,urlcolor=red}
\usefonttheme{serif} % Looks like Computer Modern for non-math text -- nice!
\setbeamertemplate{navigation symbols}{} % Suppress navigation symbols
% \usetheme{Berlin} % Displays sections on top
\usetheme{Frankfurt} % Displays section titles on top: fairly thin, but still swallows some material at the bottom of crowded slides
% \usetheme{Berkeley}
\usepackage[english]{babel}
\usepackage{amsmath}
% \usepackage{graphicx} % To include pdf files
% \definecolor{links}{HTML}{2A1B81}
% \definecolor{links}{red}
\setbeamertemplate{footline}[frame number]

\title{Moment-generating functions\footnote{
This slide show is an open-source document. See last slide for copyright information.}}
\subtitle{STA 302: Fall 2015}
\date{} % To suppress date

\begin{document}

\begin{frame}
\titlepage
\end{frame}

\begin{frame}
\frametitle{The change of variables formula}
\framesubtitle{Let $X$ be a random variable.}

Let $Y=g(X)$. There are two ways to get $E(Y)$. \pause

\vspace{3mm}
\begin{enumerate}
\item Derive the distribution of $Y$ and compute
\begin{displaymath}
E(Y) = \int_{-\infty}^\infty y \, f_{_Y}(y) \, dy
\end{displaymath} \pause
\item Use the distribution of $X$ and calculate
\begin{displaymath}
E(g(X)) = \int_{-\infty}^\infty g(x) \, f_{_X}(x) \, dx
\end{displaymath} \pause
\end{enumerate}
Big theorem: These two expressions are equal.
\end{frame}

\begin{frame}
\frametitle{The change of variables formula is very general}
\framesubtitle{Including, but not limited to:}
\pause
\begin{columns} % Use Beamer's columns to use more of the margins!
\column{1.2\textwidth}
\begin{itemize}
\item[] $E(g(X)) = \int_{-\infty}^\infty g(x) \, f_{_X}(x) \, dx$ \pause
\item[]
\item[] $E(g(\mathbf{X})) = \int_{-\infty}^\infty \cdots \int_{-\infty}^\infty
        g(x_1, \ldots, x_p) \, f_{_{\mathbf{X}}}(x_1, \ldots, x_p) \, dx_1 \ldots dx_p$ \pause
\item[]
\item[] $E\left(g(X)\right) = \sum_x g(x) \, p_{_X}(x)$
\end{itemize}
\end{columns}
\end{frame}

\begin{frame}
\frametitle{Moment-generating functions}

{\LARGE
\begin{displaymath}
M_{_Y}(t) = E(e^{Yt}) \pause = \left\{
\begin{array}{l} % l means a single left-justified column
\int_{-\infty}^\infty e^{yt} \, f_{_Y}(y) \, dy \\ \\
\sum_y e^{yt} \, p_{_Y}(y)
\end{array}
\right. % Need that crazy invisible right period!
\end{displaymath}
} % End size
\end{frame}

\begin{frame}
\frametitle{Properties of moment-generating functions}
\pause

\begin{itemize}
\item Moment-generating functions can be used to generate moments. \pause
To get $E(Y^k)$, differentiate $M_{_Y}(t)$ $k$ times with respect to $t$
and set $t=0$; the next slide sketches why this works. \pause
\item[]
\item Moment-generating functions correspond uniquely to probability distributions.
\end{itemize}
\end{frame}
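% The next frame is a worked sketch of the differentiation property, referenced
% from the slide above. It assumes, as is standard, that M_Y(t) exists in a
% neighbourhood of t=0, so differentiation and expectation can be interchanged.
\begin{frame}
\frametitle{Why differentiation generates moments}
\framesubtitle{A sketch, assuming we may differentiate under the expected value sign}

\begin{eqnarray*}
\frac{d}{dt} M_{_Y}(t) & = & \frac{d}{dt} E\left(e^{Yt}\right) \\
& = & E\left(\frac{d}{dt} \, e^{Yt}\right) \\
& = & E\left(Y e^{Yt}\right),
\end{eqnarray*}
so $M^\prime_{_Y}(0) = E(Ye^{0}) = E(Y)$. \pause

\vspace{3mm}
Differentiating $k$ times gives $M^{(k)}_{_Y}(t) = E(Y^k e^{Yt})$,
and setting $t=0$ yields $E(Y^k)$.
\end{frame}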
\begin{frame}
\frametitle{The function $M(t)$ is like a fingerprint of the probability distribution.}
\pause

\begin{itemize}
\item[] $Y \sim N(\mu,\sigma^2)$ if and only if $M_{_Y}(t) = e^{\mu t + \frac{1}{2}\sigma^2t^2}$. \pause
\item[]
\item[] $Y \sim \chi^2(\nu)$ if and only if $M_{_Y}(t) = (1-2t)^{-\nu/2}$ for $t < \frac{1}{2}$.
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{Normal: $M(t) = e^{\mu t + \frac{1}{2}\sigma^2t^2}$}

\begin{center}
\includegraphics[width=3in]{NormalMGF}
\end{center}
\end{frame}

\begin{frame}
\frametitle{Chi-squared: $M(t) = (1-2t)^{-\nu/2}$}

\begin{center}
\includegraphics[width=3in]{ChisqMGF}
\end{center}
\end{frame}

\begin{frame}
\frametitle{Example: Using moment-generating functions to prove distribution facts}

Let $X \sim N(\mu,\sigma^2)$. Show $Y = \frac{X-\mu}{\sigma} \sim N(0,1)$.
\end{frame}

\begin{frame}
\frametitle{Facts about moment-generating functions}
\framesubtitle{Use these to find distributions of \emph{functions} of random variables}

\begin{itemize}
\item $M_{_{aY}}(t) = M_{_Y}(at)$ \pause
\item $M_{_{Y+a}}(t) = e^{at}M_{_Y}(t)$ \pause
\item If $Y_1, \ldots, Y_n$ are independent,
      $M_{_{\sum_{i=1}^n Y_i}}(t) = \prod_{i=1}^n M_{_{Y_i}}(t)$
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{A standard example}
\framesubtitle{Using $M_{_{\sum_{i=1}^n X_i}}(t) = \prod_{i=1}^n M_{_{X_i}}(t)$}
\pause

Let $X_1, \ldots, X_n \stackrel{i.i.d.}{\sim} N(\mu,\sigma^2)$, with $Y = \sum_{i=1}^n X_i$.
Find the probability distribution of $Y$. \pause

\vspace{3mm}
How about $\overline{X}$? \pause Recall $M_{_{aY}}(t) = M_{_Y}(at)$.
% A worked sketch of this example appears after the ``Less well known'' slide.
\end{frame}

\begin{frame}
\frametitle{Another standard example}
% \framesubtitle{Using $M_{_{\sum_{i=1}^n X_i}}(t) = \prod_{i=1}^n M_{_{X_i}}(t)$}
\pause

Let $X_1, \ldots, X_n \stackrel{ind.}{\sim} \chi^2(\nu_i)$, and $Y = \sum_{i=1}^n X_i$.
Find the probability distribution of $Y$.
\end{frame}

\begin{frame}
\frametitle{Less well known}
\framesubtitle{But very useful later}

If $W=W_1+W_2$ with $W_1$ and $W_2$ independent, $W\sim\chi^2(\nu_1+\nu_2)$ and
$W_2\sim\chi^2(\nu_2)$, then $W_1\sim\chi^2(\nu_1)$.
% A sketch of why this holds appears on a following slide.
\end{frame}
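% The next frame is a worked sketch of ``A standard example'' above, filling in
% the algebra using only the normal MGF and the two facts already stated.
\begin{frame}
\frametitle{Sketch: Sum of independent normals}
\framesubtitle{Using $M_{_{\sum_{i=1}^n X_i}}(t) = \prod_{i=1}^n M_{_{X_i}}(t)$ and $M_{_{aY}}(t) = M_{_Y}(at)$}

For $Y = \sum_{i=1}^n X_i$ with $X_1, \ldots, X_n \stackrel{i.i.d.}{\sim} N(\mu,\sigma^2)$,
\begin{displaymath}
M_{_Y}(t) = \prod_{i=1}^n e^{\mu t + \frac{1}{2}\sigma^2t^2}
          = e^{n\mu t + \frac{1}{2}n\sigma^2t^2},
\end{displaymath}
so $Y \sim N(n\mu, \, n\sigma^2)$ by uniqueness. \pause

\vspace{3mm}
Since $\overline{X} = \frac{1}{n}Y$,
\begin{displaymath}
M_{_{\overline{X}}}(t) = M_{_Y}(t/n) = e^{\mu t + \frac{1}{2}\frac{\sigma^2}{n}t^2},
\end{displaymath}
so $\overline{X} \sim N(\mu, \, \sigma^2/n)$.
\end{frame}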
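% The next frame is a worked sketch of the ``Less well known'' fact: divide
% moment-generating functions and appeal to uniqueness, both from earlier slides.
\begin{frame}
\frametitle{Sketch of the less well known fact}
\framesubtitle{Divide moment-generating functions}

By independence, $M_{_W}(t) = M_{_{W_1}}(t) \, M_{_{W_2}}(t)$, so for $t < \frac{1}{2}$,
\begin{displaymath}
M_{_{W_1}}(t) = \frac{M_{_W}(t)}{M_{_{W_2}}(t)}
= \frac{(1-2t)^{-(\nu_1+\nu_2)/2}}{(1-2t)^{-\nu_2/2}}
= (1-2t)^{-\nu_1/2}.
\end{displaymath}
\pause
This is the moment-generating function of $\chi^2(\nu_1)$,
so $W_1 \sim \chi^2(\nu_1)$ by uniqueness.
\end{frame}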
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Copyright Information}

This slide show was prepared by
\href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner},
Department of Statistical Sciences, University of Toronto.
It is licensed under a
\href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}
{Creative Commons Attribution - ShareAlike 3.0 Unported License}.
Use any part of it as you like and share the result freely.
The \LaTeX~source code is available from the course website:

\vspace{5mm}
\href{http://www.utstat.toronto.edu/~brunner/oldclass/302f15}
{\small\texttt{http://www.utstat.toronto.edu/$^\sim$brunner/oldclass/302f15}}
\end{frame}

\end{document}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# R code for plots of normal MGFs
tt = seq(from=-1,to=1,by=0.05)
mu = 0; sigsq = 1
zero = exp(mu*tt + 0.5*sigsq*tt^2)
mu = 1; one = exp(mu*tt + 0.5*sigsq*tt^2)
mu = -1; minusone = exp(mu*tt + 0.5*sigsq*tt^2)
x = c(tt,tt,tt); y = c(zero,one,minusone)
plot(x,y,pch=' ',xlab='t',ylab = 'M(t)')
lines(tt,zero,lty=1)
lines(tt,one,lty=2)
lines(tt,minusone,lty=3)
title("Fingerprints of the normal distribution")
# Legend
x1 <- c(-0.4,0) ; y1 <- c(4,4) ; lines(x1,y1,lty=1)
text(0.25,4,expression(paste(mu," = 0, ",sigma^2," = 1")))
x2 <- c(-0.4,0) ; y2 <- c(3.75,3.75) ; lines(x2,y2,lty=2)
text(0.25,3.75,expression(paste(mu," = 1, ",sigma^2," = 1")))
x3 <- c(-0.4,0) ; y3 <- c(3.5,3.5) ; lines(x3,y3,lty=3)
text(0.25,3.5,expression(paste(mu," = -1, ",sigma^2," = 1")))

# R code for plots of chi-squared MGFs
tt = seq(from=-0.25,to=0.25,by=0.005)
nu = 1; one = (1-2*tt)^(-nu/2)
nu = 2; two = (1-2*tt)^(-nu/2)
nu = 3; three = (1-2*tt)^(-nu/2)
x = c(tt,tt,tt); y = c(one,two,three)
plot(x,y,pch=' ',xlab='t',ylab = 'M(t)')
lines(tt,one,lty=1)
lines(tt,two,lty=2)
lines(tt,three,lty=3)
title("Fingerprints of the chi-squared distribution")
# Legend
x1 <- c(-0.2,-0.1) ; y1 <- c(2.5,2.5) ; lines(x1,y1,lty=1)
text(-0.05,2.5,expression(paste(nu," = 1")))
x2 <- c(-0.2,-0.1) ; y2 <- c(2.3,2.3) ; lines(x2,y2,lty=2)
text(-0.05,2.3,expression(paste(nu," = 2")))
x3 <- c(-0.2,-0.1) ; y3 <- c(2.1,2.1) ; lines(x3,y3,lty=3)
text(-0.05,2.1,expression(paste(nu," = 3")))
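
# A minimal simulation sketch of the standard example on the slides: compare the
# empirical MGF of Y = sum of n iid N(mu,sigsq) observations with the theoretical
# N(n*mu, n*sigsq) MGF. The values of n, mu, sigsq and the grid of t values are
# arbitrary illustrative choices.
set.seed(302)
n = 5; mu = 1; sigsq = 4; nsim = 100000
Y = rowSums(matrix(rnorm(nsim*n, mean=mu, sd=sqrt(sigsq)), nrow=nsim))
tt = seq(from=-0.2, to=0.2, by=0.05)
empirical = sapply(tt, function(t) mean(exp(Y*t))) # Monte Carlo estimate of E(e^{Yt})
theoretical = exp(n*mu*tt + 0.5*n*sigsq*tt^2)      # MGF of N(n*mu, n*sigsq)
round(cbind(t=tt, empirical, theoretical), 3)      # The two columns should nearly agree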