% 302f13Assignment2.tex MGFs and one regression through the origin
\documentclass[10pt]{article}
%\usepackage{amsbsy} % for \boldsymbol and \pmb
\usepackage{graphicx} % To include pdf files!
\usepackage{amsmath}
\usepackage{amsbsy}
\usepackage{amsfonts}
\usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue, citecolor=blue, urlcolor=blue]{hyperref} % For links
\usepackage{fullpage}
%\pagestyle{empty} % No page numbers

\begin{document}
%\enlargethispage*{1000 pt}
\begin{center}
{\Large \textbf{STA 302f13 Assignment Two}}\footnote{Copyright information is at the end of the last page.}
\vspace{1 mm}
\end{center}

\noindent These problems are preparation for the quiz in tutorial on Friday September 27th, and are not to be handed in. Starting with Problem~\ref{mgfstart}, you can play a little game. Try not to do the same work twice. Instead, use results of earlier problems whenever possible.

\begin{enumerate}
\item Sometimes, you want the least squares line to go through the origin, so that predicted $Y$ automatically equals zero when $x=0$. For example, suppose the cases are half-kilogram batches of rice purchased from grocery stores. The independent variable $x$ is concentration of arsenic in the rice before washing, and the dependent variable $Y$ is concentration of arsenic after washing. Discounting the very unlikely possibility that arsenic contamination can happen \emph{during} washing, you want to use your knowledge that zero arsenic before washing implies zero arsenic after washing. You will use your knowledge by building it into the statistical model. Accordingly, let $Y_i = \beta x_i + \epsilon_i$ for $i=1, \ldots, n$, where $\epsilon_1, \ldots, \epsilon_n$ are a random sample from a distribution with expected value zero and variance $\sigma^2$, and $\beta$ and $\sigma^2$ are unknown constants. The numbers $x_1, \ldots, x_n$ are known, observed constants.
\begin{enumerate}
\item What is $E(Y_i)$?
\item What is $Var(Y_i)$?
\item Find the Least Squares estimate of $\beta$ by minimizing the function
\begin{displaymath}
Q(\beta)=\sum_{i=1}^n(Y_i-\beta x_i)^2
\end{displaymath}
over all values of $\beta$. Let $\widehat{\beta}$ denote the point at which $Q(\beta)$ is minimal.
\item Give the equation of the least-squares line. Of course it's the \emph{constrained} least-squares line, passing through $(0,0)$.
\item Recall that a statistic is an \emph{unbiased estimator} of a parameter if the expected value of the statistic is equal to the parameter. Is $\widehat{\beta}$ an unbiased estimator of $\beta$? Answer Yes or No and show your work.
\item Let $\widehat{\beta}_2 = \frac{\overline{Y}_n}{\overline{x}_n}$. Is $\widehat{\beta}_2$ also unbiased for $\beta$? Answer Yes or No and show your work.
% Need numerical beta-hat and predicted Y here. Maybe a set of (x,Y) values.
\item \emph{This last part is a challenge for your entertainment. It will not be on the quiz or the final exam.} Prove that $\widehat{\beta}$ is a more accurate estimator than $\widehat{\beta}_{2}$ in the sense that it has smaller variance. Hint: The sample variance of the independent variable values cannot be negative.
\end{enumerate}

%%%%%%%%%%%%%%%%%%%%%%%%% MGF %%%%%%%%%%%%%%%%%%%%%%%%%

\item \label{mgfstart} Denote the moment-generating function of a random variable $X$ by $M_X(t)$. The moment-generating function is defined by $M_X(t) = E(e^{Xt})$.
\begin{enumerate}
\item Let $a$ be a constant. Prove that $M_{aX}(t) = M_X(at)$.
\item Prove that $M_{X+a}(t) = e^{at}M_X(t)$.
\item Let $X_1, \ldots, X_n$ be \emph{independent} random variables. Prove that
\begin{displaymath}
M_{\sum_{i=1}^n X_i}(t) = \prod_{i=1}^n M_{X_i}(t).
\end{displaymath}
For convenience, you may assume that $X_1, \ldots, X_n$ are all continuous, so you will integrate.
\end{enumerate}

\newpage

\item Recall that if $X\sim N(\mu,\sigma^2)$, it has moment-generating function $M_X(t) = e^{\mu t + \frac{1}{2}\sigma^2t^2}$.
\begin{enumerate}
\item Let $X\sim N(\mu,\sigma^2)$ and $Y=aX+b$, where $a$ and $b$ are constants. Find the distribution of $Y$. Show your work.
\item Let $X\sim N(\mu,\sigma^2)$ and $Z = \frac{X-\mu}{\sigma}$. Find the distribution of $Z$.
\item Let $X_1, \ldots, X_n$ be a random sample from a $N(\mu,\sigma^2)$ distribution. Find the distribution of $Y = \sum_{i=1}^nX_i$.
\item Let $X_1, \ldots, X_n$ be a random sample from a $N(\mu,\sigma^2)$ distribution. Find the distribution of the sample mean $\overline{X}$.
\item Let $X_1, \ldots, X_n$ be a random sample from a $N(\mu,\sigma^2)$ distribution. Find the distribution of $Z = \frac{\sqrt{n}(\overline{X}-\mu)}{\sigma}$.
\end{enumerate}

\item A Chi-squared random variable $X$ with parameter $\nu>0$ has moment-generating function $M_X(t) = (1-2t)^{-\nu/2}$.
\begin{enumerate}
\item Let $X_1, \ldots, X_n$ be independent random variables with $X_i \sim \chi^2(\nu_i)$ for $i=1, \ldots, n$. Find the distribution of $Y = \sum_{i=1}^n X_i$.
\item Let $Z \sim N(0,1)$. Find the distribution of $Y=Z^2$.
\item Let $X_1, \ldots, X_n$ be a random sample from a $N(\mu,\sigma^2)$ distribution. Find the distribution of $Y = \frac{1}{\sigma^2} \sum_{i=1}^n\left(X_i-\mu \right)^2$.
\item Let $Y=X_1+X_2$, where $X_1$ and $X_2$ are independent, $X_1\sim\chi^2(\nu_1)$ and $Y\sim\chi^2(\nu_1+\nu_2)$, where $\nu_1$ and $\nu_2$ are both positive. Show $X_2\sim\chi^2(\nu_2)$.
\item Let $X_1, \ldots, X_n$ be a random sample from a $N(\mu,\sigma^2)$ distribution. Show
\begin{displaymath}
\frac{(n-1)S^2}{\sigma^2} \sim \chi^2(n-1),
\end{displaymath}
where $S^2 = \frac{\sum_{i=1}^n\left(X_i-\overline{X} \right)^2 }{n-1}$. Hint: $\sum_{i=1}^n\left(X_i-\mu \right)^2 = \sum_{i=1}^n\left(X_i-\overline{X} + \overline{X} - \mu \right)^2 = \ldots$ For this question, you may use the independence of $\overline{X}$ and $S^2$ without proof.
\end{enumerate}
\end{enumerate}

\vspace{50mm}

\noindent
\begin{center}\begin{tabular}{l} \hspace{6in} \\ \hline \end{tabular}\end{center}
This assignment was prepared by \href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of Statistical Sciences, University of Toronto. It is licensed under a \href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}
{Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part of it as you like and share the result freely. The \LaTeX~source code is available from the course website:
\href{http://www.utstat.toronto.edu/~brunner/oldclass/302f13}
{\small\texttt{http://www.utstat.toronto.edu/\textasciitilde brunner/oldclass/302f13}}

\end{document}