% 302f20Assignment1.tex REVIEW
\documentclass[11pt]{article}
%\usepackage{amsbsy} % for \boldsymbol and \pmb
\usepackage{graphicx} % To include pdf files!
\usepackage{amsmath}
\usepackage{amsbsy}
\usepackage{amsfonts}
\usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue,
            citecolor=blue, urlcolor=blue]{hyperref} % For links
\usepackage{fullpage}
%\pagestyle{empty} % No page numbers

\begin{document}
%\enlargethispage*{1000 pt}

\begin{center}
{\Large \textbf{STA 302f20 Assignment One}}\footnote{Copyright information is at the end of the last page.}
\vspace{1 mm}
\end{center}

\noindent Please do these review questions in preparation for Quiz One; they are not to be handed in. This material will not directly be on the final exam. Use the formula sheet on the course website.
\vspace{3mm}

\begin{enumerate}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Expected value etc. %%%%%%%%%%%%%%%%%%%%%%%%%%%

\item The discrete random variable $X$ has probability mass function $p(x) = |x|/20$ for $x = -4, \ldots, 4$ and zero otherwise. Let $Y=X^2-1$.
\begin{enumerate}
\item What is $E(X)$? The answer is a number. Show some work. % zero
\item Calculate the variance of $X$. The answer is a number. My answer is 10.
\item What is $P(Y=8)$? My answer is 0.30.
\item What is $P(Y=-1)$? My answer is zero.
\item What is $P(Y=-4)$? My answer is zero.
\item What is the probability distribution of $Y$? Give the $y$ values with their probabilities.
\begin{verbatim}
   y    0    3    8   15
 p(y)  0.1  0.2  0.3  0.4
\end{verbatim}
\item What is $E(Y)$? The answer is a number. My answer is 9.
\item What is $Var(Y)$? The answer is a number. My answer is 30.
\end{enumerate}

\item This question clarifies the meaning of $E(a)$ and $Var(a)$ when $a$ is a constant.
\begin{enumerate}
\item Let $X$ be a discrete random variable with $P(X=a)=1$ (later we will call this a \emph{degenerate} random variable). Using the definitions on the formula sheet, calculate $E(X)$ and $Var(X)$. This is the real meaning of the concept.
\item Let $a$ be a real constant and $X$ be a continuous random variable with density $f(x)$. Let $Y = g(X) = a$. Using the formula for $E(g(X))$ on the formula sheet, calculate $E(Y)$ and $Var(Y)$. This reminds us that the change of variables formula (which is a very big theorem) applies to the case of a constant function.
\end{enumerate}
% See 2016 for another version of this question.

\item The discrete random variables $X$ and $Y$ have joint distribution
\begin{center}
\begin{tabular}{c|ccc}
      & $x=1$  & $x=2$  & $x=3$ \\ \hline
$y=1$ & $3/12$ & $1/12$ & $3/12$ \\
$y=2$ & $1/12$ & $3/12$ & $1/12$ \\
\end{tabular}
\end{center}
\begin{enumerate}
\item What is the marginal distribution of $X$? List the values with their probabilities.
\item What is the marginal distribution of $Y$? List the values with their probabilities.
\item Calculate $E(X)$. Show your work.
\item What is $Var(X)$? Show your work.
\item Calculate $E(Y)$. Show your work.
\item Calculate $Var(Y)$. Show your work. You may use Question~\ref{handyA} if you wish.
\item Let $Z_1 = g_1(X,Y) = X+Y$. What is the probability distribution of $Z_1$? Show some work.
\item Calculate $E(Z_1)$. Show your work.
\item Do we have $E(X+Y) = E(X)+E(Y)$? Answer Yes or No. Note that the answer \emph{does not require independence}, or even zero covariance.
\item Let $Z_2 = g_2(X,Y) = XY$. What is the probability distribution of $Z_2$? List the values with their probabilities. Show some work.
\item Calculate $E(Z_2)$. Show your work.
\item Do we have $E(XY) = E(X)E(Y)$? Answer Yes or No.
\item Using the well-known formula of Question~\ref{handyB}, what is $Cov(X,Y)$?
\item Are $X$ and $Y$ independent? Answer Yes or No and show some work.
\end{enumerate}
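\vspace{2mm}
\noindent \emph{Optional check:} if you would like to verify your answers to several parts of this question numerically, here is a minimal Python sketch. It is illustrative only (not part of the assignment, and the variable names are just my own choices); it tabulates the joint distribution and computes $E(X)$, $E(Y)$, and $E(XY)$ directly from the definitions on the formula sheet, then gets the covariance from the formula of Question~\ref{handyB}.
\begin{verbatim}
# Joint distribution: key (x, y) -> probability, from the table above
joint = {(1, 1): 3/12, (2, 1): 1/12, (3, 1): 3/12,
         (1, 2): 1/12, (2, 2): 3/12, (3, 2): 1/12}

EX  = sum(x * p for (x, y), p in joint.items())      # E(X)
EY  = sum(y * p for (x, y), p in joint.items())      # E(Y)
EXY = sum(x * y * p for (x, y), p in joint.items())  # E(XY)
print(EX, EY, EXY, EXY - EX * EY)  # last value: Cov(X,Y) = E(XY) - E(X)E(Y)
\end{verbatim}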
\item \label{prod} Let $X_1$ and $X_2$ be continuous random variables that are \emph{independent}. Using the expression for $E(g(\mathbf{X}))$ on the formula sheet, show $E(X_1 X_2) = E(X_1)E(X_2)$. Draw an arrow to the place in your answer where you use independence, and write ``This is where I use independence.'' Because $X_1$ and $X_2$ are continuous, you will need to integrate. Does your proof still apply if $X_1$ and $X_2$ are discrete?

\item \label{handy} Using the definitions of variance and covariance along with the linear property $E(\sum_{i=1}^na_iY_i) = \sum_{i=1}^na_iE(Y_i)$ (no integrals), show the following:
\begin{enumerate}
\item \label{handyA} $Var(Y) = E(Y^2)-\mu_Y^2$
\item \label{handyB} $Cov(X,Y)=E(XY)-E(X)E(Y)$
\item If $X$ and $Y$ are independent, $Cov(X,Y) = 0$. Of course you may use Problem~\ref{prod}.
\end{enumerate}

\item Let $X$ be a random variable and $a$ be a constant. Show
\begin{enumerate}
\item $Var(aX) = a^2Var(X)$.
\item $Var(X+a) = Var(X)$.
\end{enumerate}

\item Show $Var(X+Y) = Var(X) + Var(Y) + 2Cov(X,Y)$.

\item Let $X$ and $Y$ be random variables, and let $a$ and $b$ be constants. Show $Cov(X+a,Y+b) = Cov(X,Y)$.

\item Let $X$ and $Y$ be random variables, with $E(X)=\mu_x$, $E(Y)=\mu_y$, $Var(X)=\sigma^2_x$, $Var(Y)=\sigma^2_y$, $Cov(X,Y) = \sigma_{xy}$ and $Corr(X,Y) = \rho_{xy}$. Let $a$ and $b$ be non-zero constants.
\begin{enumerate}
\item Find $Cov(aX,Y)$.
\item Find $Corr(aX,Y)$. Do not forget that $a$ could be negative.
\end{enumerate}

\item Let $E(X_1)=\mu_1$, $E(X_2)=\mu_2$, $E(Y_1)=\mu_3$, $E(Y_2)=\mu_4$. Show
$Cov(X_1+X_2,Y_1+Y_2) = Cov(X_1,Y_1) + Cov(X_1,Y_2) + Cov(X_2,Y_1) + Cov(X_2,Y_2)$.

\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item Let $y_1, \ldots, y_n$ be numbers (not necessarily random variables), and $\overline{y}=\frac{1}{n}\sum_{i=1}^ny_i$. Show
\begin{enumerate}
\item $\sum_{i=1}^n(y_i-\overline{y})=0$
\item $\sum_{i=1}^n(y_i-\overline{y})^2=\sum_{i=1}^ny_i^2 \,-\, n\overline{y}^2$
\item The sum of squares $Q_m = \sum_{i=1}^n(y_i-m)^2$ is minimized when $m = \overline{y}$.
\end{enumerate}

\item Let $x_1, \ldots, x_n$ and $y_1, \ldots, y_n$ be numbers, with $\overline{x}=\frac{1}{n}\sum_{i=1}^nx_i$ and $\overline{y}=\frac{1}{n}\sum_{i=1}^ny_i$. Show
$\sum_{i=1}^n(x_i-\overline{x})(y_i-\overline{y}) = \sum_{i=1}^n x_iy_i \,-\, n\overline{x} \, \overline{y}$.

\item Let $Y_1, \ldots, Y_n$ be independent random variables with $E(Y_i)=\mu$ and $Var(Y_i)=\sigma^2$ for $i=1, \ldots, n$. For this question, please use definitions and familiar properties of expected value, not integrals or sums.
\begin{enumerate}
\item Find $E(\sum_{i=1}^nY_i)$. Are you using independence?
\item Find $Var\left(\sum_{i=1}^n Y_i\right)$. What earlier questions are you using in connection with independence?
\item Using your answer to the last question, find $Var(\overline{Y})$.
\item A statistic $T$ is an \emph{unbiased estimator} of a parameter $\theta$ if $E(T)=\theta$. Show that $\overline{Y}$ is an unbiased estimator of $\mu$.
\item Let $a_1, \ldots, a_n$ be constants and define the linear combination $L$ by $L = \sum_{i=1}^n a_i Y_i$. What condition on the $a_i$ values makes $L$ an unbiased estimator of $\mu$? Show your work.
\item Is $\overline{Y}$ a special case of $L$? If so, what are the $a_i$ values?
\item What is $Var(L)$?
\end{enumerate}
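\vspace{2mm}
\noindent \emph{Optional check:} the Python sketch below approximates $E(\overline{Y})$ and $Var(\overline{Y})$ by simulation, for comparison with your answers to parts (c) and (d). It is illustrative only: normal $Y_i$ are used just for concreteness (the question assumes only a common mean and variance), and the values of $\mu$, $\sigma$, $n$, and the number of replications are arbitrary choices of mine.
\begin{verbatim}
import random

mu, sigma, n, reps = 10.0, 2.0, 5, 100000
ybars = []
for _ in range(reps):
    ys = [random.gauss(mu, sigma) for _ in range(n)]  # Y_1, ..., Y_n
    ybars.append(sum(ys) / n)                         # one realization of Ybar

mean_ybar = sum(ybars) / reps   # should be close to mu
var_ybar = sum((yb - mean_ybar) ** 2 for yb in ybars) / reps
print(mean_ybar, var_ybar, sigma ** 2 / n)  # compare var_ybar to sigma^2/n
\end{verbatim}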
\item % I'm sticking to capital Y_i because of the distribution function technique.
Here is a simple linear regression model. Independently for $i=1, \ldots, n$, let $Y_i = \beta_0 + \beta_1 x_i + \epsilon_i$, where $\beta_0$ and $\beta_1$ are constants (typically unknown), $x_i$ is a known, observable constant, and $\epsilon_i$ is a random variable with expected value zero and variance $\sigma^2$.
\begin{enumerate}
\item What is $E(Y_i)$?
\item What is $Var(Y_i)$?
\item Suppose that the distribution of $\epsilon_i$ is normal, so that it has density $f(\epsilon) = \frac{1}{\sigma\sqrt{2\pi}} e^{-\frac{\epsilon^2}{2\sigma^2}}$. Find the distribution of $Y_i$. Show your work. Hint: differentiate the cumulative distribution function of $Y_i$.
\item Let $\widehat{\beta}_1 = \frac{\sum_{i=1}^nx_iY_i}{\sum_{i=1}^nx_i^2}$. Is $\widehat{\beta}_1$ an unbiased estimator of $\beta_1$? Answer Yes or No and show your work.
\end{enumerate}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Matrices %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item \label{numbers} Let
$\mathbf{A} = \left( \begin{array}{rr} 2 & 5 \\ 1 & -4 \\ 0 & 3 \end{array} \right)$ and
$\mathbf{B} = \left( \begin{array}{rr} 1 & 0 \\ 2 & 3 \\ -1 & 3 \end{array} \right)$
be matrices of constants. Which of the following are possible to compute? Don't do the calculations. Just answer each one Yes or No.
\renewcommand{\arraystretch}{1.5}
\begin{displaymath}
\begin{array}{lll}
(a)~\mathbf{A}^{-1} & (b)~|\mathbf{B}| & (c)~\mathbf{A}+\mathbf{B} \\
(d)~\mathbf{A}-\mathbf{B} & (e)~\mathbf{AB} & (f)~\mathbf{BA} \\
(g)~\mathbf{A}^\prime\mathbf{B} & (h)~\mathbf{B}^\prime\mathbf{A} & (i)~\mathbf{A}/\mathbf{B} \\
\end{array}
\end{displaymath}
\renewcommand{\arraystretch}{1.0}

\item For the matrices of Question~\ref{numbers}, calculate $\mathbf{A}^\prime\mathbf{B}$. My answer is
$\mathbf{A}^\prime\mathbf{B} = \left( \begin{array}{rr} 4 & 3 \\ -6 & -3 \end{array} \right)$.

\item Let $\mathbf{c} = \left( \begin{array}{r} 2 \\ 1 \\ 0 \end{array} \right)$ and
$\mathbf{d} = \left( \begin{array}{r} 1 \\ 2 \\ -1 \end{array} \right)$.
Verify that $\mathbf{c}^\prime\mathbf{d} = 4$ and
$\mathbf{c}\mathbf{d}^\prime = \left( \begin{array}{rrr} 2 & 4 & -2 \\ 1 & 2 & -1 \\ 0 & 0 & 0 \end{array} \right)$.
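\vspace{2mm}
\noindent \emph{Optional check:} matrix arithmetic like this can be verified with software. The short Python sketch below (illustrative only; it assumes the NumPy library is installed) reproduces $\mathbf{A}^\prime\mathbf{B}$ for the matrices of Question~\ref{numbers}, along with $\mathbf{c}^\prime\mathbf{d}$ and $\mathbf{c}\mathbf{d}^\prime$.
\begin{verbatim}
import numpy as np

A = np.array([[2, 5], [1, -4], [0, 3]])
B = np.array([[1, 0], [2, 3], [-1, 3]])
c = np.array([[2], [1], [0]])   # column vectors
d = np.array([[1], [2], [-1]])

print(A.T @ B)   # A'B: a 2x2 matrix
print(c.T @ d)   # c'd: a 1x1 matrix containing the scalar 4
print(c @ d.T)   # cd': a 3x3 matrix
\end{verbatim}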
% \pagebreak %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item \label{firstmat} Which statement is true? Quantities in boldface are matrices of constants. Assume the matrices are of the right size.
\begin{enumerate}
\item $\mathbf{A(B+C) = AB+AC}$
\item $\mathbf{A(B+C) = BA+CA}$
\item Both a and b
\item Neither a nor b
\end{enumerate}

\item Which statement is true?
\begin{enumerate}
\item $a\mathbf{(B+C)}=a\mathbf{B} + a\mathbf{C}$
\item $a\mathbf{(B+C)}=\mathbf{B}a + \mathbf{C}a$
\item Both a and b
\item Neither a nor b
\end{enumerate}

\item Which statement is true?
\begin{enumerate}
\item $\mathbf{(B+C)A = AB+AC}$
\item $\mathbf{(B+C)A = BA+CA}$
\item Both a and b
\item Neither a nor b
\end{enumerate}

\item Which statement is true?
\begin{enumerate}
\item $\mathbf{(AB)^\prime = A^\prime B^\prime}$
\item $\mathbf{(AB)^\prime = B^\prime A^\prime}$
\item Both a and b
\item Neither a nor b
\end{enumerate}

\item Which statement is true?
\begin{enumerate}
\item $\mathbf{A^{\prime\prime} = A}$
\item $\mathbf{A^{\prime\prime\prime} = A^\prime}$
\item Both a and b
\item Neither a nor b
\end{enumerate}

\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item Suppose that the square matrices $\mathbf{A}$ and $\mathbf{B}$ are the same size, and both have inverses. Which statement is true?
\begin{enumerate}
\item $\mathbf{(AB)}^{-1} = \mathbf{A}^{-1}\mathbf{B}^{-1}$
\item $\mathbf{(AB)}^{-1} = \mathbf{B}^{-1}\mathbf{A}^{-1}$
\item Both a and b
\item Neither a nor b
\end{enumerate}

\item Which statement is true?
\begin{enumerate}
\item $\mathbf{(A+B)^\prime = A^\prime + B^\prime}$
\item $\mathbf{(A+B)^\prime = B^\prime + A^\prime}$
\item $\mathbf{(A+B)^\prime = (B+A)^\prime}$
\item All of the above
\item None of the above
\end{enumerate}

\item Which statement is true?
\begin{enumerate}
\item $(a+b)\mathbf{C} = a\mathbf{C} + b\mathbf{C}$
\item $(a+b)\mathbf{C} = \mathbf{C}a + \mathbf{C}b$
\item $(a+b)\mathbf{C} = \mathbf{C}(a+b)$
\item All of the above
\item None of the above
\end{enumerate}

\item Let $\mathbf{A}$ be a square matrix with the determinant of $\mathbf{A}$ (denoted $|\mathbf{A}|$) equal to zero. What does this tell you about $\mathbf{A}^{-1}$? No proof is required here.

\item Recall that $\mathbf{A}$ symmetric means $\mathbf{A=A^\prime}$. Let $\mathbf{X}$ be an $n$ by $p$ matrix. Prove that $\mathbf{X^\prime X}$ is symmetric.

\item Matrix multiplication does not commute in general. That is, if $\mathbf{A}$ and $\mathbf{B}$ are matrices, it is \emph{not} true in general that $\mathbf{AB} = \mathbf{BA}$; equality holds only in special cases, such as when both matrices are $1 \times 1$. Establish this important fact by making up a simple numerical example in which $\mathbf{A}$ and $\mathbf{B}$ are both $2 \times 2$ matrices. Carry out the multiplication, showing $\mathbf{AB} \neq \mathbf{BA}$. This is also the point of Question~\ref{firstmat}.

\item Let $\mathbf{X}$ be an $n$ by $p$ matrix with $n \neq p$. Why is it incorrect to say that $(\mathbf{X^\prime X})^{-1} = \mathbf{X}^{-1}\mathbf{X}^{\prime -1}$?

\item Let
\begin{tabular}{ccc}
$\mathbf{A} = \left( \begin{array}{c c} 1 & 2 \\ 2 & 4 \end{array} \right)$ &
$\mathbf{B} = \left( \begin{array}{c c} 0 & 2 \\ 2 & 1 \end{array} \right)$ &
$\mathbf{C} = \left( \begin{array}{c c} 2 & 0 \\ 1 & 2 \end{array} \right)$
\end{tabular}
\begin{enumerate}
\item Calculate $\mathbf{AB}$ and $\mathbf{AC}$.
\item Do we have $\mathbf{AB} = \mathbf{AC}$? Answer Yes or No.
\item Do we have $\mathbf{B} = \mathbf{C}$? Answer Yes or No. What does this example tell you about ``cancelling'' a matrix from both sides of an equation like $\mathbf{AB} = \mathbf{AC}$?
\end{enumerate}

\end{enumerate}

% \vspace{130mm}
\noindent
\begin{center}\begin{tabular}{l} \hspace{6in} \\ \hline \end{tabular}\end{center}
This assignment was prepared by \href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of Statistical Sciences, University of Toronto. It is licensed under a
\href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}{Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part of it as you like and share the result freely. The \LaTeX~source code is available from the course website:
\href{http://www.utstat.toronto.edu/~brunner/oldclass/302f20}{\small\texttt{http://www.utstat.toronto.edu/$^\sim$brunner/oldclass/302f20}}

\end{document}