% 302f17Assignment1.tex REVIEW
\documentclass[12pt]{article}
%\usepackage{amsbsy} % for \boldsymbol and \pmb
\usepackage{graphicx} % To include pdf files!
\usepackage{amsmath}
\usepackage{amsbsy}
\usepackage{amsfonts}
\usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue, citecolor=blue, urlcolor=blue]{hyperref} % For links
\usepackage{fullpage}
%\pagestyle{empty} % No page numbers

\begin{document}
%\enlargethispage*{1000 pt}

\begin{center}
{\Large \textbf{STA 302f17 Assignment One}}\footnote{Copyright information is at the end of the last page.}
\vspace{1 mm}
\end{center}

% Expected value review and part of Chapter 1. Use the text whenever possible. Say for problems bleble through blebaa, ...
% Maybe just a bit of linear algebra too -- very basic. I could assign problems from Rencher and Schaalje.

\noindent This course involves a lot of scalar calculations of expected value, variance and covariance, and even more matrix calculations. The questions on this assignment are pure review, and are too basic for our textbook. Questions like these will not appear directly on the final exam. The formulas below will be supplied as needed on Quiz One. A small worked illustration of the formulas appears right after Question~2.

\begin{center}
\renewcommand{\arraystretch}{1.5}
\begin{tabular}{ll}
$E(x) = \sum_x \, x \, p_{_x}(x)$ &
$E(x) = \int_{-\infty}^\infty x f_{_x}(x) \, dx$ \\
$E(g(x)) = \sum_x g(x) \, p_{_x}(x)$ &
$E(g(\mathbf{x})) = \sum_{x_1} \cdots \sum_{x_p} g(x_1, \ldots, x_p) \, p_{_\mathbf{x}}(x_1, \ldots, x_p) $ \\
$E(g(x)) = \int_{-\infty}^\infty g(x) \, f_{_x}(x) \, dx$ &
$E(g(\mathbf{x})) = \int_{-\infty}^\infty \cdots \int_{-\infty}^\infty g(x_1, \ldots, x_p) \, f_{_\mathbf{x}}(x_1, \ldots, x_p) \, dx_1 \ldots dx_p $ \\
$E(\sum_{i=1}^na_ix_i) = \sum_{i=1}^na_iE(x_i)$ &
$Var(x) = E\left( \, (x-\mu_{_x})^2 \, \right)$ \\
$Cov(x,y) = E\left( \, (x-\mu_{_x})(y-\mu_{_y}) \, \right)$ &
$Corr(x,y) = \frac{Cov(x,y)}{\sqrt{Var(x)Var(y)}}$
\end{tabular}
\renewcommand{\arraystretch}{1.0}
\end{center}

\begin{enumerate}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Expected value etc. %%%%%%%%%%%%%%%%%%%%%%%%%%%

\item The discrete random variable $x$ has probability mass function $p(x) = |x|/20$ for $x = -4, \ldots, 4$ and zero otherwise. Let $y=x^2-1$.
\begin{enumerate}
\item What is $E(x)$? The answer is a number. Show some work. % zero
\item Calculate the variance of $x$. The answer is a number. My answer is 10.
\item What is $P(y=8)$? My answer is 0.30.
\item What is $P(y=-1)$? My answer is zero.
\item What is $P(y=-4)$? My answer is zero.
\item What is the probability distribution of $y$? Give the $y$ values with their probabilities.
\begin{verbatim}
    y     0     3     8    15
  p(y)   0.1   0.2   0.3   0.4
\end{verbatim}
\item What is $E(y)$? The answer is a number. My answer is 9.
\item What is $Var(y)$? The answer is a number. My answer is 30.
\end{enumerate}

\item This question clarifies the meaning of $E(a)$ and $Var(a)$ when $a$ is a constant.
\begin{enumerate}
\item Let $x$ be a discrete random variable with $P(x=a)=1$ (later we will call this a \emph{degenerate} random variable). Using the definitions above, calculate $E(x)$ and $Var(x)$. This is the real meaning of the concept.
\item Let $a$ be a real constant and $x$ be a continuous random variable with density $f(x)$. Let $y = g(x) = a$. Using the formula for $E(g(x))$ above, calculate $E(y)$ and $Var(y)$. This reminds us that the change of variables formula (which is a very big theorem) applies to the case of a constant function.
\end{enumerate}
% See 2016 for another version of this question.
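\vspace{2mm}
\noindent \emph{A small worked illustration of the formulas at the start of this assignment} (this example is made up for illustration only; it is not one of the assignment questions): suppose the discrete random variable $x$ takes the values 0 and 1, each with probability $1/2$, and let $y = g(x) = 3x+1$. Then
\begin{eqnarray*}
E(x) & = & \sum_x x \, p_{_x}(x) \; = \; 0 \cdot \tfrac{1}{2} + 1 \cdot \tfrac{1}{2} \; = \; \tfrac{1}{2} \\
Var(x) & = & E\left( \, (x-\mu_{_x})^2 \, \right) \; = \; \left(0-\tfrac{1}{2}\right)^2 \tfrac{1}{2} + \left(1-\tfrac{1}{2}\right)^2 \tfrac{1}{2} \; = \; \tfrac{1}{4} \\
E(y) & = & \sum_x g(x) \, p_{_x}(x) \; = \; 1 \cdot \tfrac{1}{2} + 4 \cdot \tfrac{1}{2} \; = \; \tfrac{5}{2} ,
\end{eqnarray*}
which agrees with $E(3x+1) = 3E(x)+1$ from the linearity formula, and with $Var(3x+1) = 9 \, Var(x) = \tfrac{9}{4}$, a rule you will prove later in this assignment.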
\item The discrete random variables $x$ and $y$ have joint distribution
\begin{center}
\begin{tabular}{c|ccc}
      & $x=1$ & $x=2$ & $x=3$ \\ \hline
$y=1$ & $3/12$ & $1/12$ & $3/12$ \\
$y=2$ & $1/12$ & $3/12$ & $1/12$ \\
\end{tabular}
\end{center}
\begin{enumerate}
\item What is the marginal distribution of $x$? List the values with their probabilities.
\item What is the marginal distribution of $y$? List the values with their probabilities.
\item Calculate $E(x)$. Show your work.
\item Denote a ``centered'' version of $x$ by $x_c = x - E(x) = x-\mu_{_x}$.
\begin{enumerate}
\item What is the probability distribution of $x_c$? Give the values with their probabilities.
\item What is $E(x_c)$? Show your work.
\item What is the probability distribution of $x_c^2$? Give the values with their probabilities.
\item What is $E(x_c^2)$? Show your work.
\end{enumerate}
\item What is $Var(x)$? If you have been paying attention, you don't have to show any work.
\item Calculate $E(y)$. Show your work.
\item Calculate $Var(y)$. Show your work. You may use Question~\ref{handyA} if you wish.
\item Let $z_1 = g_1(x,y) = x+y$. What is the probability distribution of $z_1$? Show some work.
\item Calculate $E(z_1)$. Show your work.
\item Do we have $E(x+y) = E(x)+E(y)$? Answer Yes or No. Note that the answer \emph{does not require independence}, or even zero covariance.
\item Let $z_2 = g_2(x,y) = xy$. What is the probability distribution of $z_2$? List the values with their probabilities. Show some work.
\item Calculate $E(z_2)$. Show your work.
\item Do we have $E(xy) = E(x)E(y)$? Answer Yes or No.
\item Using the well-known formula of Question~\ref{handyB}, what is $Cov(x,y)$?
\item Are $x$ and $y$ independent? Answer Yes or No and show some work.
\end{enumerate}

\item \label{prod} Let $x_1$ and $x_2$ be continuous random variables that are \emph{independent}. Using the expression for $E(g(\mathbf{x}))$ at the beginning of this assignment, show $E(x_1 x_2) = E(x_1)E(x_2)$. Draw an arrow to the place in your answer where you use independence, and write ``This is where I use independence.'' Because $x_1$ and $x_2$ are continuous, you will need to integrate. Does your proof still apply if $x_1$ and $x_2$ are discrete?

\item Using the definitions of variance and covariance along with the linearity property $E(\sum_{i=1}^na_iy_i) = \sum_{i=1}^na_iE(y_i)$ (no integrals), show the following:
\begin{enumerate}
\item \label{handyA} $Var(y) = E(y^2)-\mu_y^2$
\item \label{handyB} $Cov(x,y)=E(xy)-E(x)E(y)$
\item If $x$ and $y$ are independent, $Cov(x,y) = 0$. Of course you may use Problem~\ref{prod}.
\end{enumerate}

\item Let $x$ be a random variable and let $a$ be a constant. Show
\begin{enumerate}
\item $Var(ax) = a^2Var(x)$.
\item $Var(x+a) = Var(x)$.
\end{enumerate}

\item Show $Var(x+y) = Var(x) + Var(y) + 2Cov(x,y)$.

\item Let $x$ and $y$ be random variables, and let $a$ and $b$ be constants. Show $Cov(x+a,y+b) = Cov(x,y)$.

\item Let $x$ and $y$ be random variables, with $E(x)=\mu_x$, $E(y)=\mu_y$, $Var(x)=\sigma^2_x$, $Var(y)=\sigma^2_y$, $Cov(x,y) = \sigma_{xy}$ and $Corr(x,y) = \rho_{xy}$. Let $a$ and $b$ be non-zero constants.
\begin{enumerate}
\item Find $Cov(ax,by)$.
\item Find $Corr(ax,by)$. Do not forget that $a$ and $b$ could be negative.
\end{enumerate}

\item Let $x_1$ and $x_2$ be discrete random variables. Using the formula for $E(g(\mathbf{x}))$ (note that $\mathbf{x}$ is a vector), prove $E(x_1+x_2) = E(x_1) + E(x_2)$. If you assume independence, you get a zero.
Does your proof still apply if $x_1$ and $x_2$ are continuous?

\item Let $y_1, \ldots, y_n$ be independent random variables with $E(y_i)=\mu$ and $Var(y_i)=\sigma^2$ for $i=1, \ldots, n$. For this question, please use definitions and familiar properties of expected value, not integrals.
\begin{enumerate}
\item Find $E(\sum_{i=1}^ny_i)$. Are you using independence?
\item Find $Var\left(\sum_{i=1}^n y_i\right)$. What earlier questions are you using in connection with independence?
\item Using your answer to the last question, find $Var(\overline{y})$.
\item A statistic $T$ is an \emph{unbiased estimator} of a parameter $\theta$ if $E(T)=\theta$. Show that $\overline{y}$ is an unbiased estimator of $\mu$.
\item Let $a_1, \ldots, a_n$ be constants and define the linear combination $L$ by $L = \sum_{i=1}^n a_i y_i$. Show that if $\sum_{i=1}^n a_i = 1$, then $L$ is an unbiased estimator of $\mu$.
\item Is $\overline{y}$ a special case of $L$? If so, what are the $a_i$ values?
\item What is $Var(L)$ for general $L$?
\end{enumerate}

\pagebreak

%%%%%%%%%%%%%%%%%%%%%%% Basic matrices %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item \label{numbers} Let
$\mathbf{A} = \left( \begin{array}{rr} 2 & 5 \\ 1 & -4 \\ 0 & 3 \end{array} \right)$ and
$\mathbf{B} = \left( \begin{array}{rr} 1 & 0 \\ 2 & 3 \\ -1 & 3 \end{array} \right)$
be matrices of constants. Which of the following are possible to compute? Don't do the calculations. Just answer each one Yes or No.
\renewcommand{\arraystretch}{1.5}
\begin{displaymath}
\begin{array}{lll}
(a)~\mathbf{A}^{-1} & (b)~|\mathbf{B}| & (c)~\mathbf{A}+\mathbf{B} \\
(d)~\mathbf{A}-\mathbf{B} & (e)~\mathbf{AB} & (f)~\mathbf{BA} \\
(g)~\mathbf{A}^\prime\mathbf{B} & (h)~\mathbf{B}^\prime\mathbf{A} & (i)~\mathbf{A}/\mathbf{B} \\
\end{array}
\end{displaymath}
\renewcommand{\arraystretch}{1.0}

\item For the matrices of Question~\ref{numbers}, calculate $\mathbf{A}^\prime\mathbf{B}$. My answer is
$\mathbf{A}^\prime\mathbf{B} = \left( \begin{array}{rr} 4 & 3 \\ -6 & -3 \end{array} \right)$.

\item Let $\mathbf{c} = \left( \begin{array}{r} 2 \\ 1 \\ 0 \end{array} \right)$ and $\mathbf{d} = \left( \begin{array}{r} 1 \\ 2 \\ -1 \end{array} \right)$. Verify that $\mathbf{c}^\prime\mathbf{d} = 4$ and
$\mathbf{c}\mathbf{d}^\prime = \left( \begin{array}{rrr} 2 & 4 & -2 \\ 1 & 2 & -1 \\ 0 & 0 & 0 \end{array} \right)$.

\item Let
\begin{tabular}{ccc}
$\mathbf{A} = \left( \begin{array}{c c} 1 & 2 \\ 2 & 4 \end{array} \right) $ &
$\mathbf{B} = \left( \begin{array}{c c} 0 & 2 \\ 2 & 1 \end{array} \right) $ &
$\mathbf{C} = \left( \begin{array}{c c} 2 & 0 \\ 1 & 2 \end{array} \right) $
\end{tabular}
\begin{enumerate}
\item Calculate $\mathbf{AB}$ and $\mathbf{AC}$.
\item Do we have $\mathbf{AB} = \mathbf{AC}$? Answer Yes or No.
\item Do we have $\mathbf{B} = \mathbf{C}$? Answer Yes or No. What does this tell you about ``cancelling'' $\mathbf{A}$ from both sides of $\mathbf{AB} = \mathbf{AC}$?
\end{enumerate}

\item Matrix multiplication does not commute. That is, if $\mathbf{A}$ and $\mathbf{B}$ are matrices, in general it is \emph{not} true that $\mathbf{AB} = \mathbf{BA}$ unless both matrices are $1 \times 1$. Establish this important fact by making up a simple numerical example in which $\mathbf{A}$ and $\mathbf{B}$ are both $2 \times 2$ matrices. Carry out the multiplication, showing $\mathbf{AB} \neq \mathbf{BA}$.

\item Let $\mathbf{A}$ be a square matrix with the determinant of $\mathbf{A}$ (denoted $|\mathbf{A}|$) equal to zero. What does this tell you about $\mathbf{A}^{-1}$? No proof is required here.

\item Recall that $\mathbf{A}$ symmetric means $\mathbf{A=A^\prime}$.
Let $\mathbf{X}$ be an $n$ by $p$ matrix. Prove that $\mathbf{X^\prime X}$ is symmetric.

\item Let $\mathbf{X}$ be an $n$ by $p$ matrix with $n \neq p$. Why is it incorrect to say that $(\mathbf{X^\prime X})^{-1}= \mathbf{X}^{-1}\mathbf{X}^{\prime -1}$?

\end{enumerate}

% \vspace{130mm}

\noindent
\begin{center}\begin{tabular}{l}
\hspace{6in} \\ \hline
\end{tabular}\end{center}
This assignment was prepared by \href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of Statistical Sciences, University of Toronto. It is licensed under a
\href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}
{Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part of it as you like and share the result freely. The \LaTeX~source code is available from the course website:
\href{http://www.utstat.toronto.edu/~brunner/oldclass/302f17}
{\small\texttt{http://www.utstat.toronto.edu/$^\sim$brunner/oldclass/302f17}}

\end{document}