% 302f15Assignment4.tex
\documentclass[11pt]{article}
%\usepackage{amsbsy} % for \boldsymbol and \pmb
\usepackage{graphicx} % To include pdf files!
\usepackage{amsmath}
\usepackage{amsbsy}
\usepackage{amsfonts}
\usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue, citecolor=blue, urlcolor=blue]{hyperref} % For links
\usepackage{fullpage}
%\pagestyle{empty} % No page numbers

\begin{document}
%\enlargethispage*{1000 pt}
\begin{center}
{\Large \textbf{STA 302f15 Assignment Four}}\footnote{Copyright information is at the end of the last page.}
\vspace{1 mm}
\end{center}

\noindent For this assignment, Chapter 3 in the text contains material on random vectors, and Chapter 6 contains material on simple regression; of Chapter 6, see just Sections 6.1 and 6.2. You are responsible for what is in this assignment, not everything that's in the text.

This exercise set has an unusual feature. \emph{Some of the questions ask you to prove things that are false}. That is, they are not true in general. In such cases, just write ``The statement is false,'' and give a brief explanation to make it clear that you are not just guessing. The explanation is essential for full marks. A small counter-example is always good enough.

All the problems are preparation for the quiz in tutorial on Friday October 8th, and are not to be handed in.

\begin{enumerate}
\item Do problem 3.9 in the text.

\item Let $\mathbf{X} = [X_{ij}]$ be a random matrix. Show $E(\mathbf{X}^\prime) = E(\mathbf{X})^\prime$.

\item Let $\mathbf{X}$ be a random matrix, and $\mathbf{B}$ be a matrix of constants. Show $E(\mathbf{XB})=E(\mathbf{X})\mathbf{B}$. Recall the definition $\mathbf{AB}=[\sum_{k}a_{i,k}b_{k,j}]$.
%Let $\mathbf{X}$ and $\mathbf{Y}$ be random matrices of the same dimensions. Show
%$E(\mathbf{X} + \mathbf{Y})=E(\mathbf{X})+E(\mathbf{Y})$. Recall the definition
%$E(\mathbf{Z})=[E(Z_{i,j})]$.
\item Let the $p \times 1$ random vector $\mathbf{X}$ have expected value $\boldsymbol{\mu}$ and variance-covariance matrix $\mathbf{\Sigma}$, and let $\mathbf{A}$ be an $m \times p$ matrix of constants. Prove that the variance-covariance matrix of $\mathbf{AX}$ is either
\begin{itemize}
\item $\mathbf{A} \boldsymbol{\Sigma} \mathbf{A}^\prime$, or
\item $\mathbf{A}^2 \boldsymbol{\Sigma}$.
\end{itemize}
Pick one and prove it. Start with the definition of a variance-covariance matrix on the formula sheet.

% \item Let the $p \times 1$ random vector $\mathbf{X}$ have variance-covariance matrix $\mathbf{\Sigma}$.
% Prove that $\mathbf{\Sigma}$ must be non-negative definite.

\item Do problem 3.10 in the text.
% If the $p \times 1$ random vector $\mathbf{X}$ has mean $\boldsymbol{\mu}$ and variance-covariance
% matrix $\mathbf{\Sigma}$, show $\mathbf{\Sigma} = E(\mathbf{XX}^\prime) - \boldsymbol{\mu \mu}^\prime$.

\item Let $\mathbf{X}$ be a $p \times 1$ random vector. Starting with the definition on the formula sheet, prove $cov(\mathbf{X})=\mathbf{0}$. % FALSE

\item Let the $p \times 1$ random vector $\mathbf{X}$ have mean $\boldsymbol{\mu}$ and variance-covariance matrix $\mathbf{\Sigma}$, let $\mathbf{A}$ be an $r \times p$ matrix of constants, and let $\mathbf{c}$ be an $r \times 1$ vector of constants. Find $cov(\mathbf{AX}+\mathbf{c})$. Show your work.

\item Let the scalar random variable $Y = \mathbf{v}^\prime \mathbf{X}$, where $\mathbf{v}$ is a $p \times 1$ vector of constants. What is $Var(Y)$? Use this to prove that \emph{any} variance-covariance matrix must be positive semi-definite.

\item The square matrix $\mathbf{A}$ has an eigenvalue equal to $\lambda$ with corresponding eigenvector $\mathbf{x} \neq \mathbf{0}$ if $\mathbf{Ax} = \lambda\mathbf{x}$.
\begin{enumerate}
\item Show that the eigenvalues of a variance-covariance matrix cannot be negative.
\item How do you know that the determinant of a variance-covariance matrix must be greater than or equal to zero? The answer is one short sentence.
\item Let $X$ and $Y$ be scalar random variables. Recall $Corr(X,Y) = \frac{Cov(X,Y)}{\sqrt{Var(X)Var(Y)}}$. Using what you have shown about the determinant, show $-1 \leq Corr(X,Y) \leq 1$.
% You have just proved the Cauchy-Schwarz inequality using probability tools.
\end{enumerate}

\item Let $\mathbf{X}$ be a $p \times 1$ random vector with mean $\boldsymbol{\mu}_x$ and variance-covariance matrix $\mathbf{\Sigma}_x$, and let $\mathbf{Y}$ be a $q \times 1$ random vector with mean $\boldsymbol{\mu}_y$ and variance-covariance matrix $\mathbf{\Sigma}_y$.
\begin{enumerate}
\item What is the $(i,j)$ element of $cov(\mathbf{X},\mathbf{Y})$? See the definition on the formula sheet.
\item Find an expression for $cov(\mathbf{X}+\mathbf{Y})$ in terms of $\mathbf{\Sigma}_x$, $\mathbf{\Sigma}_y$ and $cov(\mathbf{X},\mathbf{Y})$. Show your work.
\item Simplify further for the special case where $Cov(X_i,Y_j)=0$ for all $i$ and $j$.
\item Let $\mathbf{c}$ be a $p \times 1$ vector of constants and $\mathbf{d}$ be a $q \times 1$ vector of constants. Find $ cov(\mathbf{X}+\mathbf{c}, \mathbf{Y}+\mathbf{d})$. Show your work.
\end{enumerate}

\item Starting with the definition on the formula sheet, show $cov(\mathbf{X,Y})=cov(\mathbf{Y,X})$. % FALSE

\item Starting with the definition on the formula sheet, show $cov(\mathbf{X,Y})=\mathbf{0}$. % FALSE

\item Do problem 3.20 in the text. The answer is in the back of the book.

\item Do problem 3.21 in the text. The answer is in the back of the book.

\vspace{2mm}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\hrule

\item In the textbook, do Problems 6.1, 6.2 and 6.3a.

\item For the simple linear regression model (6.1) in the text,
\begin{enumerate}
\item What is $E(y_i)$?
\item What is $Var(y_i)$?
\item What is $Cov(y_i,y_j)$ for $i \neq j$?
\item Differentiate to obtain (6.3) and (6.4).
\item Prove that $\sum_{i=1}^n\widehat{\epsilon}_i = 0$.
\item Prove that the least squares line always goes through the point $(\overline{x},\overline{y})$. \end{enumerate} \item Do problems 6.5 and 6.10 in the text. \end{enumerate} \vspace{30mm} \noindent \begin{center}\begin{tabular}{l} \hspace{6in} \\ \hline \end{tabular}\end{center} This assignment was prepared by \href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of Statistical Sciences, University of Toronto. It is licensed under a \href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US} {Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part of it as you like and share the result freely. The \LaTeX~source code is available from the course website: \href{http://www.utstat.toronto.edu/~brunner/oldclass/302f15} {\small\texttt{http://www.utstat.toronto.edu/$^\sim$brunner/oldclass/302f15}} \end{document}