\documentclass[11pt]{article}
%\usepackage{amsbsy} % for \boldsymbol and \pmb
\usepackage{graphicx} % To include pdf files!
\usepackage{amsmath}
\usepackage{amsbsy}
\usepackage{amsfonts}
\usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue, citecolor=blue, urlcolor=blue]{hyperref} % For links
% \usepackage{fullpage}
%\pagestyle{empty} % No page numbers
% To use more of the top and bottom margins than fullpage
\oddsidemargin=-.2in % Good for US Letter paper
\evensidemargin=-.2in
\textwidth=6.6in
\topmargin=-1.1in
\headheight=0.2in
\headsep=0.5in
\textheight=9.4in

\begin{document}
%\enlargethispage*{1000 pt}
\begin{center}
{\Large \textbf{STA 2053 Assignment 1 (Review)}}\footnote{This assignment was prepared by \href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of Statistical Sciences, University of Toronto. It is licensed under a \href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US} {Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part of it as you like and share the result freely. The \LaTeX~source code is available from the course website: \href{http://www.utstat.toronto.edu/brunner/oldclass/2053f22} {\texttt{http://www.utstat.toronto.edu/brunner/oldclass/2053f22}}}
\vspace{1 mm}
\end{center}

\noindent Questions \ref{ABneqBA} through \ref{SigInvPD} are not to be handed in. They are practice for the quiz on September 26th. Bring hard copy of your input and output for Question~\ref{mlequestion} to the quiz. It may be handed in.

\begin{enumerate}
\item \label{ABneqBA} % A2.11
Let $\mathbf{A}$ and $\mathbf{B}$ be $2 \times 2$ matrices.
Either
\begin{itemize}
\item Prove $\mathbf{AB} = \mathbf{BA}$, or
\item Give a numerical example in which $\mathbf{AB} \neq \mathbf{BA}$
\end{itemize}

\item % A2.14
The formal definition of a matrix inverse is that an inverse of the matrix $\mathbf{A}$ (denoted $\mathbf{A}^{-1}$) is defined by two properties: $\mathbf{A}^{-1}\mathbf{A=I}$ and $\mathbf{AA}^{-1}=\mathbf{I}$. If you want to prove that one matrix is the inverse of another using the definition, you'd have two things to show. This homework problem establishes that you only need to do it in one direction. Let $\mathbf{A}$ and $\mathbf{B}$ be square matrices with $\mathbf{AB} = \mathbf{I}$. Show that $\mathbf{A} = \mathbf{B}^{-1}$ and $\mathbf{B} = \mathbf{A}^{-1}$. To make it easy, use well-known properties of determinants.

\item % A2.15
Prove that inverses are unique, as follows. Let $\mathbf{B}$ and $\mathbf{C}$ both be inverses of $\mathbf{A}$. Show that $\mathbf{B=C}$.

\item % A2.16
Let $\mathbf{X}$ be an $n$ by $p$ matrix with $n \neq p$. Why is it incorrect to say that $(\mathbf{X^\top X})^{-1}= \mathbf{X}^{-1}\mathbf{X}^{\top -1}$?

\item % A2.17
Suppose that the matrices $\mathbf{A}$ and $\mathbf{B}$ both have inverses. Prove that $\mathbf{(AB)}^{-1} = \mathbf{B}^{-1}\mathbf{A}^{-1}$.

\item % A2.18
\label{ivt} Let $\mathbf{A}$ be a non-singular matrix. Prove $(\mathbf{A}^{-1})^\top=(\mathbf{A}^\top)^{-1}$.

\item % A2.19
Using $(\mathbf{A}^{-1})^\top=(\mathbf{A}^\top)^{-1}$, show that the inverse of a symmetric matrix is also symmetric.

\item % A2.21
Let $\mathbf{a}$ be an $n \times 1$ matrix of real constants. How do you know $\mathbf{a}^\top\mathbf{a}\geq 0$?

\item % 2.27
Recall that the square matrix $\mathbf{A}$ is said to have an eigenvalue $\lambda$ and corresponding eigenvector $\mathbf{x} \neq \mathbf{0}$ if $\mathbf{Ax} = \lambda\mathbf{x}$.
\begin{enumerate}
\item Suppose that an eigenvalue of $\mathbf{A}$ equals zero. Show that the columns of $\mathbf{A}$ are linearly dependent.
\item Suppose that the columns of $\mathbf{A}$ are linearly dependent. Show that $\mathbf{A}^{-1}$ does not exist.
\item Suppose that the columns of $\mathbf{A}$ are linearly independent. Show that the eigenvalues of $\mathbf{A}$ are all non-zero.
\item Suppose $\mathbf{A}^{-1}$ exists. Show that the eigenvalues of $\mathbf{A}^{-1}$ are the reciprocals of the eigenvalues of $\mathbf{A}$. What about the eigenvectors?
\end{enumerate}

\item % A2.28
The (square) matrix $\boldsymbol{\Sigma}$ is said to be \emph{positive definite} if $\mathbf{a}^\top \boldsymbol{\Sigma} \mathbf{a} > 0$ for all vectors $\mathbf{a} \neq \mathbf{0}$. Show that the diagonal elements of a positive definite matrix are positive numbers. Hint: Choose the right vector $\mathbf{a}$.

\item \label{top} % A2.29
Show that the eigenvalues of a positive definite matrix are strictly positive.
% Hint: the $\mathbf{a}$ you want is an eigenvector.

% \vspace{25mm}
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item % A2.30
Recall the \emph{spectral decomposition} of a real symmetric matrix (For example, a variance-covariance matrix). Any such matrix $\boldsymbol{\Sigma}$ can be written as $\boldsymbol{\Sigma} = \mathbf{CDC}^\top$, where $\mathbf{C}$ is a matrix whose columns are the (orthonormal) eigenvectors of $\boldsymbol{\Sigma}$, $\mathbf{D}$ is a diagonal matrix of the corresponding (non-negative) eigenvalues, and $\mathbf{C}^\top\mathbf{C} =~\mathbf{CC}^\top =~\mathbf{I}$.
\begin{enumerate}
\item Let $\boldsymbol{\Sigma}$ be a real symmetric matrix with eigenvalues that are all strictly positive.
\begin{enumerate}
\item What is $\mathbf{D}^{-1}$?
\item Show $\boldsymbol{\Sigma}^{-1} = \mathbf{C} \mathbf{D}^{-1} \mathbf{C}^\top$. So, the inverse exists.
\end{enumerate}
\item Let the eigenvalues of $\boldsymbol{\Sigma}$ be non-negative.
\begin{enumerate}
\item What do you think $\mathbf{D}^{1/2}$ might be?
\item Define $\boldsymbol{\Sigma}^{1/2}$ as $\mathbf{C} \mathbf{D}^{1/2} \mathbf{C}^\top$. Show $\boldsymbol{\Sigma}^{1/2}$ is symmetric.
\item Show $\boldsymbol{\Sigma}^{1/2}\boldsymbol{\Sigma}^{1/2} = \boldsymbol{\Sigma}$.
\item Show that if the columns of $\boldsymbol{\Sigma}$ are linearly independent, then the columns of $\boldsymbol{\Sigma}^{1/2}$ are also linearly independent.
\end{enumerate}
\item Now return to the situation where the eigenvalues of the square symmetric matrix $\boldsymbol{\Sigma}$ are all strictly positive. Define $\boldsymbol{\Sigma}^{-1/2}$ as $\mathbf{C} \mathbf{D}^{-1/2} \mathbf{C}^\top$, where the elements of the diagonal matrix $\mathbf{D}^{-1/2}$ are the reciprocals of the corresponding elements of $\mathbf{D}^{1/2}$.
\begin{enumerate}
\item Show that the inverse of $\boldsymbol{\Sigma}^{1/2}$ is $\boldsymbol{\Sigma}^{-1/2}$, justifying the notation.
\item Show $\boldsymbol{\Sigma}^{-1/2} \boldsymbol{\Sigma}^{-1/2} = \boldsymbol{\Sigma}^{-1}$.
\end{enumerate}
\end{enumerate}

\item \label{bot} % A2.31
Let $\boldsymbol{\Sigma}$ be a real symmetric matrix.
\begin{enumerate}
\item Suppose that $\boldsymbol{\Sigma}$ has an inverse. Using the definition of linear independence, show that the columns of $\boldsymbol{\Sigma}$ are linearly independent.
\item Let the columns of $\boldsymbol{\Sigma}$ be linearly independent, and also let $\boldsymbol{\Sigma}$ be at least non-negative definite (as, for example, a variance-covariance matrix must be). Show that $\boldsymbol{\Sigma}$ is strictly positive definite.
\end{enumerate}
% For a symmetric real matrix that is non-negative definite (as, for example, a variance-covariance matrix must be)
Problems~\ref{top} through~\ref{bot} establish that positive definite, positive eigenvalues, inverse exists and linearly independent columns are all equivalent.
\item \label{SigInvPD} % A2.32
Show that if the real symmetric matrix $\boldsymbol{\Sigma}$ is positive definite, then $\boldsymbol{\Sigma}^{-1}$ is also positive definite.

%%%%%%%%%%%%%%%%%%%%%%%%

\item \label{mlequestion} Let $x_1, \ldots, x_n$ be a random sample from a distribution with density
\begin{equation*}
f(x;\mu,\alpha) = \frac{\alpha e^{\alpha (x-\mu)}} {\left(1 + e^{\alpha (x-\mu)} \right)^2},
\end{equation*}
for all real $x$, where $\alpha>0$ and $-\infty < \mu < \infty$. A sample of size $n=200$ is available \href{https://www.utstat.toronto.edu/brunner/openSEM/data/mystery2.data.txt} {here}. You can get the data into R with \\
\texttt{scan("https://www.utstat.toronto.edu/brunner/openSEM/data/mystery2.data.txt")}.
\begin{enumerate}
\item Find the maximum likelihood estimates of $\mu$ and $\alpha$; the answers are numbers. For a brief discussion of numerical maximum likelihood, see Section A.6.4 in Appendix A. Example A.6.2 is relevant. The \texttt{optim} function is better than \texttt{nlm}, even though \texttt{nlm} works in this case. See \texttt{help(optim)}. I need to re-do this example in the text.
\item Give approximate 95\% confidence intervals for $\mu$ and $\alpha$. The answers are two pairs of numbers, a lower and an upper confidence limit for each parameter. Here, you may want to look at Section A.6.6 in Appendix A. I suggest using the inverse of the Hessian matrix (also known as the ``observed'' Fisher information) to approximate the asymptotic covariance matrix of the parameter estimates. My lower confidence limit for $\mu$ is 1.895; my upper confidence limit for $\alpha$ is 3.183.
\end{enumerate}
\end{enumerate} % End of questions

\vspace{2mm}
\noindent Please bring your \underline{complete} R printout from Question~\ref{mlequestion} to the quiz, showing all input and output. It may be handed in.

\end{document}