\documentclass[10pt]{article}
%\usepackage{amsbsy} % for \boldsymbol and \pmb
%\usepackage{graphicx} % To include pdf files!
\usepackage{amsmath}
\usepackage{amsbsy}
\usepackage{amsfonts}
\usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue, citecolor=blue, urlcolor=blue]{hyperref} % For links
\oddsidemargin=0in % Good for US Letter paper
\evensidemargin=0in
\textwidth=6.3in
\topmargin=-0.5in
\headheight=0.1in
\headsep=0.1in
\textheight=9.4in
\pagestyle{empty} % No page numbers
\begin{document}
\enlargethispage*{1000 pt}
\begin{center}
{\Large \textbf{STA 305 Formulas}}\\ % Version 3
\vspace{1 mm}
\end{center}
\noindent \renewcommand{\arraystretch}{2.0}
\begin{tabular}{lll}
$Var(Y) = E\{(Y-\mu_y)^2\}$ & ~~~~~ & $Cov(Y,T) = E\{(Y-\mu_y)(T-\mu_t)\}$ \\
$cov(\mathbf{Y}) = E\left\{(\mathbf{Y}-\boldsymbol{\mu}_y)(\mathbf{Y}-\boldsymbol{\mu}_y)^\prime\right\}$ & ~~~~~ & $C(\mathbf{Y,T}) = E\left\{ (\mathbf{Y}-\boldsymbol{\mu}_y) (\mathbf{T}-\boldsymbol{\mu}_t)^\prime\right\}$ \\
\multicolumn{3}{l}{If $W=W_1+W_2$ with $W_1$ and $W_2$ independent, $W\sim\chi^2(\nu_1+\nu_2)$, $W_2\sim\chi^2(\nu_2)$ then $W_1\sim\chi^2(\nu_1)$} \\
$T = \frac{Z}{\sqrt{W/\nu}} \sim t(\nu)$ & ~~~~~ & $F = \frac{W_1/\nu_1}{W_2/\nu_2} \sim F(\nu_1,\nu_2)$ \\
\multicolumn{3}{l}{For the multivariate normal distribution \emph{only}, zero covariance implies independence.} \\
If $\mathbf{Y} \sim N_p(\boldsymbol{\mu}, \boldsymbol{\Sigma})$, then $\mathbf{AY} \sim N_q(\mathbf{A}\boldsymbol{\mu}, \mathbf{A}\boldsymbol{\Sigma}\mathbf{A}^\prime)$, & ~~~~~ & and $W = (\mathbf{Y}-\boldsymbol{\mu})^\prime \boldsymbol{\Sigma}^{-1}(\mathbf{Y}-\boldsymbol{\mu}) \sim \chi^2(p)$ \\
$Y_i = \beta_0 + \beta_1 x_{i1} + \cdots + \beta_{p-1} x_{i,p-1} + \epsilon_i$ & ~~~~~ & $\epsilon_1, \ldots, \epsilon_n$ independent $N(0,\sigma^2)$ \\
$\mathbf{Y} = \mathbf{X} \boldsymbol{\beta} + \boldsymbol{\epsilon}$ & ~~~~~ & $\boldsymbol{\epsilon} \sim N_n(\mathbf{0},\sigma^2\mathbf{I}_n)$ \\
$\widehat{\boldsymbol{\beta}} = (\mathbf{X}^\prime \mathbf{X})^{-1} \mathbf{X}^\prime \mathbf{Y} $ & ~~~~~ & $\widehat{\mathbf{Y}} = \mathbf{X}\widehat{\boldsymbol{\beta}} = \mathbf{HY}$, where $\mathbf{H} = \mathbf{X}(\mathbf{X}^\prime \mathbf{X})^{-1} \mathbf{X}^\prime $ \\
$\sum_{i=1}^n(Y_i-\overline{Y})^2 = \sum_{i=1}^n(Y_i-\widehat{Y}_i)^2 + \sum_{i=1}^n(\widehat{Y}_i-\overline{Y})^2$ & ~~~~~ & $SST=SSE+SSR$ and $R^2 = \frac{SSR}{SST}$ \\
$\widehat{\boldsymbol{\epsilon}} = \mathbf{Y} - \widehat{\mathbf{Y}}$ & ~~~~~ & $\widehat{\boldsymbol{\beta}} \sim N_p\left(\boldsymbol{\beta}, \sigma^2 (\mathbf{X}^\prime \mathbf{X})^{-1}\right)$ \\
$\widehat{\boldsymbol{\beta}}$ and $\widehat{\boldsymbol{\epsilon}}$ are independent under normality. & ~~~~~ & $SSE/\sigma^2 = \hat{\boldsymbol{\epsilon}}^\prime \hat{\boldsymbol{\epsilon}}/\sigma^2 \sim \chi^2(n-p)$ \\
$T = \frac{\mathbf{a}^\prime \widehat{\boldsymbol{\beta}}-\mathbf{a}^\prime \boldsymbol{\beta}} {\sqrt{MSE \, \mathbf{a}^\prime (\mathbf{X}^\prime \mathbf{X})^{-1}\mathbf{a}}} \sim t(n-p)$ & ~~~~~ & $F^* = \frac{(\mathbf{C}\widehat{\boldsymbol{\beta}}-\mathbf{t})^\prime (\mathbf{C}(\mathbf{X}^\prime \mathbf{X})^{-1}\mathbf{C}^\prime)^{-1} (\mathbf{C}\widehat{\boldsymbol{\beta}}-\mathbf{t})} {q \, MSE} \sim F(q,n-p,\lambda)$ \\
$F^* = \frac{SSR-SSR(\text{reduced})}{q \, MSE} \sim F(q,n-p,\lambda)$ & ~~~~~ & where $MSE = \frac{SSE}{n-p}$ \\
$ \lambda = \frac{(\mathbf{C}\boldsymbol{\beta}-\mathbf{t})^\prime (\mathbf{C}(\mathbf{X}^\prime \mathbf{X})^{-1}\mathbf{C}^\prime)^{-1} (\mathbf{C}\boldsymbol{\beta}-\mathbf{t})} {\sigma^2}$ & ~~~~~ & \\
\multicolumn{3}{l}{Simple random sample of $n$ units from $N$ without replacement.
$Z_i=1$ if unit $i$ is chosen, zero otherwise.} \\
$E(Z_i)= P(Z_i=1)= \frac{n}{N}$ & ~~~~~ & $\overline{y}_u = \frac{1}{N}\sum_{i=1}^N y_i$ \\
$\overline{y} = \frac{1}{n}\sum_{i=1}^N Z_i y_i$ & ~~~~~ & $S^2 = \frac{1}{N-1}\sum_{i=1}^N (y_i-\overline{y}_u)^2$ \\
$c = a_1\mu_1 + a_2\mu_2 + \cdots + a_p\mu_p$ & ~~~~~ & $\widehat{c} = a_1\overline{Y}_1 + a_2\overline{Y}_2 + \cdots + a_p\overline{Y}_p$ \\
$Pr\left\{ \cup_{j=1}^k A_j \right\} \leq \sum_{j=1}^k Pr\{A_j\}$ & ~~~~~ & Reject $H_0$ with a Scheff\'e test if $F_2 > \frac{q}{s}f_\alpha(q,n-p)$ \\
$n = \frac{\sigma^2 \, z_{\alpha/2}^2 \sum_{j=1}^p\frac{a_j^2}{f_j} }{m^2}$ & ~~~~~ &
\renewcommand{\arraystretch}{1.0}
\begin{tabular}{ccccc}
$1-\alpha$ & 0.80 & 0.90 & 0.95 & 0.99 \\
$z_{\alpha/2}$ & 1.28 & 1.64 & 1.96 & 2.58
\end{tabular}
\renewcommand{\arraystretch}{1.5}
\end{tabular} \renewcommand{\arraystretch}{1.0}
\vspace{2mm}
\noindent
\vspace{20mm}
%\vspace{120mm}
\noindent
\begin{center}\begin{tabular}{l} \hspace{6.5in} \\ \hline \end{tabular}\end{center}
This formula sheet was prepared by
\href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of
Statistics, University of Toronto. It is licensed under a
\href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}
{Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part
of it as you like and share the result freely. The \LaTeX~source code is
available from the course website:
\href{http://www.utstat.toronto.edu/~brunner/oldclass/305s14}
{\texttt{http://www.utstat.toronto.edu/\textasciitilde brunner/oldclass/305s14}}
\end{document}
% NOTE(review): leftover material below \end{document} is ignored by LaTeX;
% retained verbatim in case it is wanted back in the sheet.
\begin{tabular}{ccccc}
$1-\alpha$ & 0.80 & 0.90 & 0.95 & 0.99 \\
$z_{\alpha/2}$ & 1.28 & 1.64 & 1.96 & 2.58
\end{tabular}
\multicolumn{3}{l}{Columns of $\mathbf{A}$ \emph{linearly independent} means that $\mathbf{Av} = \mathbf{0}$ implies $\mathbf{v} = \mathbf{0}$.} \\
$\boldsymbol{\Sigma} = \mathbf{CD} \mathbf{C}^\prime$