% \documentclass[serif]{beamer} % Get Computer Modern math font.
\documentclass[serif, handout]{beamer} % Handout mode to ignore pause statements
\hypersetup{colorlinks,linkcolor=,urlcolor=red}
\usefonttheme{serif} % Looks like Computer Modern for non-math text -- nice!
\setbeamertemplate{navigation symbols}{} % Suppress navigation symbols
% \usetheme{Berlin} % Displays sections on top
\usetheme{Frankfurt} % Displays section titles on top: Fairly thin but still swallows some material at bottom of crowded slides
%\usetheme{Berkeley}
\usepackage{graphpap} % For graph paper in picture
\usepackage[english]{babel}
\usepackage{amsmath} % for binom
% \usepackage{graphicx} % To include pdf files!
% \definecolor{links}{HTML}{2A1B81}
% \definecolor{links}{red}
\setbeamertemplate{footline}[frame number]
\mode<presentation>
% \mode<handout>{\setbeamercolor{background canvas}{bg=black!5}} % Comment this out for handout

\title{Analysis of within-cases normal data\footnote{See last slide for copyright information.}}
\subtitle{STA442/2101 Fall 2017}
\date{} % To suppress date
% Trying to shift big equations a bit to the left
\setbeamersize{text margin left = 0.5cm}

\begin{document}

\begin{frame}
\titlepage
\end{frame}

\begin{frame}
\frametitle{Overview}
\tableofcontents
\end{frame}

% I CUT OUT THE SAS PARTS: SEE 2012
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Within Cases}

\begin{frame}{Independent Observations}%{Subtitles are optional.}
\begin{itemize}
\item Most statistical models assume independent observations. \pause
\item Sometimes the assumption of independence is unreasonable. \pause
\item For example, time series and within-cases designs.
\end{itemize}
\end{frame}

\begin{frame}{Within Cases}
\begin{itemize}
\item A case contributes a value of the response variable for every value of a categorical explanatory variable. \pause
\item As opposed to \emph{between-cases} explanatory variables\pause, which partition the sample. \pause
\item It is natural to expect data from the same case to be correlated, \emph{not} independent. \pause
\item For example, the same subject appears in several treatment conditions. \pause
\item Hearing study: How does pitch affect our ability to hear faint sounds? \pause Subjects are presented with tones at a variety of different pitch and volume levels (in a random order). They press a key when they think they hear something. \pause
\item A study can have both within-cases and between-cases factors.
\end{itemize}
\end{frame}

\begin{frame}{You may hear terms like}
\pause
\begin{itemize}
\item \textbf{Longitudinal}: The same variables are measured repeatedly over time. Usually lots of variables, including categorical ones, and large samples. If there's an experimental treatment, it's usually applied once at the beginning, like a surgery. Basically it's \emph{tracking} what happens over time. \pause
\item \textbf{Repeated measures}: Usually, the same subjects experience two or more experimental treatments. Usually categorical explanatory variables and small samples.
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{A simple model}
\framesubtitle{Think of a problem solving study}
\pause
\begin{itemize}
\item Each case contributes an individual shock that pushes all the data values from that case up or down by the same amount. \pause
\item Observations from that case are not independent. \pause
\item Example: Matched $t$-test. \pause
\begin{eqnarray*}
Y_{i,1} & = & \mu_1 + \tau_i + \epsilon_{i,1} \\
Y_{i,2} & = & \mu_2 + \tau_i + \epsilon_{i,2} \\ \pause
d_i & = & (\mu_1-\mu_2) + (\epsilon_{i,1}-\epsilon_{i,2})
\end{eqnarray*} \pause
\item The random shock from the case cancels. \pause
\item Each case serves as its own control. \pause
\item Power is much improved.
\end{itemize}
\end{frame}
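% Illustrative simulation sketch; all parameter values are assumed.
\begin{frame}[fragile]
\frametitle{Sketch: watching the shock cancel}
\framesubtitle{A minimal R simulation, with assumed parameter values}
A quick simulation sketch of the matched $t$-test model. The values $\mu_1 = 10$, $\mu_2 = 9$, $\sigma_\tau = 2$ and $\sigma = 1$ are made up for illustration.
{\footnotesize
\begin{verbatim}
set.seed(9999)
n   <- 50
tau <- rnorm(n, mean = 0, sd = 2)    # Random shock for each case
y1  <- 10 + tau + rnorm(n, sd = 1)   # mu1 = 10 (assumed)
y2  <-  9 + tau + rnorm(n, sd = 1)   # mu2 =  9 (assumed)
d   <- y1 - y2                       # The shock tau cancels
t.test(d)                            # Matched t-test: good power
t.test(y1, y2, var.equal = TRUE)     # Ignoring the matching: less power
\end{verbatim}
} % End size
\end{frame}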
\begin{frame}
\frametitle{Extending the idea}
%\framesubtitle{}
\begin{itemize}
\item The random shock is a ``random effect.'' \pause
\item The classical normal model approach to repeated measures is based on mixed (fixed and random effects) models.
\end{itemize}
\end{frame}

\section{Random Effects}

\begin{frame}
\frametitle{General Mixed Linear Model}
{\LARGE
\begin{displaymath}
\mathbf{y}~=~\mathbf{X} \boldsymbol{\beta} ~+~ \mathbf{Zb} ~+~\boldsymbol{\epsilon}
\end{displaymath}
} \pause
\begin{itemize}
\item $\mathbf{X}$ is an $n \times p$ matrix of known constants. \pause
\item $\boldsymbol{\beta}$ is a $p \times 1$ vector of unknown constants. \pause
\item $\mathbf{Z}$ is an $n \times q$ matrix of known constants. \pause
\item $\mathbf{b} \sim N_q(\mathbf{0},\boldsymbol{\Sigma}_b)$ with $\boldsymbol{\Sigma}_b$ unknown but often diagonal. \pause
\item $\boldsymbol{\epsilon} \sim N(\mathbf{0},\sigma^2 \mathbf{I}_n)$, where $\sigma^2 > 0$ is an unknown constant.
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{Random vs. fixed effects}
{\LARGE
\begin{displaymath}
\mathbf{y}~=~\mathbf{X} \boldsymbol{\beta} ~+~ \mathbf{Zb} ~+~\boldsymbol{\epsilon}
\end{displaymath}
} \pause
\begin{itemize}
\item Elements of $\boldsymbol{\beta}$ are called fixed effects. \pause
\item Elements of $\mathbf{b}$ are called random effects. \pause
\item Models with both are called \emph{mixed}.
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{Main application of random effects models}
\pause
%\framesubtitle{}
A random factor is one in which the values of the factor are a random sample from a population of values. \pause
\begin{itemize}
\item Randomly select 20 fast food outlets, survey customers in each about quality of the fries. \pause Outlet is a random effects factor with 20 values. \pause Amount of salt would be a fixed effects factor. \pause
\item Randomly select 10 schools, test students at each school. \pause School is a random effects factor with 10 values. \pause
\item Randomly select 15 naturopathic medicines for arthritis (there are quite a few)\pause, and then randomly assign arthritis patients to try them. \pause Drug is a random effects factor. \pause
\item Randomly select 15 lakes. In each lake, measure how clear the water is at 20 randomly chosen points. \pause Lake is a random effects factor.
\end{itemize}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% \section{One Random Factor}
\begin{frame}
\frametitle{One random factor}
\framesubtitle{A nice simple example}
\pause
\begin{itemize}
\item Randomly select 5 farms. \pause
\item Randomly select 10 cows from each farm\pause, milk them, and record the amount of milk from each one. \pause
\item The one random factor is Farm. \pause
\item Total $n=50$.
\end{itemize}
\pause
The idea is that ``Farm'' is a kind of random shock that pushes all the amounts of milk in a particular farm up or down by the same amount.
\end{frame}

\begin{frame}
\frametitle{Farm is a random shock}
\pause
% \framesubtitle{White Whale Equation 25.38, p. 1047 (almost)}
{\LARGE
\begin{displaymath}
Y_{ij} = \mu_. + \tau_i + \epsilon_{ij},
\end{displaymath}
} \pause
where
\begin{itemize}
\item[] $\mu_.$ is an unknown constant parameter. \pause
\item[] $\tau_i \sim N(0,\sigma^2_\tau)$ \pause
\item[] $\epsilon_{ij} \sim N(0,\sigma^2)$ \pause
\item[] $\tau_i$ and $\epsilon_{ij}$ are all independent. \pause
\item[] $\sigma^2_\tau \geq 0$ and $\sigma^2 > 0$ are unknown parameters. \pause
\item[] $i=1, \ldots, q$ and $j=1, \ldots, k$ \pause
\item[] There are $q=5$ farms and $k=10$ cows from each farm.
\end{itemize}
\end{frame}
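% Illustrative simulation sketch; parameter values are assumed.
\begin{frame}[fragile]
\frametitle{Sketch: simulating the farm model}
\framesubtitle{A minimal R simulation of $Y_{ij} = \mu_. + \tau_i + \epsilon_{ij}$}
Here $\mu_. = 20$, $\sigma_\tau = 3$ and $\sigma = 2$ are made-up values.
{\footnotesize
\begin{verbatim}
set.seed(101)
q <- 5; k <- 10                        # 5 farms, 10 cows per farm
farm <- rep(1:q, each = k)
tau  <- rnorm(q, mean = 0, sd = 3)     # One random shock per farm
milk <- 20 + tau[farm] + rnorm(q*k, sd = 2)
tapply(milk, farm, mean)  # Farm means are pushed up or down by tau
\end{verbatim}
} % End size
\end{frame}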
\begin{frame}
\frametitle{General Mixed Linear Model Notation}
\begin{eqnarray*}
Y_{ij} & = & \mu_. + \tau_i + \epsilon_{ij} \\
\mathbf{Y} & = & \mathbf{X} \boldsymbol{\beta} ~+~ \mathbf{Zb} ~+~\boldsymbol{\epsilon}
\end{eqnarray*}
\begin{displaymath}
\left( \begin{array}{c} Y_{1,1} \\ Y_{1,2} \\ Y_{1,3} \\ \vdots \\ Y_{5,9} \\ Y_{5,10} \end{array} \right)
~=~
\left( \begin{array}{c} 1 \\ 1 \\ 1 \\ \vdots \\ 1 \\ 1 \end{array} \right) (\mu_.)
~+~
\left( \begin{array}{c c c c c}
1 & 0 & 0 & 0 & 0 \\
1 & 0 & 0 & 0 & 0 \\
1 & 0 & 0 & 0 & 0 \\
\vdots & \vdots & \vdots & \vdots & \vdots \\
0 & 0 & 0 & 0 & 1 \\
0 & 0 & 0 & 0 & 1 \\
\end{array} \right)
\left( \begin{array}{c} \tau_1 \\ \tau_2 \\ \tau_3 \\ \tau_4 \\ \tau_5 \end{array} \right)
~+~
\left( \begin{array}{c} \epsilon_{1,1} \\ \epsilon_{1,2} \\ \epsilon_{1,3} \\ \vdots \\ \epsilon_{5,9} \\ \epsilon_{5,10} \end{array} \right)
\end{displaymath}
\end{frame}

\begin{frame}
\frametitle{Distribution of $Y_{ij} = \mu_. + \tau_i + \epsilon_{ij}$}
\pause
%\framesubtitle{And associated statistics}
\begin{itemize}
\item $Y_{ij} \sim N(\mu_.,\sigma^2_\tau+\sigma^2)$ \pause
\item $Cov(Y_{ij},Y_{i,j^\prime}) = \sigma^2_\tau$ for $j \neq j^\prime$ \pause
\item $Cov(Y_{ij},Y_{i^\prime,j^\prime}) = 0$ for $i \neq i^\prime$ \pause
\item Observations are not all independent. \pause
\item Covariance matrix of $\mathbf{Y}$ is block diagonal: Matrix of matrices. \pause
\begin{itemize}
\item Off-diagonal matrices are all zeros. \pause
\item Matrices on the diagonal ($k \times k$) have the \emph{compound symmetry} structure \pause
\begin{displaymath}
\left( \begin{array}{c c c}
\sigma^2+\sigma^2_\tau & \sigma^2_\tau & \sigma^2_\tau \\
\sigma^2_\tau & \sigma^2+\sigma^2_\tau & \sigma^2_\tau \\
\sigma^2_\tau & \sigma^2_\tau & \sigma^2+\sigma^2_\tau \\
\end{array} \right)
\end{displaymath} \pause
(Except it's $10 \times 10$.)
\end{itemize}
\end{itemize}
\end{frame}
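% Illustrative sketch: the block diagonal covariance matrix in R,
% with assumed variance values.
\begin{frame}[fragile]
\frametitle{Sketch: building the block diagonal covariance matrix}
\framesubtitle{A minimal R construction, with assumed variance values}
With made-up values $\sigma^2 = 4$ and $\sigma^2_\tau = 9$, the covariance matrix of $\mathbf{Y}$ is a Kronecker product:
{\footnotesize
\begin{verbatim}
# One compound symmetry block, k x k
CS <- function(k, sig2, sig2_tau)
  sig2 * diag(k) + sig2_tau * matrix(1, k, k)
# q = 5 blocks of size k = 10 down the diagonal, zeros elsewhere
V <- kronecker(diag(5), CS(10, sig2 = 4, sig2_tau = 9))
V[1:3, 1:3]     # Within a farm: compound symmetry
V[1:3, 11:13]   # Between farms: all zeros
\end{verbatim}
} % End size
\end{frame}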
\begin{frame}
\frametitle{Skipping lots of details}
\framesubtitle{$Y_{ij} = \mu_. + \tau_i + \epsilon_{ij}$}
\begin{itemize}
\item Distribution theory.
\item Components of variance.
\item Testing $H_0: \sigma^2_\tau = 0$.
\item Extension to mixed models.
\item Nested effects.
\item Choice of $F$ statistics based on expected mean squares.
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{Repeated measures}
\framesubtitle{Another way to describe \emph{within-cases}}
\pause
\begin{itemize}
\item Sometimes an individual is tested under more than one condition, and contributes a response for each value of a categorical explanatory variable. \pause
\item One can view ``subject'' as just another random effects factor\pause, because subjects supposedly were randomly sampled. \pause
\item Subject would be nested within sex, but crossed with stimulus intensity. \pause
\item This is the classical (old fashioned) way to analyze repeated measures.
\end{itemize}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{A modern approach}

\begin{frame}
\frametitle{Problems with the classical approach}
\pause
\begin{itemize}
\item Normality matters in a serious way for the tests of random effects. \pause
\item Sometimes (especially for complicated mixed models) a valid $F$-test for an effect of interest just doesn't exist. \pause
\item When sample sizes are unbalanced, everything falls apart. \pause
\begin{itemize}
\item Mean squares are independent of $MSE$, but not of one another. \pause
\item Chi-squared variables involve matrix inverses, and variance terms no longer cancel in numerator and denominator. \pause
\item What about covariates? Now it gets really complicated.
\end{itemize}
% \item Standard large-sample methods are no help.
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{A modern approach using the general mixed linear model}
\begin{displaymath}
\mathbf{y}~=~\mathbf{X} \boldsymbol{\beta} ~+~ \mathbf{Zb} ~+~\boldsymbol{\epsilon}
\end{displaymath}
\pause
\begin{itemize}
\item $\mathbf{y} \sim N_n(\mathbf{X}\boldsymbol{\beta}, \mathbf{Z} \boldsymbol{\Sigma}_b \mathbf{Z}^\top + \sigma^2 \mathbf{I}_n)$ \pause
\item Estimate $\boldsymbol{\beta}$ as usual with $(\mathbf{X}^\top \mathbf{X})^{-1} \mathbf{X}^\top \mathbf{Y}$. \pause
\item Estimate $\boldsymbol{\Sigma}_b$ and $\sigma^2$ by maximum likelihood\pause, or by ``restricted'' maximum likelihood.
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{Restricted maximum likelihood}
\pause
\begin{displaymath}
\mathbf{y}~=~\mathbf{X} \boldsymbol{\beta} ~+~ \mathbf{Zb} ~+~\boldsymbol{\epsilon}
\end{displaymath}
\pause
%\vspace{5mm}
\begin{itemize}
\item Transform $\mathbf{y}$ by the $(n-p) \times n$ matrix $\mathbf{K}$. \pause
\item The rows of $\mathbf{K}$ are orthogonal to the columns of $\mathbf{X}$, meaning \pause $\mathbf{KX} = \mathbf{0}$. \pause
\item Then
\begin{eqnarray*}
\mathbf{Ky} & = & \mathbf{KX} \boldsymbol{\beta} + \mathbf{KZb} + \mathbf{K}\boldsymbol{\epsilon} \\ \pause
& = & \mathbf{KZb} + \mathbf{K}\boldsymbol{\epsilon} \\ \pause
& \sim & N(\mathbf{0}, \mathbf{KZ}\boldsymbol{\Sigma}_b\mathbf{Z^\top K^\top} + \sigma^2 \mathbf{KK}^\top) \pause
\end{eqnarray*}
\item Estimate $\boldsymbol{\Sigma}_b$ and $\sigma^2$ by maximum likelihood. \pause
\item A big theorem says the result does not depend on the choice of $\mathbf{K}$.
\end{itemize}
\end{frame}
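% Illustrative sketch: one way to construct K, using a made-up X.
\begin{frame}[fragile]
\frametitle{Sketch: constructing a $\mathbf{K}$ with $\mathbf{KX} = \mathbf{0}$}
\framesubtitle{A minimal R construction via the QR decomposition; $\mathbf{X}$ is made up}
{\footnotesize
\begin{verbatim}
n <- 50; p <- 2
X <- cbind(1, rep(0:1, each = n/2))  # Made-up n x p design matrix
Q <- qr.Q(qr(X), complete = TRUE)    # Orthonormal basis for R^n
K <- t(Q[, (p+1):n])                 # Rows orthogonal to columns of X
max(abs(K %*% X))                    # Zero, up to rounding error
dim(K)                               # (n-p) x n
\end{verbatim}
} % End size
\end{frame}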
\begin{frame}
\frametitle{Nice results from restricted maximum likelihood}
%\framesubtitle{}
\begin{itemize}
\item $F$ statistics that correspond to the classical ones for balanced designs.
\item For unbalanced designs, ``$F$ statistics'' that are actually excellent $F$ approximations --- not quite $F$, but very close.
\item R's \texttt{lme4} package and SAS \texttt{proc mixed}.
% \item Like $cov(\boldsymbol{\epsilon})$ can be block diagonal, with useful structures \ldots
\end{itemize}
\end{frame}

\section{Random Intercept Models}

\begin{frame}
\frametitle{Random Intercept Models}
%\framesubtitle{}
\begin{itemize}
\item Drop the complicated classical mixed model machinery. \pause
\item Retain the basic good idea. \pause
\item Each subject (person, case) contributes an individual shock that pushes all the data values from that person up or down by the same amount. \pause Because cases are randomly sampled (pretend), it's a random shock. \pause
\item This is still a mixed model, but it's much simpler.
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{Example: The Noise study}
\pause
%\framesubtitle{}
Females and males carry out a discrimination task under 3 levels of background noise. \pause Each subject contributes a discrimination score at each noise level. \pause
\begin{itemize}
\item It's a $2 \times 3$ factorial design. \pause
\item Sex is between cases, noise is within cases. \pause
\item Model:
\end{itemize}
For $i = 1, \ldots, n$ and $j = 1, \ldots, 3$, \pause
\begin{eqnarray*}
Y_{i,j} & = & \beta_0 + \beta_1s_i + \beta_2 d_{i,j,1} + \beta_3 d_{i,j,2} + \beta_4 s_i d_{i,j,1} + \beta_5 s_i d_{i,j,2} + b_i + \epsilon_{i,j} \\ \pause
& = & (\beta_0 + b_i) + \beta_1s_i + \beta_2 d_{i,j,1} + \beta_3 d_{i,j,2} + \beta_4 s_i d_{i,j,1} + \beta_5 s_i d_{i,j,2} + \epsilon_{i,j} \pause
\end{eqnarray*}
You could say that the intercept is $N(\beta_0,\sigma^2_b)$.
\end{frame}

\begin{frame}
\frametitle{In matrix form: $\mathbf{y}=\mathbf{X} \boldsymbol{\beta} + \mathbf{Zb} +\boldsymbol{\epsilon}$}
\framesubtitle{For 2 females and 2 males}
\pause
$Y_{i,j} = \beta_0 + \beta_1s_i + \beta_2 d_{i,j,1} + \beta_3 d_{i,j,2} + \beta_4 s_i d_{i,j,1} + \beta_5 s_i d_{i,j,2} + b_i + \epsilon_{i,j}$ \pause
{\scriptsize
\begin{displaymath}
\left(\begin{array}{c} Y_{11} \\ Y_{12} \\ Y_{13} \\ Y_{21} \\ Y_{22} \\ Y_{23} \\ Y_{31} \\ Y_{32} \\ Y_{33} \\ Y_{41} \\ Y_{42} \\ Y_{43} \end{array}\right)
=
\left(\begin{array}{rrrrrr}
1 & 1 & 1 & 0 & 1 & 0 \\
1 & 1 & 0 & 1 & 0 & 1 \\
1 & 1 & -1 & -1 & -1 & -1 \\
1 & 1 & 1 & 0 & 1 & 0 \\
1 & 1 & 0 & 1 & 0 & 1 \\
1 & 1 & -1 & -1 & -1 & -1 \\
1 & -1 & 1 & 0 & -1 & 0 \\
1 & -1 & 0 & 1 & 0 & -1 \\
1 & -1 & -1 & -1 & 1 & 1 \\
1 & -1 & 1 & 0 & -1 & 0 \\
1 & -1 & 0 & 1 & 0 & -1 \\
1 & -1 & -1 & -1 & 1 & 1 \\
\end{array}\right)
\left(\begin{array}{c} \beta_0 \\ \beta_1 \\ \beta_2 \\ \beta_3 \\ \beta_4 \\ \beta_5 \end{array}\right)
+ ~~ \cdots
\end{displaymath}
} % End size
\end{frame}

\begin{frame}
\frametitle{Continuing $\mathbf{y}=\mathbf{X} \boldsymbol{\beta} + \mathbf{Zb} +\boldsymbol{\epsilon}$}
% \framesubtitle{For 2 females and 2 males}
$Y_{i,j} = \beta_0 + \beta_1s_i + \beta_2 d_{i,j,1} + \beta_3 d_{i,j,2} + \beta_4 s_i d_{i,j,1} + \beta_5 s_i d_{i,j,2} + b_i + \epsilon_{i,j}$ \pause
{\scriptsize
\begin{displaymath}
\left(\begin{array}{c} Y_{11} \\ Y_{12} \\ Y_{13} \\ Y_{21} \\ Y_{22} \\ Y_{23} \\ Y_{31} \\ Y_{32} \\ Y_{33} \\ Y_{41} \\ Y_{42} \\ Y_{43} \end{array}\right)
= \mathbf{X} \boldsymbol{\beta} + \pause
\left(\begin{array}{cccc}
1 & 0 & 0 & 0 \\
1 & 0 & 0 & 0 \\
1 & 0 & 0 & 0 \\
0 & 1 & 0 & 0 \\
0 & 1 & 0 & 0 \\
0 & 1 & 0 & 0 \\
0 & 0 & 1 & 0 \\
0 & 0 & 1 & 0 \\
0 & 0 & 1 & 0 \\
0 & 0 & 0 & 1 \\
0 & 0 & 0 & 1 \\
0 & 0 & 0 & 1 \\
\end{array}\right)
\left(\begin{array}{c} b_1 \\ b_2 \\ b_3 \\ b_4 \end{array}\right) \pause
+ \left(\begin{array}{c} \epsilon_{11} \\ \epsilon_{12} \\ \epsilon_{13} \\ \epsilon_{21} \\ \epsilon_{22} \\ \epsilon_{23} \\ \epsilon_{31} \\ \epsilon_{32} \\ \epsilon_{33} \\ \epsilon_{41} \\ \epsilon_{42} \\ \epsilon_{43} \end{array}\right)
\end{displaymath}
} % End size
where $cov(\mathbf{b}) = \sigma^2_b\mathbf{I}_4$.
\end{frame}

\begin{frame}
\frametitle{Covariance matrix of $\mathbf{y}$ is block diagonal}
\pause
%\framesubtitle{}
With four blocks on the diagonal that look like this: \pause
{\LARGE
\begin{displaymath}
\left( \begin{array}{c c c}
\sigma^2+\sigma^2_b & \sigma^2_b & \sigma^2_b \\
\sigma^2_b & \sigma^2+\sigma^2_b & \sigma^2_b \\
\sigma^2_b & \sigma^2_b & \sigma^2+\sigma^2_b \\
\end{array} \right)
\end{displaymath} \pause
} % End size
\begin{itemize}
\item This structure is called \emph{compound symmetry}. \pause
\item Pause to reflect. \pause
\item I like it for lab studies\pause, especially with several responses to the experimental conditions, in a different random order for each subject. \pause
\item For longitudinal studies, not so much. \pause
\item It implies $Cov(y_t,y_{t+1}) = Cov(y_t,y_{t+100})$.
\end{itemize}
\end{frame}

\section{\texttt{lme4}}

\begin{frame}
\frametitle{\texttt{lme4}}
\framesubtitle{Linear Mixed Effects Models}
\pause
\begin{itemize}
\item Download and install the package.
\item The \texttt{lmer} function acts like an extended version of \texttt{lm}.
\item We will use just a fraction of its capabilities.
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{Syntax}
\pause
% \framesubtitle{As I currently understand it.}
\texttt{noise1 = lmer(discrim} $\sim$ \texttt{sex*noise + (1 | ident))} \pause
\begin{itemize}
\item Response variable $\sim$ Fixed effects + (Random effects) \pause
\item \texttt{sex*noise} is short for \texttt{sex + noise + sex:noise}. \pause
\item Specification of fixed effects is like \texttt{lm}. \pause
\item Specification of random effects looks like ($A|B$). \pause
\begin{itemize}
\item $A$ is \texttt{lm}-like syntax for the random effects. \pause It creates the $\mathbf{Z}$ matrix in $\mathbf{y}=\mathbf{X} \boldsymbol{\beta} + \mathbf{Zb} +\boldsymbol{\epsilon}$. \pause
\item With a new independent copy for every value of $B$.
\end{itemize}
\end{itemize}
\end{frame}
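% Illustrative sketch: fit a random intercept model with lmer on the
% simulated farm data; parameter values are assumed as in the earlier sketch.
\begin{frame}[fragile]
\frametitle{Sketch: \texttt{lmer} on the simulated farm data}
\framesubtitle{A random intercept for each farm; parameter values are assumed}
{\footnotesize
\begin{verbatim}
# install.packages("lme4")  # If necessary
library(lme4)
set.seed(101)
farm <- rep(1:5, each = 10)                 # 5 farms, 10 cows each
tau  <- rnorm(5, mean = 0, sd = 3)          # sigma_tau = 3 (assumed)
milk <- 20 + tau[farm] + rnorm(50, sd = 2)  # mu = 20, sigma = 2 (assumed)
fit  <- lmer(milk ~ 1 + (1 | farm))
summary(fit)   # Estimates of mu, sigma^2_tau and sigma^2
\end{verbatim}
} % End size
\end{frame}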
\begin{frame}
\frametitle{Another example}
\framesubtitle{Compare \texttt{noise1 = lmer(discrim} $\sim$ \texttt{sex*noise + (1 | ident))}}
\pause
\begin{itemize}
\item Reaction time tested every day for days 0--9 of sleep deprivation. \pause
\item Ten observations on each of 18 subjects. \pause
\item Roughly linear, and each subject has her own slope and intercept. \pause
\end{itemize}
\begin{center}
\texttt{Reaction} $\sim$ \texttt{Days + (Days | Subject)}
\end{center}
\pause
Random slope and intercept.
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Copyright Information}

This slide show was prepared by \href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of Statistics, University of Toronto. It is licensed under a \href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}{Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part of it as you like and share the result freely. The \LaTeX~source code is available from the course website:
\href{http://www.utstat.toronto.edu/~brunner/oldclass/appliedf17}{\small\texttt{http://www.utstat.toronto.edu/$^\sim$brunner/oldclass/appliedf17}}

\end{frame}

\end{document}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{}
%\framesubtitle{}
\begin{itemize}
\item
\item
\item
\end{itemize}
\end{frame}

{\LARGE
\begin{displaymath}

\end{displaymath}
}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{displaymath}
\begin{array}{ll}
Y_{i,1} = \mu_1 + \tau_i + \epsilon_{i,1} & Y_{i,2} = \mu_1 + \tau_i + \epsilon_{i,2} \\
Var(\tau_i) = \sigma^2_\tau & Var(\epsilon_{i,j}) = \sigma^2_\epsilon \mbox{ independent} \\
Var(Y_{i,1}) = \sigma^2_\tau + \sigma^2_\epsilon & Var(Y_{i,2}) = \sigma^2_\tau + \sigma^2_\epsilon
\end{array}
\end{displaymath}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%