\documentclass[serif]{beamer} % Serif for Computer Modern math font.
% \documentclass[serif, handout]{beamer} % Handout mode to ignore pause statements
% NOTE(review): the handout variant above is commented out -- two active \documentclass lines would not compile.
% \hypersetup{colorlinks,linkcolor=,urlcolor=red}
\usefonttheme{serif} % Looks like Computer Modern for non-math text -- nice!
\setbeamertemplate{navigation symbols}{} % Suppress navigation symbols
% \usetheme{Berlin} % Displays sections on top
\usetheme{Frankfurt} % Displays section titles on top: Fairly thin but still swallows some material at bottom of crowded slides
%\usetheme{Berkeley}
\usepackage[english]{babel}
\usepackage{amsmath} % for binom; also provides align* and pmatrix used below
% \usepackage{graphicx} % To include pdf files! (beamer loads graphicx itself)
% \definecolor{links}{HTML}{2A1B81}
% \definecolor{links}{red}
\setbeamertemplate{footline}[frame number]
\mode<presentation> % NOTE(review): source had a bare \mode (invalid); <presentation> assumed -- confirm against the original file.

% Upright "cov" operator with correct spacing, instead of math-italic c*o*v.
\DeclareMathOperator{\cov}{cov}

\title{Latent Model Rules\footnote{See last slide for copyright information.}}
\subtitle{STA2053 Fall 2022}
\date{} % To suppress date

\begin{document}

\begin{frame}
\titlepage
\end{frame}

\begin{frame}
\frametitle{The two-stage model: $\cov(\mathbf{d}_i)=\boldsymbol{\Sigma}$}
\framesubtitle{All variables are centered}
{\LARGE
\begin{align*}
\mathbf{y}_i &= \boldsymbol{\beta} \mathbf{y}_i + \boldsymbol{\Gamma} \mathbf{x}_i + \boldsymbol{\epsilon}_i \\
\mathbf{F}_i &= \begin{pmatrix} \mathbf{x}_i \\ \mathbf{y}_i \end{pmatrix} \\
\mathbf{d}_i &= \boldsymbol{\Lambda}\mathbf{F}_i + \mathbf{e}_i
\end{align*}
} % End size
\begin{itemize}
\item $\mathbf{x}_i$ is $p \times 1$, $\mathbf{y}_i$ is $q \times 1$, $\mathbf{d}_i$ is $k \times 1$.
\item $\cov(\mathbf{x}_i)=\boldsymbol{\Phi}_x$, $\cov(\boldsymbol{\epsilon}_i)=\boldsymbol{\Psi}$
\item $\cov(\mathbf{F}_i) = \cov\begin{pmatrix} \mathbf{x}_i \\ \mathbf{y}_i \end{pmatrix}
      = \boldsymbol{\Phi}
      = \begin{pmatrix} \boldsymbol{\Phi}_{11} & \boldsymbol{\Phi}_{12} \\
                        \boldsymbol{\Phi}_{12}^\top & \boldsymbol{\Phi}_{22} \end{pmatrix}$
\item $\cov(\mathbf{e}_i)=\boldsymbol{\Omega}$
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{Identify parameter matrices in two steps}
\framesubtitle{It does not really matter which one you do first.}
%{\footnotesize
\begin{itemize}
\item $\mathbf{y}_i = \boldsymbol{\beta} \mathbf{y}_i + \boldsymbol{\Gamma} \mathbf{x}_i + \boldsymbol{\epsilon}_i$
  \begin{itemize}
  \item[] $\cov(\mathbf{x}_i)=\boldsymbol{\Phi}_x$, $\cov(\boldsymbol{\epsilon}_i)=\boldsymbol{\Psi}$
  \end{itemize}
\item $\mathbf{d}_i = \boldsymbol{\Lambda}\mathbf{F}_i + \mathbf{e}_i$
  \begin{itemize}
  \item[] $\cov(\mathbf{F}_i)=\boldsymbol{\Phi}$, $\cov(\mathbf{e}_i)=\boldsymbol{\Omega}$ % \mathbf for consistency with the model statement
  \end{itemize}
\end{itemize}
%} % End size
\vspace{3mm}
\hrule
\vspace{3mm}
%{\Large
\begin{enumerate}
\item \emph{Latent model}: Show $\boldsymbol{\beta}$, $\boldsymbol{\Gamma}$, $\boldsymbol{\Phi}_x$ and $\boldsymbol{\Psi}$
      can be recovered from $\boldsymbol{\Phi} = \cov\begin{pmatrix} \mathbf{x}_i \\ \mathbf{y}_i \end{pmatrix}$.
\item \emph{Measurement model}: Show $\boldsymbol{\Phi}$ and $\boldsymbol{\Omega}$
      can be recovered from $\boldsymbol{\Sigma}=\cov(\mathbf{d}_i)$.
\end{enumerate}
This means all the parameters can be recovered from $\boldsymbol{\Sigma}$.
%} % End size
\end{frame}

\begin{frame}
\frametitle{Latent Model Rules}
\begin{itemize}
\item $\mathbf{y}_i = \boldsymbol{\beta} \mathbf{y}_i + \boldsymbol{\Gamma} \mathbf{x}_i + \boldsymbol{\epsilon}_i$
\pause
\item Here, identifiability means that the parameters $\boldsymbol{\beta}$, $\boldsymbol{\Gamma}$,
      $\boldsymbol{\Phi}_x$ and $\boldsymbol{\Psi}$ are functions of $\cov(\mathbf{F}_i)=\boldsymbol{\Phi}$.
\end{itemize}
\end{frame}

\begin{frame}
\frametitle{Regression Rule}
\framesubtitle{Sometimes called the Null Beta Rule} % typo "Someimes" fixed
Suppose
\begin{itemize}
\item No endogenous variables influence other endogenous variables. \pause
\item[]
\item $\mathbf{y}_i = \boldsymbol{\Gamma} \mathbf{x}_i + \boldsymbol{\epsilon}_i$ \pause
\item Of course $\cov(\mathbf{x}_i, \boldsymbol{\epsilon}_i) = \mathbf{0}$, always. \pause
\item $\boldsymbol{\Psi} = \cov(\boldsymbol{\epsilon}_i)$ need not be diagonal. \pause
\end{itemize}
\vspace{5mm}
Then $\boldsymbol{\Gamma}$ and $\boldsymbol{\Psi}$ are identifiable.
\end{frame}

\begin{frame}
\frametitle{Acyclic Rule}
\framesubtitle{Acyclic models are frequently called ``recursive.''} % closing quotes were a straight "
\pause
Parameters of the Latent Variable Model are identifiable if the model is acyclic
(no feedback loops through straight arrows) and the following conditions hold.
\pause
\begin{itemize}
\item Organize the variables that are not error terms into sets.
      Set 0 consists of all the exogenous variables.
\item For $j=1,\ldots,m$, each endogenous variable in set $j$ is influenced by
      at least one variable in set $j-1$, and also possibly by variables in earlier sets.
\item Error terms may be correlated within sets, but not between sets. \pause
\end{itemize}
\vspace{5mm}
Proof: Repeated application of the Regression Rule.
\end{frame}

\begin{frame}
\frametitle{An Acyclic model}
%\framesubtitle{}
\begin{center}
\includegraphics[width=4.5in]{AcyclicHand}
\end{center}
\end{frame}

\begin{frame}
\frametitle{Brand awareness model}
%\framesubtitle{}
\begin{center}
\includegraphics[width=3.6in]{Doughnut5}
\end{center}
\end{frame}

\begin{frame}
\frametitle{Parameters of this model are just identifiable}
\framesubtitle{Example from Ch. 5 of Duncan's \emph{Introduction to Structural Equation Models}}
\begin{center}
\includegraphics[width=4in]{Duncan-Just-Ident}
\end{center}
Shows that the acyclic rule is sufficient but not necessary.
\end{frame}

\begin{frame}
\frametitle{The Pinwheel Model}
\framesubtitle{Parameters are identifiable}
\begin{center}
\includegraphics[width=2.5in]{Pinwheel}
\end{center}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
\frametitle{Covariance matrix for the pinwheel model}
%\framesubtitle{}
% Upper triangle only; blank cells below the diagonal are implied by symmetry.
\begin{columns} % Use Beamer's columns to use more of the margins
\column{1.15\textwidth}
{\tiny
$\left(\begin{array}{cccc}
\phi &
-\frac{\gamma \phi}{\beta_{1} \beta_{2} \beta_{3} - 1} &
-\frac{\beta_{2} \gamma \phi}{\beta_{1} \beta_{2} \beta_{3} - 1} &
-\frac{\beta_{2} \beta_{3} \gamma \phi}{\beta_{1} \beta_{2} \beta_{3} - 1} \\
 &
\frac{\beta_{1}^{2} \beta_{3}^{2} \psi_{2} + \gamma^{2} \phi + \beta_{1}^{2} \psi_{3} + \psi_{1}}{{\left(\beta_{1} \beta_{2} \beta_{3} - 1\right)}^{2}} &
\frac{\beta_{2} \gamma^{2} \phi + \beta_{1}^{2} \beta_{2} \psi_{3} + \beta_{1} \beta_{3} \psi_{2} + \beta_{2} \psi_{1}}{{\left(\beta_{1} \beta_{2} \beta_{3} - 1\right)}^{2}} &
\frac{\beta_{2} \beta_{3} \gamma^{2} \phi + \beta_{1} \beta_{3}^{2} \psi_{2} + \beta_{2} \beta_{3} \psi_{1} + \beta_{1} \psi_{3}}{{\left(\beta_{1} \beta_{2} \beta_{3} - 1\right)}^{2}} \\
 & &
\frac{\beta_{2}^{2} \gamma^{2} \phi + \beta_{1}^{2} \beta_{2}^{2} \psi_{3} + \beta_{2}^{2} \psi_{1} + \psi_{2}}{{\left(\beta_{1} \beta_{2} \beta_{3} - 1\right)}^{2}} &
\frac{\beta_{2}^{2} \beta_{3} \gamma^{2} \phi + \beta_{2}^{2} \beta_{3} \psi_{1} + \beta_{1} \beta_{2} \psi_{3} + \beta_{3} \psi_{2}}{{\left(\beta_{1} \beta_{2} \beta_{3} - 1\right)}^{2}} \\
 & & &
\frac{\beta_{2}^{2} \beta_{3}^{2} \gamma^{2} \phi + \beta_{2}^{2} \beta_{3}^{2} \psi_{1} + \beta_{3}^{2} \psi_{2} + \psi_{3}}{{\left(\beta_{1} \beta_{2} \beta_{3} - 1\right)}^{2}}
\end{array}\right)$
} % End size
\end{columns}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
\frametitle{Copyright Information}

This slide show was prepared by
\href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner},
Department of Statistical Sciences, University of Toronto.
It is licensed under a
\href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}
{Creative Commons Attribution - ShareAlike 3.0 Unported License}.
Use any part of it as you like and share the result freely.
The \LaTeX~source code is available from the course website:
\href{http://www.utstat.toronto.edu/~brunner/oldclass/2053f22}
{\small\texttt{http://www.utstat.toronto.edu/\textasciitilde brunner/oldclass/2053f22}}
% Displayed URL now shows the ~ that is in the link target.

\end{frame}

\end{document}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Everything below \end{document} is ignored by LaTeX: frame/display templates
% and a record of the SageMath session used to compute the pinwheel covariance
% matrix above. Kept verbatim for reference.

\begin{frame}
\frametitle{}
%\framesubtitle{}
\begin{itemize}
\item
\item
\item
\end{itemize}
\end{frame}

{\LARGE
\begin{displaymath}

\end{displaymath}
}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

# Pinwheel model with sagemath: Call the sage notebook pinwheel3
# load sem package, set up Beta, Gamma, Phi, Psi
sem = 'http://www.utstat.toronto.edu/~brunner/openSEM/sage/sem.sage'
load('~/sem.sage') # Local version
BETA = ZeroMatrix(3,3)
BETA[0,2] = var('beta1'); BETA[1,0] = var('beta2')
BETA[2,1] = var('beta3'); show(BETA)
GAMMA = ZeroMatrix(3,1); GAMMA[0,0] = var('gamma'); show(GAMMA)
PHI = ZeroMatrix(1,1); PHI[0,0] = var('phi'); show(PHI)
PSI = DiagonalMatrix(3,'psi'); show(PSI)

# Check the determinant of (I-beta). It can't be zero.
det(IdentityMatrix(3)-BETA)

# Calculate the covariance matrix.
Sigma = NoGammaCov(BETA,PSI)
show(Sigma)

# Covariance structure equations
eqns = SetupEqns(Sigma)
for item in eqns: show(item)

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%