\documentclass[12pt]{article}
% \pagestyle{empty} % No page numbers
\usepackage{amssymb} % for \mathbb

\begin{document}

\begin{center}
{\large Math tools}
\vspace{3 mm}
\end{center}

Here are some useful theorems and definitions from advanced calculus and
real analysis. Most of them are in the books on reserve in the math
library.

\paragraph{Leibnitz's Rule} Let $f(x,\theta)$ and
$\frac{\partial f}{\partial \theta}$ be continuous in some region of the
$(x,\theta)$ plane including $u_1 \leq x \leq u_2$ and
$a \leq \theta \leq b$, and let $u_1(\theta)$ and $u_2(\theta)$ have
continuous derivatives for $a \leq \theta \leq b$. Then
\begin{displaymath}
\frac{d}{d \theta} \int_{u_1(\theta)}^{u_2(\theta)} f(x,\theta) \, dx
 = \int_{u_1(\theta)}^{u_2(\theta)}
   \frac{\partial}{\partial \theta} f(x,\theta) \, dx
 \;+\; f(u_2(\theta),\theta) \, \frac{d}{d\theta} u_2(\theta)
 \;-\; f(u_1(\theta),\theta) \, \frac{d}{d\theta} u_1(\theta)
\end{displaymath}
for $a \leq \theta \leq b$.

\paragraph{Taylor's Theorem} Let the $n$th derivative $f^{(n)}$ be
continuous in $[a,b]$ and differentiable in $(a,b)$, with $x$ and $x_0$
in $(a,b)$. Then there exists a point $\xi$ between $x$ and $x_0$ such
that
\begin{eqnarray*}
f(x) & = & f(x_0) \;+\; f^\prime(x_0)\,(x-x_0)
 \;+\; \frac{f^{\prime\prime}(x_0)(x-x_0)^2}{2!} \;+\; \ldots
 \;+\; \frac{f^{(n)}(x_0)(x-x_0)^n}{n!} \\
 & + & \; \frac{f^{(n+1)}(\xi)(x-x_0)^{n+1}}{(n+1)!}
\end{eqnarray*}
where $R_n = \frac{f^{(n+1)}(\xi)(x-x_0)^{n+1}}{(n+1)!}$ is called the
\emph{remainder term}. If $R_n \rightarrow 0$ as $n \rightarrow \infty$,
the resulting infinite series is called the \emph{Taylor Series} for
$f(x)$. There are other forms of the remainder term (with a different
value of $\xi$) that sometimes prove useful.

\paragraph{Fubini's Theorem} If a double integral (sum) converges
absolutely, then the order of integration (summation) may be exchanged.
If the quantity being integrated (or added up) is non-negative, then
integration (summation) may always be exchanged, and if the result is
$\infty$ in one direction, it is $\infty$ in the other direction too.
See Fraser's \emph{Probability and Statistics} or a measure theory book
like Royden's \emph{Real Analysis} for more precision.

\paragraph{Uniform Convergence of Sums} Let
$u_1(x),\, u_2(x),\, \ldots\,$ be a sequence of functions, and define
$S_n(x) = \sum_{k=1}^n u_k(x)$. Then
$\lim_{n \rightarrow \infty} S_n(x) = S(x)$ means
$\forall \, \epsilon>0, \, \exists \, N \ni$ if $n>N$, then
$|S_n(x)-S(x)|<\epsilon$. In general, $N$ will depend on $x$ as well as
$\epsilon$. If $N$ depends only on $\epsilon$ (often for all $x$ in some
interval), then we will say that $S_n(x)$ converges \emph{uniformly} to
$S(x)$ (again, often for the $x$ in that interval).

\paragraph{Tests for Uniform Convergence of Sums}
\begin{description}
\item[Weierstrass M test] If there is a sequence of positive constants
$M_1,\, M_2,\, \ldots\,$ such that $|u_n(x)| \leq M_n$ in some interval
for each $n$, and if $\sum_{n=1}^\infty M_n$ converges, then
$\sum_{n=1}^\infty u_n(x)$ converges uniformly in that interval.
\item[Dirichlet's test] If the sequence of constants
$a_n \downarrow 0$, and there exist constants $N$ and $P$ such that
$|S_n(x)| < P$ for all $n > N$ and $a \leq x \leq b$, then the series
$\sum_{n=1}^\infty a_n u_n(x)$ is uniformly convergent for
$a \leq x \leq b$.
\end{description}
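\paragraph{Examples} Here are a few quick illustrations of the
preceding results; all can be verified directly.
\begin{itemize}
\item \emph{Leibnitz's Rule.} Take $f(x,\theta) = x + \theta$,
$u_1(\theta) = 0$ and $u_2(\theta) = \theta$. Directly,
$\int_0^\theta (x+\theta)\,dx = \frac{3\theta^2}{2}$, with derivative
$3\theta$; Leibnitz's rule gives the same answer,
$\int_0^\theta 1\,dx + (\theta+\theta)(1) - (0+\theta)(0) = 3\theta$.
\item \emph{Taylor's Theorem.} For $f(x) = e^x$ and $x_0 = 0$, every
derivative is $e^x$, so
$R_n = \frac{e^\xi x^{n+1}}{(n+1)!} \rightarrow 0$ as
$n \rightarrow \infty$ for each fixed $x$, giving the Taylor Series
$e^x = \sum_{n=0}^\infty \frac{x^n}{n!}$.
\item \emph{Fubini's Theorem.} If $X$ takes values in
$\{0, 1, 2, \ldots\}$, the terms $P(X=j)$ are non-negative, so
summation may be exchanged (even if both sides are $\infty$):
\begin{displaymath}
\sum_{k=1}^\infty P(X \geq k)
 = \sum_{k=1}^\infty \sum_{j=k}^\infty P(X=j)
 = \sum_{j=1}^\infty \sum_{k=1}^{j} P(X=j)
 = \sum_{j=1}^\infty j \, P(X=j) = E(X).
\end{displaymath}
\item \emph{Weierstrass M test.} The series
$\sum_{n=1}^\infty \frac{\cos nx}{n^2}$ converges uniformly for all
real $x$, since
$\left|\frac{\cos nx}{n^2}\right| \leq \frac{1}{n^2} = M_n$ and
$\sum_{n=1}^\infty \frac{1}{n^2}$ converges. By contrast,
$\sum_{k=0}^\infty x^k = \frac{1}{1-x}$ converges for each $|x| < 1$
but not uniformly on $(-1,1)$; it does converge uniformly on $[-b,b]$
for any $0 < b < 1$, by the M test with $M_k = b^k$.
\end{itemize}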
\paragraph{Theorems about Uniform Convergence of Sums}
\begin{itemize}
\item If $u_n(x)$ is continuous in $[a,b]$ for $n=1, 2, \ldots$ , and if
$S_n(x)$ converges uniformly to $S(x)$ in $[a,b]$, then $\lim_{x \rightarrow
x_0} \sum_{n=1}^\infty u_n(x) = \sum_{n=1}^\infty \lim_{x \rightarrow x_0}
u_n(x) = \sum_{n=1}^\infty u_n(x_0)$, where $x_0 \in [a,b]$, and right or left
hand limits are used if $x_0$ is an endpoint of $[a,b]$.
\item If $u_n(x)$ has a continuous derivative in $[a,b]$ for $n=1, 2,
\ldots$ , and if $S_n(x)$ converges to $S(x)$ while $\sum_{n=1}^\infty
\frac{d}{dx} u_n(x)$ converges uniformly in $[a,b]$, then $\frac{d}{dx}
\sum_{n=1}^\infty u_n(x) = \sum_{n=1}^\infty \frac{d}{dx} u_n(x)$ (see
the example below).
\end{itemize}
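\paragraph{Example (Differentiating Term by Term)} With
$u_n(x) = x^n$ and $0 < b < 1$, the differentiated series
$\sum_{n=1}^\infty n x^{n-1}$ converges uniformly on $[-b,b]$ by the
M test with $M_n = n b^{n-1}$, so for $|x| \leq b$ (and hence for all
$|x| < 1$)
\begin{displaymath}
\frac{d}{dx} \sum_{n=0}^\infty x^n
 = \frac{d}{dx} \, \frac{1}{1-x}
 = \frac{1}{(1-x)^2}
 = \sum_{n=1}^\infty n x^{n-1}.
\end{displaymath}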
\paragraph{Power Series} A series of the form
$\sum_{n=0}^\infty a_n (x-a)^n$ is called a \emph{power series} in $x$.
If it converges for $|x-a| < R$ and diverges for $|x-a| > R$, then $R$
is called the \emph{radius of convergence}. A power series converges
uniformly on any closed interval interior to its interval of
convergence, so there it may be differentiated and integrated term by
term.

\paragraph{Uniform Convergence of Integrals} The integral
$\int_a^\infty f(x,\theta)\,dx$ converges \emph{uniformly} for
$\theta \in [\theta_1,\theta_2]$ if
$\forall \, \epsilon > 0, \, \exists \, N \ni$ if $u > N$, then
$\left| \int_a^u f(x,\theta)\,dx - \int_a^\infty f(x,\theta)\,dx
\right| < \epsilon$ for all $\theta \in [\theta_1,\theta_2]$; that is,
$N$ depends only on $\epsilon$ and not on $\theta$.

\paragraph{Tests for Uniform Convergence of Integrals}
\begin{description}
\item[Weierstrass M test] If $|f(x,\theta)| \leq M(x)$ for $x \geq a$
and $\theta \in [\theta_1,\theta_2]$, and if $\int_a^\infty M(x)\,dx$
converges, then $\int_a^\infty f(x,\theta)\,dx$ converges uniformly
for $\theta \in [\theta_1,\theta_2]$.
\item[Dirichlet's test] If $\psi(x)$ decreases monotonically to zero
as $x \rightarrow \infty$, and there is a constant $P$ such that
$\left| \int_a^u f(x,\theta)\,dx \right| \leq P$ for all $u > a$ and
$\theta \in [\theta_1,\theta_2]$, then
$\int_a^\infty f(x,\theta)\psi(x)\,dx$ is uniformly convergent for
$\theta \in [\theta_1,\theta_2]$.
\end{description}
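\paragraph{Example (M test for Integrals)} The integral defining the
Gamma function,
$\Gamma(\theta) = \int_0^\infty x^{\theta-1} e^{-x}\,dx$, converges
uniformly for $\theta \in [\theta_1,\theta_2]$ whenever
$1 \leq \theta_1 < \theta_2$: for $x \geq 0$ and such $\theta$,
$x^{\theta-1} e^{-x} \leq (1 + x^{\theta_2-1}) e^{-x} = M(x)$, and
$\int_0^\infty M(x)\,dx = 1 + \Gamma(\theta_2)$ converges.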
\paragraph{Theorems about Uniform Convergence of Integrals}
\begin{itemize}
\item If $f(x,\theta)$ is continuous for $x \geq a$ and $\theta \in
[\theta_1,\theta_2]$, and if $\int_a^\infty f(x,\theta)\,dx$ converges
uniformly for $\theta \in [\theta_1,\theta_2]$, then
$\lim_{\theta \rightarrow \theta_0} \int_a^\infty f(x,\theta)\,dx =
\int_a^\infty \lim_{\theta \rightarrow \theta_0} f(x,\theta)\,dx =
\int_a^\infty f(x,\theta_0)\,dx$, where $\theta_0 \in [\theta_1,\theta_2]$,
and right or left hand limits are used if $\theta_0$ is an endpoint of
$[\theta_1,\theta_2]$.
\item If $f(x,\theta)$ has a continuous derivative with respect to $\theta$
in $[\theta_1,\theta_2]$ for $x \geq a$ and if $\int_a^u f(x,\theta)\,dx$
converges to $\int_a^\infty f(x,\theta)\,dx$ while $\int_a^\infty
\frac{\partial}{\partial \theta}f(x,\theta)\,dx$ converges uniformly in
$[\theta_1,\theta_2]$, then $\frac{\partial}{\partial \theta}\int_a^\infty
f(x,\theta)\,dx = \int_a^\infty \frac{\partial}{\partial
\theta}f(x,\theta)\,dx$, provided $a$ does not depend on $\theta$ (see
the example below). If it does, use Leibnitz's rule.
\end{itemize}
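\paragraph{Example (Differentiating Under the Integral Sign)} For
$\theta \geq \theta_1 > 0$ and $x \geq 0$,
$\left| \frac{\partial}{\partial \theta} e^{-\theta x} \right|
= x e^{-\theta x} \leq x e^{-\theta_1 x} = M(x)$, and
$\int_0^\infty M(x)\,dx = 1/\theta_1^2$ converges, so the
differentiated integral converges uniformly and
\begin{displaymath}
-\int_0^\infty x \, e^{-\theta x} \, dx
 = \frac{\partial}{\partial \theta} \int_0^\infty e^{-\theta x} \, dx
 = \frac{d}{d\theta} \, \frac{1}{\theta}
 = -\frac{1}{\theta^2}.
\end{displaymath}
Multiplying by $-\theta$ recovers the mean of the exponential
distribution:
$\int_0^\infty x \, \theta e^{-\theta x} \, dx = 1/\theta$.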
\end{document}