\documentclass[12pt]{article}
%\usepackage{amsbsy}   % for \boldsymbol and \pmb
%\usepackage{graphicx} % To include pdf files!
\usepackage{amsmath}
\usepackage{amsbsy}
\usepackage{amsfonts}
%\usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue, citecolor=blue, urlcolor=blue]{hyperref} % For links
\usepackage{fullpage} % Good for US Letter paper
\topmargin=-0.75in
\textheight=9.5in
\usepackage{fancyhdr}
\renewcommand{\headrulewidth}{0pt} % Otherwise there's a rule under the header
\setlength{\headheight}{15.2pt}
\fancyhf{}
\pagestyle{fancy}
\cfoot{Page \thepage {} of 2}
%
%\pagestyle{empty} % No page numbers

\begin{document}
%\enlargethispage*{1000 pt}

\begin{flushright}
Name \underline{\hspace{60mm}} \\
$\,$ \\
Student Number \underline{\hspace{60mm}}
\end{flushright}
\vspace{2mm}

\begin{center}
{\Large \textbf{STA 312 f2023 Quiz 1}}\\
\vspace{1 mm}
\end{center}

\noindent
Let the random variable $X$ have an exponential distribution (see formula sheet on reverse), and let $Y = a \, X$, where the constant $a>0$. Derive the probability density function of $Y$. Show your work. Do not forget to indicate where the density is non-zero.

\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

~ \vspace{100mm}

\begin{center}
{\Large \textbf{STA 312f23 Formulas}}\\
% \vspace{1 mm}
\end{center}

\renewcommand{\arraystretch}{1.75}

%%%%%%%%%%%%%%%%%%%%%%%%%%%% Common Distributions %%%%%%%%%%%%%%%%%%%%%%%%%%
\noindent
\begin{tabular}{|l|l|c|c|} \hline
\textbf{Distribution} & \textbf{Density or probability mass function} & \textbf{Expected value} & \textbf{Variance} \\ \hline
Bernoulli($\theta$) & $p(x) = \theta^x(1-\theta)^{1-x}$ for $x=0,1$ & $\theta$ & $\theta(1-\theta)$\\ \hline
Binomial($n,\theta$) & $p(k) = \binom{n}{k}\theta^k(1-\theta)^{n-k}$ for $k = 0, 1, \ldots, n$ & $n\theta$ & $n\theta(1-\theta)$ \\ \hline
Geometric($\theta$) & $p(k) = (1-\theta)^{k-1}\,\theta$ for $k = 1, 2, \ldots$ & $1/\theta$ & $(1-\theta)/\theta^2$ \\ \hline
Poisson($\lambda$) & $p(k) = \frac{e^{-\lambda}\, \lambda^k}{k!}$ for $k = 0, 1, \ldots $ & $\lambda$ & $\lambda$ \\ \hline
Exponential($\lambda$) & $f(x) = \lambda e^{-\lambda x}$ for $x \geq 0$
% \hspace{5mm} $F(x) = 1-e^{-\lambda x}$ for $x \geq 0$
  & $1/\lambda$ & $1/\lambda^2$ \\ \hline
Gamma($\alpha,\lambda$) & $f(x) = \frac{\lambda^\alpha}{\Gamma(\alpha)} e^{-\lambda x} \, x^{\alpha-1}$ for $x \geq 0$ & $\alpha/\lambda$ & $\alpha/\lambda^2$ \\ \hline
Normal($\mu,\sigma^2$) & $f(x) = \frac{1}{\sigma \sqrt{2\pi}} \exp\left\{ -\frac{(x-\mu)^2}{2\sigma^2} \right\}$ & $\mu$ & $\sigma^2$ \\ \hline
Chi-squared($\nu$) & $f(x) = \frac{1}{2^\frac{\nu}{2}\Gamma(\frac{\nu}{2})} e^{-\frac{x}{2}} \, x^{\frac{\nu}{2}-1}$ for $x \geq 0$ & $\nu$ & $2\nu$ \\ \hline
\end{tabular}

\vspace{2mm}

\begin{tabular}{lcl}
%%%%%%%%%%%%%%%%%%%%%%%%%%%% Moments %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
$E(X) \stackrel{def}{=} \sum_x x \, p_X(x)$ or $\int_{-\infty}^\infty x \, f_X(x) \, dx$ & ~ &
$E(g(X)) = \sum_x g(x) \, p_X(x)$ or $\int_{-\infty}^\infty g(x) \, f_X(x) \, dx$ \\
$Var(X) \stackrel{def}{=} E\left( (X-\mu)^2 \right)$ & ~ & $Var(X) = E(X^2)-[E(X)]^2$ \\
If $X \sim N(\mu,\sigma^2)$, then $\frac{X-\mu}{\sigma} \sim N(0,1)$. & ~ &
If $Z \sim N(0,1)$, then $Z^2 \sim \chi^2(1)$. \\
\end{tabular}

\end{document}
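% -----------------------------------------------------------------------
% Not part of the quiz as distributed: a minimal solution sketch by the
% cumulative distribution function method, assuming X ~ Exponential(lambda)
% with the density on the formula sheet. It is left commented out (and sits
% after \end{document}), so it never appears in the printed quiz; paste it
% into a solutions copy if wanted.
% -----------------------------------------------------------------------
% \noindent Since $a > 0$, for $y \geq 0$,
% \begin{align*}
% F_Y(y) = P(Y \leq y) = P(aX \leq y) = P\!\left(X \leq \tfrac{y}{a}\right)
%        = \int_0^{y/a} \lambda e^{-\lambda x} \, dx
%        = 1 - e^{-\lambda y / a}.
% \end{align*}
% Differentiating,
% \begin{align*}
% f_Y(y) = \frac{d}{dy} F_Y(y) = \frac{\lambda}{a} \, e^{-\frac{\lambda}{a} y}
% \end{align*}
% for $y \geq 0$, and $f_Y(y) = 0$ for $y < 0$. That is,
% $Y \sim$ Exponential$(\lambda/a)$.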