\documentclass[letter, 12pt]{article}
\usepackage[margin=0.6in,paperwidth=8.5in, paperheight=11in]{geometry}
\usepackage{graphicx,longtable, stmaryrd, ulem, setspace,listings,enumerate,tikz,fancyhdr,multicol, hyperref, calrsfs, float,ifpdf, url, amsmath, amssymb, comment,color,xcolor,dsfont}
\usepackage[mathscr]{euscript}
\pagestyle{fancy}
\renewcommand{\headrulewidth}{0pt}
\providecommand{\e}[1]{\ensuremath{\times 10^{#1}}}
\newcommand{\red}[1]{\textcolor{red}{#1}}
\newcommand{\blue}[1]{\textcolor{blue}{#1}}
\newcommand{\green}[1]{\textcolor{green}{#1}}
\newcommand{\grey}[1]{\textcolor{gray}{#1}}
\newcommand{\ohm}{$\Omega$}
\DeclareMathOperator{\Error}{Error}
\allowdisplaybreaks
\graphicspath {{figures/}}
\usepackage[utf8]{inputenc}
\definecolor{dkgreen}{rgb}{0,0.6,0}
\definecolor{gray}{rgb}{0.5,0.5,0.5}
\definecolor{mauve}{rgb}{0.58,0,0.82}
\lstset{frame=tb,
language=R,
aboveskip=3mm,
belowskip=3mm,
showstringspaces=false,
columns=flexible,
basicstyle={\small\ttfamily},
numbers=none,
numberstyle=\tiny\color{gray},
keywordstyle=\color{blue},
commentstyle=\color{dkgreen},
stringstyle=\color{mauve},
breaklines=true,
breakatwhitespace=true,
tabsize=3
}
\begin{document}
\begin{center}
STAT 4033
\end{center}
Exam 2 Notes \hfill Name: \uline{Cooper Morris}
\begin{multicols}{2}
\textbf{\uline{C2S4:}}\\
\textbf{Random Variable:} X, assigns a numeric value to outcomes in a sample space.\\
\textbf{Discrete Random Variable:} Possible values are countable, e.g., the sum of two dice.\\
\textbf{Continuous Random Variable:} Possible values form a continuum, e.g., the weight of a randomly chosen person.\\
\textbf{Probability Mass Function:} \(P(X=x) = p(x)\) \\
\textbf{Cumulative Distribution Function:} \(P(X\leq x) \) Denoted by \(F(x)\) \\
\(0\leq p(x) \leq 1\)\\
\(\sum_xp(x) = 1\)\\
\textbf{Support:} the set of values \(x\) such that \(f(x) > 0\)\\
\textbf{Percentile:} \(x_p\) such that \(P(X\leq x_p) = \frac{p}{100}\)\\
\(F(x_p) = \int_{-\infty}^{x_p} f(x)\,dx = \frac{p}{100}\)\\
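R's distribution functions follow one naming scheme: \texttt{d*} gives the mass/density, \texttt{p*} the CDF, \texttt{q*} the percentile (inverse CDF). A minimal sketch with made-up parameters:
\begin{lstlisting}
dbinom(2, size = 5, prob = 0.3)   # p(2) = P(X = 2)
pbinom(2, size = 5, prob = 0.3)   # F(2) = P(X <= 2)
qbinom(0.5, size = 5, prob = 0.3) # 50th percentile
\end{lstlisting}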
\textbf{\uline{C2S6:}}\\
\textbf{Jointly Distributed Random Variables:} Two or more random variables that are related when considering ``individuals'' in a population.\\
\textbf{Joint Probability Mass Function:} \(p(x,y) = P(X=x, Y=y) = P(\{X=x\} \cap \{Y=y\})\)\\
\textbf{Marginal Probability Mass Function:} \(p_X(x) = \sum_y p(x,y)\)\\
\(f_X(x) = \int_{-\infty}^\infty f(x,y)\,dy\)\\
Summing (discrete) or integrating (continuous) out the other variable gives the marginal mass or density function for the variable you want.\\
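A sketch of marginals in R for a discrete joint PMF stored as a matrix (the entries below are made up):
\begin{lstlisting}
# joint pmf p(x, y): rows index x, columns index y
p <- matrix(c(0.10, 0.20,
              0.30, 0.40), nrow = 2, byrow = TRUE)
rowSums(p)  # marginal p_X(x): y summed out
colSums(p)  # marginal p_Y(y): x summed out
sum(p)      # sanity check: total mass 1
\end{lstlisting}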
\textbf{\uline{C4S1: The Bernoulli Distribution}}\\
X \(\sim\) Bernoulli(\(p\))\\
\(p\) is the probability of a success.\\
\(P(X=x) = p^x(1-p)^{1-x}\) for \(x \in \{0,1\}\)\\
\(\mathds{E}[X] = \mu_X = p\)\\
\(\textnormal{Var}(X) = p\cdot(1-p)\)\\
\textbf{\uline{C4S2: The Binomial Distribution}}\\
\textit{X} counts the successes in \textit{n} independent Bernoulli trials, each with the same \textit{p}. Think of a for loop running \textit{n} times.\\
\(X \sim \textnormal{Bin}(n, p)\)\\
\(p(x) = P(X = x) = {n \choose x} p^x(1-p)^{n-x}\)\\
Table A1 gives probabilities for the Binomial distribution in the form \(P(X \leq x)\)\\
\(\mathds{E}[X] = \mu_X = n\cdot p\)\\
\(\sigma^2_X = \textnormal{Var}(X) = n\cdot p \cdot (1-p)\)\\
\(\sigma_X = \sqrt{n\cdot p \cdot (1-p)}\)\\
\(\hat{p} = \frac{x}{n} = \frac{\sum Y_i}{n}\) where \(\hat{p}\) is an estimator of \textit{p}.\\
\(\mathds{E}[\hat{p}] = \mu_{\hat{p}} = p\)\\
\(\textnormal{Var}(\hat{p}) = \frac{p\cdot (1-p)}{n}\)\\
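In R (the parameters are illustrative; a Bernoulli is the size = 1 case):
\begin{lstlisting}
dbinom(3, size = 10, prob = 0.4) # P(X = 3), X ~ Bin(10, 0.4)
pbinom(3, size = 10, prob = 0.4) # P(X <= 3), the Table A1 form
dbinom(1, size = 1, prob = 0.4)  # Bernoulli(0.4): P(X = 1) = p
10 * 0.4 * (1 - 0.4)             # Var(X) = np(1 - p)
\end{lstlisting}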
\textbf{\uline{C4S3: The Poisson Distribution}}\\
The limit of \(\textnormal{Bin}(n, p)\) as \(n \to \infty\) and \(p \to 0\) with \(\lambda = n \cdot p\) held fixed.\\
\(X \sim \textnormal{Poisson}(\lambda)\)\\
\(p(x) = \frac{e^{-\lambda}\cdot \lambda^x}{x!}\)\\
\(\mathds{E}[X] = \mu_X = \lambda\)\\
\(\textnormal{Var}(X) = \lambda\)\\
Estimating the rate from \(x\) events observed over \(t\) units of time or space:\\
\(\hat{\lambda} = \frac{x}{t}\)\\
\(\mathds{E}[\hat{\lambda}] = \lambda\)\\
\(\textnormal{Var}(\hat{\lambda}) = \frac{\lambda}{t}\)\\
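In R (the rate and counts are made-up examples):
\begin{lstlisting}
dpois(4, lambda = 2.5) # P(X = 4), X ~ Poisson(2.5)
ppois(4, lambda = 2.5) # P(X <= 4)
6 / 3                  # lambda-hat = x/t: 6 events in t = 3 units
\end{lstlisting}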
\textbf{\uline{C4S4: The Hypergeometric Distribution}}\\
A finite population of size \textit{N}, divided into two groups. \textit{R} items in group one and \textit{N-R} in group two. \textit{n} items selected without replacement.\\
\(X \sim H(N, R, n)\)\\
Probability Mass Function:\\
\(p(x) = \frac{{R \choose x}{{N-R} \choose {n-x}}}{{N \choose n}}\)\\
\(\mathds{E}[X] = \mu_X = n\cdot \frac{R}{N}\)\\
\(\textnormal{Var}(X) = n\frac{R}{N}\cdot(1-\frac{R}{N})\cdot(\frac{N-n}{N-1})\)\\
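Note R parameterizes \texttt{dhyper} as \((m, n, k) = (R, N-R, n)\) rather than \((N, R, n)\); a sketch with made-up \(N = 20\), \(R = 8\), \(n = 5\):
\begin{lstlisting}
dhyper(2, m = 8, n = 12, k = 5) # P(X = 2), X ~ H(20, 8, 5)
5 * 8 / 20                      # E[X] = n * R/N
\end{lstlisting}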
\textbf{\uline{Geometric Distribution:}}\\
A sequence of independent Bernoulli trials where \textit{p} is constant and \textit{X} is the number of trials up to and including the first success. Think of a while loop with the first success as the terminating condition.\\
\(X \sim \textnormal{Geom}(p)\)\\
Probability Mass Function:\\
\(p(x) = (1-p)^{x-1} \cdot p\)\\
\(\mathds{E}[X] = \mu_X = \frac{1}{p}\)\\
\(\textnormal{Var}(X) = \frac{1-p}{p^2}\)\\
\(\sigma_X = \sqrt{\frac{1-p}{p^2}}\)\\
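Watch the off-by-one in R: \texttt{dgeom} counts failures before the first success, so for \(X\) = total trials use \(x - 1\) (the \(p\) below is made up):
\begin{lstlisting}
p <- 0.3
dgeom(4 - 1, prob = p) # P(X = 4): 3 failures, then a success
(1 - p)^3 * p          # same value from the mass function
1 / p                  # E[X]
\end{lstlisting}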
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\end{multicols}
\newpage
\begin{multicols}{2}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\textbf{Discrete Random Variables:}\\
\(\mu_x = \mathds{E}[X] = \sum_x x\cdot p(x)\)\\
\(\sigma^2 = \sum_x x^2\cdot p(x) - \mu_x^2\)\\
\(\mathds{E}[g(X)] = \sum_xg(x)\cdot p(x)\)\\
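Moments straight from a PMF table in R (values made up):
\begin{lstlisting}
x <- c(0, 1, 2, 3)
p <- c(0.1, 0.2, 0.3, 0.4) # must sum to 1
mu <- sum(x * p)           # E[X]
sum(x^2 * p) - mu^2        # Var(X) = E[X^2] - mu^2
sum(x^3 * p)               # E[g(X)] with g(x) = x^3
\end{lstlisting}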
\textbf{Continuous Random Variables:}\\
\(P(a\leq X \leq b) = \int_a^b f(x)\,dx\)\\
\(\mu_x = \mathds{E}[X] = \int_{-\infty}^\infty x \cdot f(x)\,dx\)\\
\(\sigma^2 = \int_{-\infty}^\infty x^2\cdot f(x)\,dx - \mu_x^2 \)\\
\(\mathds{E}[g(X)] = \int_{-\infty}^\infty g(x) \cdot f(x)\,dx\)\\
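The same moments numerically via \texttt{integrate}, for the made-up density \(f(x) = 2x\) on \((0,1)\):
\begin{lstlisting}
f <- function(x) 2 * x   # density on (0, 1)
integrate(f, 0, 1)$value # total probability: 1
mu <- integrate(function(x) x * f(x), 0, 1)$value    # E[X] = 2/3
integrate(function(x) x^2 * f(x), 0, 1)$value - mu^2 # Var = 1/18
\end{lstlisting}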
\textbf{Chebyshev's Inequality:} for the interval \((\mu_X \pm k\sigma_X)\),\\
\(P(\vert X - \mu_X \vert \geq k\sigma_X) \leq \frac{1}{k^2}\)\\
\textbf{\uline{C2S5:}}\\
\textbf{Linear Functions of Random Variables}\\
\textbf{Linear Function:} \(Y = aX + b\)\\
\textbf{Expected Value:} \( a\mathds{E}[X] + b\)\\
\textbf{Variance:} \(a^2\cdot \textnormal{Var}(X)\)\\
\textbf{Standard Deviation:} \(\vert a \vert \cdot \sigma_x\)\\
\\ \textbf{Linear Combinations of Random Variables}\\
\textbf{Linear Combinations:} \(\sum_{i=1}^n c_iX_i\)\\
\textbf{Expected Value:} \(\sum_{i=1}^n c_i \mathds{E}[X_i]\)\\
\textbf{Variance:} \(\sum_{i=1}^n c_i^2\textnormal{Var}(X_i)\) where the \(X_i\)s are independent\\
For independent \(X\) and \(Y\) (see the simulation sketch below):\\
\(\textnormal{Var}(X+Y) = \textnormal{Var}(X) + \textnormal{Var}(Y)\)\\
\(\textnormal{Var}(X-Y) = \textnormal{Var}(X) + \textnormal{Var}(Y)\)\\
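A quick simulation check of these rules under independence (the distributions and constants are arbitrary):
\begin{lstlisting}
set.seed(1)
x <- rnorm(1e5, mean = 2, sd = 3) # any independent X, Y work
y <- rnorm(1e5, mean = 5, sd = 4)
mean(2 * x - y + 1) # ~ 2 E[X] - E[Y] + 1 = 0
var(2 * x - y)      # ~ 4 Var(X) + Var(Y) = 52
\end{lstlisting}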
\textbf{Simple Random Samples}\\
\(\bar{x} = \frac{1}{n}\sum_{i=1}^n x_i\)\\
\(\mu_{\bar{x}} = \mathds{E}[\bar{x}] = \mu\)\\
\(\sigma_{\bar{x}}^2 = \textnormal{Var}(\bar{x}) = \frac{\sigma_x^2}{n}\)\\
\(\sigma_{\bar{x}} = \frac{\sigma_x}{\sqrt{n}}\)\\
\textbf{\uline{C2S6:}}\\
\(0 \leq p(x,y) \leq 1\)\\
\(\sum_x \sum_y p(x,y) = 1\)\\
\(\int_{-\infty}^\infty \int_{-\infty}^\infty f(x,y)\,dx\,dy = 1\)\\
\(P(a \leq X \leq b, c \leq Y \leq d) = \int_c^d \int_a^b f(x,y) \,dx\,dy\)\\
\(\mathds{E}[h(X,Y)] = \sum_x \sum_y h(x, y) p(x, y)\)\\
\(\mathds{E}[h(X,Y)] = \int_{-\infty}^\infty \int_{-\infty}^\infty h(x, y) f(x, y)\,dx\,dy\)\\
\(\textnormal{Cov}(X,Y) = \mu_{XY}-\mu_X\mu_Y\) where \(\mu_{XY} = \mathds{E}[XY]\)\\
\(\rho_{X,Y} = \frac{\textnormal{Cov}(X,Y)}{\sqrt{\textnormal{Var}(X)\textnormal{Var}(Y)}}\)\\
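Cov and \(\rho\) from a discrete joint PMF in R (the \(2\times2\) table is made up):
\begin{lstlisting}
x <- c(0, 1); y <- c(0, 1)
p <- matrix(c(0.4, 0.1,
              0.1, 0.4), nrow = 2, byrow = TRUE) # p(x, y)
mux <- sum(x * rowSums(p)); muy <- sum(y * colSums(p))
muxy <- sum(outer(x, y) * p)  # E[XY]
covxy <- muxy - mux * muy     # Cov(X, Y) = 0.15
covxy / sqrt((sum(x^2 * rowSums(p)) - mux^2) *
             (sum(y^2 * colSums(p)) - muy^2)) # rho = 0.6
\end{lstlisting}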
\textbf{\uline{C3:}}\\
\(\sigma_{g(X)} \approx \vert g'(\mu_X)\vert \cdot \sigma_X\)\\
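A propagation-of-error sketch with a numerical derivative (\(g\), \(\mu_X\), \(\sigma_X\) below are made up):
\begin{lstlisting}
g <- function(x) x^2 # any smooth g
mux <- 3; sigx <- 0.1; h <- 1e-6
gprime <- (g(mux + h) - g(mux - h)) / (2 * h) # g'(mu) ~ 6
abs(gprime) * sigx   # sigma_g(X) ~ 0.6
\end{lstlisting}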
\textbf{\uline{C4S1: The Bernoulli Distribution}}\\
\(P(X=x) = p^x(1-p)^{1-x}\)\\
\(\mathds{E}[X] = \mu_X = p\)\\
\(\textnormal{Var}(X) = p\cdot(1-p)\)\\
\\
\textbf{\uline{C4S2: The Binomial Distribution}}\\
\(X \sim \textnormal{Bin}(n, p)\)\\
\(p(x) = P(X = x) = {n \choose x} p^x(1-p)^{n-x}\)\\
\(\mathds{E}[X] = \mu_X = n\cdot p\)\\
\(\sigma^2_X = \textnormal{Var}(X) = n\cdot p \cdot (1-p)\)\\
\(\sigma_X = \sqrt{n\cdot p \cdot (1-p)}\)\\
\(\hat{p} = \frac{x}{n} = \frac{\sum Y_i}{n}\) where \(\hat{p}\) is an estimator of \textit{p}.\\
\(\mathds{E}[\hat{p}] = \mu_{\hat{p}} = p\)\\
\(\textnormal{Var}(\hat{p}) = \frac{p\cdot (1-p)}{n}\)\\
\textbf{\uline{C4S3: The Poisson Distribution}}\\
\(\lambda = n \cdot p\)\\
\(X \sim \textnormal{Poisson}(\lambda)\)\\
\(p(x) = \frac{e^{-\lambda}\cdot \lambda^x}{x!}\)\\
\(\mathds{E}[X] = \mu_X = \lambda\)\\
\(\textnormal{Var}(X) = \lambda\)\\
\textbf{\uline{C4S4: The Hypergeometric Distribution}}\\
\(X \sim H(N, R, n)\)\\
\(p(x) = \frac{{R \choose x}{{N-R} \choose {n-x}}}{{N \choose n}}\)\\
\(\mathds{E}[X] = \mu_X = n\cdot \frac{R}{N}\)\\
\(\textnormal{Var}(X) = n\frac{R}{N}\cdot(1-\frac{R}{N})\cdot(\frac{N-n}{N-1})\)\\
\textbf{\uline{Geometric Distribution:}}\\
\(X \sim \textnormal{Geom}(p)\)\\
\(p(x) = (1-p)^{x-1} \cdot p\)\\
\(\mathds{E}[X] = \mu_X = \frac{1}{p}\)\\
\(\textnormal{Var}(X) = \frac{1-p}{p^2}\)\\
\(\sigma_X = \sqrt{\frac{1-p}{p^2}}\)\\
\end{multicols}
\end{document}