\lecture{1}{2023-04-04}{}

First, let us recall some basic definitions:

\begin{definition}
A \vocab{probability space} is a triplet $(\Omega, \cF, \bP)$,
such that
\begin{itemize}
\item $\Omega \neq \emptyset$,
\item $\cF$ is a $\sigma$-algebra over $\Omega$, i.e.~$\cF \subseteq \cP(\Omega)$ and
\begin{itemize}
\item $\emptyset, \Omega \in \cF$,
\item $A \in \cF \implies A^c \in \cF$,
\item $A_1, A_2,\ldots \in \cF \implies \bigcup_{i \in \N} A_i \in \cF$.
\end{itemize}
The elements of $\cF$ are called \vocab[Event]{events}.
\item $\bP$ is a \vocab{probability measure}, i.e.~$\bP$ is a function $\bP: \cF \to [0,1]$
such that
\begin{itemize}
\item $\bP(\emptyset) = 0$, $\bP(\Omega) = 1$,
\item $\bP\left( \bigsqcup_{n \in \N} A_n \right) = \sum_{n \in \N} \bP(A_n)$
for mutually disjoint $A_n \in \cF$.
\end{itemize}
\end{itemize}
\end{definition}
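
For instance, a fair die is modelled by the probability space $(\Omega, \cF, \bP)$ with
\[
\Omega = \{1, \ldots, 6\}, \qquad \cF = \cP(\Omega), \qquad \bP(A) = \frac{|A|}{6} \text{ for } A \in \cF.
\]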
\begin{definition}+
Let $X$ be a random variable and $k \in \N$.
Then the $k$-th \vocab{moment} of $X$ is defined as
$\bE[X^k]$.
\end{definition}
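
For example, if $X$ takes the values $\pm 1$ with probability $\frac{1}{2}$ each, then
\[
\bE[X^k] = \frac{1}{2} \cdot 1^k + \frac{1}{2} \cdot (-1)^k = \begin{cases}
1 & k \text{ even},\\
0 & k \text{ odd}.
\end{cases}
\]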
\begin{definition}
A \vocab{random variable} $X : (\Omega, \cF) \to (\R, \cB(\R))$
is a measurable function, i.e.~for all $B \in \cB(\R)$ we have $X^{-1}(B) \in \cF$.
(Equivalently, $X^{-1}\left( (a,b] \right) \in \cF$ for all $a, b \in \R$ with $a < b$, since the intervals $(a,b]$ generate $\cB(\R)$.)
\end{definition}
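
For instance, the indicator function $\mathbf{1}_A$ of an event $A \in \cF$ is a random variable, since each preimage $\mathbf{1}_A^{-1}(B)$, $B \in \cB(\R)$, is one of $\emptyset, A, A^c, \Omega$, all of which lie in $\cF$.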
\begin{definition}
$F: \R \to \R_+$ is a \vocab{distribution function} iff
\begin{itemize}
\item $F$ is monotone non-decreasing,
\item $F$ is right-continuous,
\item $\lim_{x \to -\infty} F(x) = 0$ and $\lim_{x \to \infty} F(x) = 1$.
\end{itemize}
\end{definition}
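
For instance, $F = \mathbf{1}_{[0,\infty)}$, i.e.~$F(x) = 0$ for $x < 0$ and $F(x) = 1$ for $x \ge 0$, is a distribution function; by the following fact it is the distribution function of the Dirac measure $\delta_0$, since $\delta_0\left( (-\infty, x] \right) = \mathbf{1}_{[0,\infty)}(x)$.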
\begin{fact}
Let $\bP$ be a probability measure on $(\R, \cB(\R))$.
Then $F(x) \coloneqq \bP\left( (-\infty, x] \right)$
is a distribution function.
(See Lemma 2.4.2 in the Stochastik lecture notes.)
\end{fact}
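
Indeed, monotonicity of $F$ is clear, right-continuity follows from continuity of $\bP$ from above, since $(-\infty, x + \frac{1}{n}] \downarrow (-\infty, x]$ as $n \to \infty$, and the limits at $\pm\infty$ follow from $(-\infty, -n] \downarrow \emptyset$ and $(-\infty, n] \uparrow \R$.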

The converse to this fact is also true:
\begin{theorem}[Kolmogorov's existence theorem / basic existence theorem of probability theory]
\label{kolmogorovexistence}
Let $\cF(\R)$ be the set of all distribution functions on $\R$
and let $\cM(\R)$ be the set of all probability measures on $(\R, \cB(\R))$.
Then there is a one-to-one correspondence between $\cF(\R)$ and $\cM(\R)$
given by
\begin{IEEEeqnarray*}{rCl}
\cM(\R) &\longrightarrow & \cF(\R)\\
\bP &\longmapsto & \begin{pmatrix*}[l]
\R &\longrightarrow & \R_+ \\
x &\longmapsto & \bP((-\infty, x]).
\end{pmatrix*}
\end{IEEEeqnarray*}
\end{theorem}
\begin{proof}
See Theorem 2.4.3 in the Stochastik lecture notes.
\end{proof}
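
In the other direction, a distribution function $F$ corresponds to the unique probability measure $\bP$ on $(\R, \cB(\R))$ with $\bP\left( (a,b] \right) = F(b) - F(a)$ for all $a < b$; uniqueness holds because the intervals $(a,b]$ form a $\cap$-stable generator of $\cB(\R)$.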
\begin{example}[Some important probability distribution functions]\hfill
\begin{enumerate}[(1)]
\item \vocab{Uniform distribution} on $[0,1]$:
\[
F(x) = \begin{cases}
0 & x \in (-\infty, 0],\\
x & x \in (0,1],\\
1 & x \in (1,\infty).\\
\end{cases}
\]
\begin{figure}[H]
\centering
\begin{tikzpicture}
\begin{axis}[samples=1000, xmin=-1, xmax=2, width=10cm, height=5cm]
\addplot[] {and(x>0,x<=1) * x + (x>1)};
\end{axis}
\end{tikzpicture}
\end{figure}
\item \vocab{Exponential distribution} with parameter $\lambda > 0$ (cf.~the remark after this example):
\[
F(x) = \begin{cases}
1 - e^{-\lambda x} & x \ge 0,\\
0 & x < 0.
\end{cases}
\]
\begin{figure}[H]
\centering
\begin{tikzpicture}
\begin{axis}[samples=1000, smooth, width=10cm, height=5cm, xmin=-2, xmax=5]
\addplot[] {(x > 0) * (1 - exp( - 5 * x))};
\end{axis}
\end{tikzpicture}
\end{figure}
\item \vocab{Gaussian distribution}:
\[
\Phi(x) \coloneqq \frac{1}{\sqrt{2\pi}} \int_{-\infty}^x e^{-\frac{y^2}{2}} dy.
\]
\item $\bP[X = 1] = \bP[X = -1] = \frac{1}{2}$:
\[
F(x) = \begin{cases}
0 & x \in (-\infty, -1),\\
\frac{1}{2} & x \in [-1,1),\\
1 & x \in [1, \infty).
\end{cases}
\]
\begin{figure}[H]
\centering
\begin{tikzpicture}
\begin{axis}[samples=1000, width=10cm, height=5cm, xmin=-2, xmax=2]
\addplot[ domain=-2.5:-1]{ 0 };
\addplot[ domain=-1:1] { 1 / 2 };
\addplot[ domain=1:2.5] { 1 };
\end{axis}
\end{tikzpicture}
\end{figure}
\end{enumerate}
\end{example}
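
To tie these examples back to the moments defined above: for the exponential distribution with parameter $\lambda$, integrating against the density $F'(x) = \lambda e^{-\lambda x}$, $x > 0$, gives
\[
\bE[X] = \int_0^\infty x \lambda e^{-\lambda x} \,dx = \frac{1}{\lambda}.
\]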