% This section provides a short recap of things that should be known
% from the lecture on stochastics.
\subsection{Notions of convergence}

\begin{definition}
  Fix a probability space $(\Omega,\cF,\bP)$.
  Let $X, X_1, X_2,\ldots$ be random variables.
  \begin{itemize}
    \item We say that $X_n$ converges to $X$
      \vocab[Convergence!almost surely]{almost surely}
      ($X_n \xrightarrow{a.s.} X$) iff
      \[
        \bP(\{\omega | X_n(\omega) \to X(\omega)\}) = 1.
      \]
    \item We say that $X_n$ converges to $X$
      \vocab[Convergence!in probability]{in probability}
      ($X_n \xrightarrow{\bP} X$) iff
      \[
        \lim_{n \to \infty}\bP[|X_n - X| > \epsilon] = 0
      \]
      for all $\epsilon > 0$.
    \item We say that $X_n$ converges to $X$
      \vocab[Convergence!in mean]{in the $p$-th mean}
      ($X_n \xrightarrow{L^p} X$) iff
      \[
        \bE[|X_n - X|^p] \xrightarrow{n \to \infty} 0.
      \]
  \end{itemize}
\end{definition}
% TODO Connect to ANaIII
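As a quick sanity check (a standard example, not from the lecture):
on $([0,1], \cB([0,1]), \lambda)$ consider $X_n \coloneqq \One_{[0,\frac{1}{n}]}$.
Then $X_n \to 0$ in all three senses at once:
$X_n(\omega) \to 0$ for every $\omega \in (0,1]$, hence almost surely;
$\bP[|X_n| > \epsilon] \le \frac{1}{n} \to 0$ for every $\epsilon > 0$;
and $\bE[|X_n|^p] = \frac{1}{n} \to 0$ for every $p \ge 1$.
The counterexamples in the proof of the following theorem show that in general the three notions do not coincide.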
\begin{theorem}
  \vspace{10pt}
  Let $X$ be a random variable and $(X_n)_{n \in \N}$ a sequence of random variables.
  Then
  \begin{figure}[H]
    \centering
    \begin{tikzpicture}
      \node at (0,1.5) (as) {$X_n \xrightarrow{a.s.} X$};
      \node at (1.5,0) (p) {$X_n \xrightarrow{\bP} X$};
      \node at (3,1.5) (L1) {$X_n \xrightarrow{L^1} X$};
      \draw[double equal sign distance, -implies] (as) -- (p);
      \draw[double equal sign distance, -implies] (L1) -- (p);
    \end{tikzpicture}
  \end{figure}
  and none of the other implications hold.
\end{theorem}
\begin{proof}
  \begin{claim}
    $X_n \xrightarrow{a.s.} X \implies X_n \xrightarrow{\bP} X$.
  \end{claim}
  \begin{subproof}
    Define $\Omega_0 \coloneqq \{\omega \in \Omega : \lim_{n\to \infty} X_n(\omega) = X(\omega)\}$.
    Let $\epsilon > 0$ and consider
    $A_n \coloneqq \bigcup_{m \ge n} \{\omega \in \Omega: |X_m(\omega) - X(\omega)| > \epsilon\}$.
    Then $A_n \supseteq A_{n+1} \supseteq \ldots$
    Define $A \coloneqq \bigcap_{n \in \N} A_n$.
    Then $\bP[A_n] \xrightarrow{n\to \infty} \bP[A]$ by continuity from above.
    For every $\omega \in \Omega_0$ there exists an $n \in \N$ such that $|X_m(\omega) - X(\omega)| < \epsilon$ for all $m \ge n$, i.e.~$\omega \notin A$.
    Hence $A \subseteq \Omega_0^{c}$, and since $X_n \xrightarrow{a.s.} X$ we get $\bP[A] = 0$, i.e.~$\bP[A_n] \to 0$.
    Thus
    \[
      \bP[\{\omega \in \Omega | ~|X_n(\omega) - X(\omega)| > \epsilon\}] \le \bP[A_n] \to 0.
    \]
  \end{subproof}
  \begin{claim}
    $X_n \xrightarrow{L^1} X \implies X_n\xrightarrow{\bP} X$.
  \end{claim}
  \begin{subproof}
    We have $\bE[|X_n - X|] \to 0$.
    By Markov's inequality, for every $\epsilon > 0$
    \[
      \bP[|X_n - X| > \epsilon] \le \frac{\bE[|X_n - X|]}{\epsilon} \xrightarrow{n \to \infty} 0.
    \]
  \end{subproof}
  \begin{claim}
    $X_n \xrightarrow{\bP} X \notimplies X_n\xrightarrow{L^1} X$.
  \end{claim}
  \begin{subproof}
    Take $([0,1], \cB([0,1]), \lambda)$ and define $X_n \coloneqq n \One_{[0, \frac{1}{n}]}$.
    For every $\epsilon > 0$ we have $\bP[|X_n| > \epsilon] = \frac{1}{n}$ for $n$ large enough, hence $X_n \xrightarrow{\bP} 0$.
    However $\bE[|X_n|] = 1$ for all $n$.
  \end{subproof}
  \begin{claim}
    $X_n \xrightarrow{a.s.} X \notimplies X_n\xrightarrow{L^1} X$.
  \end{claim}
  \begin{subproof}
    We can use the same counterexample as in the previous claim:
    for every $\omega \in (0,1]$ we have $X_n(\omega) = 0$ whenever $n > \frac{1}{\omega}$,
    hence $\bP[\lim_{n \to \infty} X_n = 0] \ge \lambda((0,1]) = 1$.
    We have already seen that $X_n$ does not converge in $L^1$.
  \end{subproof}
  \begin{claim}
    $X_n \xrightarrow{L^1} X \notimplies X_n\xrightarrow{a.s.} X$.
  \end{claim}
  \begin{subproof}
    Take $\Omega = [0,1], \cF = \cB([0,1]), \bP = \lambda$.
    Define $A_n \coloneqq [j 2^{-k}, (j+1) 2^{-k}]$ and $X_n \coloneqq \One_{A_n}$,
    where $n = 2^k + j$ with $0 \le j < 2^k$.
    We have
    \[
      \bE[|X_n|] = \int_{\Omega}|X_n| d\bP = \frac{1}{2^k} \to 0.
    \]
    However $X_n$ does not converge a.s., as for all $\omega \in [0,1]$
    the sequence $X_n(\omega)$ takes the values $0$ and $1$ infinitely often.
  \end{subproof}
\end{proof}

How do we prove that something happens almost surely?
The first thing that should come to mind is:
\begin{lemma}[Borel--Cantelli]
  If we have a sequence of events $(A_n)_{n \ge 1}$ such that $\sum_{n \ge 1} \bP(A_n) < \infty$,
  then $\bP[A_n \text{ for infinitely many } n] = 0$
  (more precisely: $\bP[\limsup_{n \to \infty} A_n] = 0$).

  For independent events $A_n$, the converse holds as well.
\end{lemma}
\iffalse
\todo{Add more stuff here}
\subsection{Some inequalities}
% TODO: Markov
\begin{theorem}[Chebyshev's inequality]
  % TODO Proof
  Let $X$ be a r.v.~with $\Var(X) < \infty$.
  Then $\forall \epsilon > 0 : \bP\left[\left|X - \bE[X]\right| > \epsilon\right] \le \frac{\Var(X)}{\epsilon^2}$.
\end{theorem}
We used Chebyshev's inequality,
linearity of $\bE$, $\Var(cX) = c^2\Var(X)$,
and $\Var(X_1 + \ldots + X_n) = \Var(X_1) + \ldots + \Var(X_n)$ for independent $X_i$.
Modes of convergence: $L^p$, in probability, a.s.
\fi
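As a quick illustration of how Borel--Cantelli is typically applied (a standard example, not from the lecture):
let $(A_n)_{n \ge 1}$ be events with $\bP(A_n) = \frac{1}{n^2}$.
Since $\sum_{n \ge 1} \frac{1}{n^2} < \infty$, we get $\bP[\limsup_{n \to \infty} A_n] = 0$,
i.e.~almost surely only finitely many of the $A_n$ occur.
If instead the $A_n$ are independent with $\bP(A_n) = \frac{1}{n}$,
then $\sum_{n \ge 1} \bP(A_n) = \infty$,
so by the converse direction almost surely infinitely many of the $A_n$ occur.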