% This section provides a short recap of things that should be known
% from the lecture on stochastics.
\subsection{Notions of convergence}
\begin{definition}
  Fix a probability space $(\Omega, \cF, \bP)$.
  Let $X, X_1, X_2, \ldots$ be random variables.
  \begin{itemize}
    \item We say that $X_n$ converges to $X$
      \vocab[Convergence!almost surely]{almost surely}
      ($X_n \xrightarrow{a.s.} X$)
      iff
      \[
        \bP(\{\omega | X_n(\omega) \to X(\omega)\}) = 1.
      \]
    \item We say that $X_n$ converges to $X$
      \vocab[Convergence!in probability]{in probability}
      ($X_n \xrightarrow{\bP} X$)
      iff
      \[
        \lim_{n \to \infty} \bP[|X_n - X| > \epsilon] = 0
      \]
      for all $\epsilon > 0$.
    \item We say that $X_n$ converges to $X$
      \vocab[Convergence!in mean]{in the $p$-th mean}
      ($X_n \xrightarrow{L^p} X$)
      iff
      \[
        \bE[|X_n - X|^p] \xrightarrow{n \to \infty} 0.
      \]
  \end{itemize}
\end{definition}
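To see all three notions at work in one concrete case, here is a quick sketch
on $([0,1], \cB([0,1]), \lambda)$: let $X_n \coloneqq \One_{[0, \frac{1}{n}]}$.
For every $\omega \in (0,1]$ we have $X_n(\omega) = 0$ as soon as $n > \frac{1}{\omega}$,
so $X_n \xrightarrow{a.s.} 0$; moreover, for every $\epsilon \in (0,1)$ and every $p \ge 1$,
\[
  \bP[|X_n| > \epsilon] = \frac{1}{n} \to 0
  \quad \text{and} \quad
  \bE[|X_n|^p] = \frac{1}{n} \to 0,
\]
so $X_n \to 0$ in probability and in the $p$-th mean as well.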
% TODO Connect to ANaIII
\begin{theorem}
  \vspace{10pt}
  Let $X$ be a random variable and $X_n, n \in \N$ a sequence of random variables.
  Then
  \begin{figure}[H]
    \centering
    \begin{tikzpicture}
      \node at (0,1.5) (as) {$X_n \xrightarrow{a.s.} X$};
      \node at (1.5,0) (p) {$X_n \xrightarrow{\bP} X$};
      \node at (3,1.5) (L1) {$X_n \xrightarrow{L^1} X$};
      \draw[double equal sign distance, -implies] (as) -- (p);
      \draw[double equal sign distance, -implies] (L1) -- (p);
    \end{tikzpicture}
  \end{figure}
  and none of the other implications hold.
\end{theorem}
\begin{proof}
  \begin{claim}
    $X_n \xrightarrow{a.s.} X \implies X_n \xrightarrow{\bP} X$.
  \end{claim}
  \begin{subproof}
    Let $\Omega_0 \coloneqq \{\omega \in \Omega : \lim_{n \to \infty} X_n(\omega) = X(\omega)\}$.
    Let $\epsilon > 0$ and consider $A_n \coloneqq \bigcup_{m \ge n} \{\omega \in \Omega : |X_m(\omega) - X(\omega)| > \epsilon\}$.
    Then $A_n \supseteq A_{n+1} \supseteq \ldots$
    Define $A \coloneqq \bigcap_{n \in \N} A_n$.
    Then $\bP[A_n] \xrightarrow{n \to \infty} \bP[A]$ by continuity from above.
    Since $X_n \xrightarrow{a.s.} X$ we have that
    $\forall \omega \in \Omega_0 ~\exists n \in \N ~\forall m \ge n : |X_m(\omega) - X(\omega)| < \epsilon$.
    Hence $A \subseteq \Omega_0^c$, so $\bP[A] = 0$ and therefore $\bP[A_n] \to 0$.
    Since $\{\omega \in \Omega : |X_n(\omega) - X(\omega)| > \epsilon\} \subseteq A_n$, it follows that
    \[
      \bP[\{\omega \in \Omega : |X_n(\omega) - X(\omega)| > \epsilon\}] \le \bP[A_n] \to 0.
    \]
  \end{subproof}
  \begin{claim}
    $X_n \xrightarrow{L^1} X \implies X_n \xrightarrow{\bP} X$.
  \end{claim}
  \begin{subproof}
    We have $\bE[|X_n - X|] \to 0$.
    Suppose there exists an $\epsilon > 0$ such that
    $\bP[|X_n - X| > \epsilon]$ does not tend to $0$,
    i.e.~there is a $c > 0$ with $\bP[|X_n - X| > \epsilon] \ge c$ along a subsequence.
    Along this subsequence we have
    \begin{IEEEeqnarray*}{rCl}
      \bE[|X_n - X|] &=& \int_\Omega |X_n - X| d\bP\\
      &=& \int_{|X_n - X| > \epsilon} |X_n - X| d\bP + \underbrace{\int_{|X_n - X| \le \epsilon} |X_n - X| d\bP}_{\ge 0}\\
      &\ge& \epsilon \int_{|X_n - X| > \epsilon} d\bP\\
      &\ge& \epsilon \cdot c > 0 \lightning
    \end{IEEEeqnarray*}
    \todo{Improve this with Markov}
  \end{subproof}
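  Alternatively, as the todo suggests, Markov's inequality applied to $|X_n - X|$
  gives the claim in one line: for every $\epsilon > 0$,
  \[
    \bP[|X_n - X| > \epsilon] \le \frac{\bE[|X_n - X|]}{\epsilon} \xrightarrow{n \to \infty} 0.
  \]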
  \begin{claim}
    $X_n \xrightarrow{\bP} X \notimplies X_n \xrightarrow{L^1} X$.
  \end{claim}
  \begin{subproof}
    Take $([0,1], \cB([0,1]), \lambda)$
    and define $X_n \coloneqq n \One_{[0, \frac{1}{n}]}$.
    For $n$ large enough we have $\bP[|X_n| > \epsilon] = \frac{1}{n} \to 0$,
    hence $X_n \xrightarrow{\bP} 0$.
    However $\bE[|X_n|] = n \cdot \frac{1}{n} = 1$ for all $n$.
  \end{subproof}
  \begin{claim}
    $X_n \xrightarrow{a.s.} X \notimplies X_n \xrightarrow{L^1} X$.
  \end{claim}
  \begin{subproof}
    We can use the same counterexample as in the previous claim.
    For every $\omega \in (0,1]$ we have $X_n(\omega) = 0$ as soon as $n > \frac{1}{\omega}$,
    and $\lambda(\{0\}) = 0$, hence $\bP[\lim_{n \to \infty} X_n = 0] = 1$.
    We have already seen that $X_n$ does not converge in $L^1$.
  \end{subproof}
  \begin{claim}
    $X_n \xrightarrow{L^1} X \notimplies X_n \xrightarrow{a.s.} X$.
  \end{claim}
  \begin{subproof}
    Take $\Omega = [0,1], \cF = \cB([0,1]), \bP = \lambda$.
    For $n = 2^k + j$ with $0 \le j < 2^k$ define $A_n \coloneqq [j 2^{-k}, (j+1) 2^{-k}]$
    and $X_n \coloneqq \One_{A_n}$.
    We have
    \[
      \bE[|X_n|] = \int_{\Omega} |X_n| d\bP = \frac{1}{2^k} \to 0.
    \]
    However $X_n$ does not converge a.s., as for every $\omega \in [0,1]$
    the sequence $X_n(\omega)$ takes the values $0$ and $1$ infinitely often.
  \end{subproof}
\end{proof}
How do we prove that something happens almost surely?
The first thing that should come to mind is:
\begin{lemma}[Borel-Cantelli]
  If we have a sequence of events $(A_n)_{n \ge 1}$
  such that $\sum_{n \ge 1} \bP(A_n) < \infty$,
  then $\bP[A_n \text{ for infinitely many } n] = 0$
  (more precisely: $\bP[\limsup_{n \to \infty} A_n] = 0$).
  For independent events $A_n$ the converse holds as well.
\end{lemma}
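A typical application, sketched: if $\sum_{n \ge 1} \bP[|X_n - X| > \epsilon] < \infty$
for every $\epsilon > 0$, then Borel-Cantelli gives
$\bP[|X_n - X| > \epsilon \text{ for infinitely many } n] = 0$;
taking $\epsilon = \frac{1}{k}$ for all $k \in \N$ yields $X_n \xrightarrow{a.s.} X$.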
\iffalse
\todo{Add more stuff here}
\subsection{Some inequalities}
% TODO: Markov
\begin{theorem}[Chebyshev's inequality] % TODO Proof
  Let $X$ be a r.v.~with $\Var(X) < \infty$.
  Then $\forall \epsilon > 0: \bP\left[\left|X - \bE[X]\right| > \epsilon\right] \le \frac{\Var(X)}{\epsilon^2}$.
\end{theorem}
We used Chebyshev's inequality, linearity of $\bE$, $\Var(cX) = c^2 \Var(X)$, and $\Var(X_1 + \ldots + X_n) = \Var(X_1) + \ldots + \Var(X_n)$ for independent $X_i$.
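These facts combine, for example, in the standard Chebyshev proof of the weak law of
large numbers (a sketch): for i.i.d.~$X_1, \ldots, X_n$ with mean $\mu$ and variance
$\sigma^2 < \infty$,
\[
  \bP\left[\left|\frac{X_1 + \ldots + X_n}{n} - \mu\right| > \epsilon\right]
  \le \frac{1}{\epsilon^2} \Var\left(\frac{X_1 + \ldots + X_n}{n}\right)
  = \frac{\sigma^2}{n \epsilon^2} \xrightarrow{n \to \infty} 0.
\]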
Modes of convergence: $L^p$, in probability, a.s.
\fi