% s23-probability-theory/inputs/lecture_8.tex
% Lecture 8 2023-05-02
\subsection{Kolmogorov's 0-1-law}
Some classes of events always have probability $0$ or $1$.
One example of such a 0-1-law is the Borel-Cantelli Lemma
and its inverse statement.
We now want to look at events that capture certain aspects of long term behaviour
of sequences of random variables.
\begin{definition}
Let $X_n, n \in \N$ be a sequence of random variables
on a probability space $(\Omega, \cF, \bP)$.
Let $\cT_i \coloneqq \sigma(X_i, X_{i+1}, \ldots )$
be the $\sigma$-algebra generated by $X_i, X_{i+1}, \ldots$.
Then the \vocab{tail-$\sigma$-algebra} is defined as
\[
\cT \coloneqq \bigcap_{i \in \N} \cT_i.
\]
The events $A \in \cT \subseteq \cF$ are called \vocab[Tail event]{tail events}.
\end{definition}
\begin{remark}
\begin{enumerate}[(i)]
\item Since the intersection of arbitrarily many $\sigma$-algebras
is again a $\sigma$-algebra, $\cT$ is indeed a $\sigma$-algebra.
\item We have
\[
\cT = \{A \in \cF ~|~ \forall i ~ \exists B \in \cB(\R)^{\otimes \N} : A = \{\omega | (X_i(\omega), X_{i+1}(\omega), \ldots) \in B\} \}. % TODO?
\]
\end{enumerate}
\end{remark}
\begin{example}[What are tail events?]
Let $X_n, n \in \N$ be a sequence of independent random variables on a probability
space $(\Omega, \cF, \bP)$. Then
\begin{enumerate}[(i)]
\item $\left\{\omega | \sum_{n \in \N} X_n(\omega) \text{ converges} \right\}$ is a tail event,
since for all $\omega \in \Omega$ we have
\begin{IEEEeqnarray*}{rCl}
&& \sum_{i=1}^\infty X_i(\omega) \text{ converges}\\
&\iff& \sum_{i=2}^\infty X_i(\omega) \text{ converges}\\
&\iff& \ldots \\
&\iff& \sum_{i=k}^\infty X_i(\omega) \text{ converges}.
\end{IEEEeqnarray*}
(Since the $X_i$ are independent, the convergence
of $\sum_{n \in \N} X_n$ is not influenced by $X_1,\ldots, X_k$
for any $k$.)
\item $\left\{\omega | \sum_{n \in \N} X_n(\omega) = c\right\} $
for some $c \in \R$
is not a tail event,
because $\sum_{n \in \N} X_n$ depends on $X_1$.
\item $\{\omega | \lim_{n \to \infty} \frac{1}{n} \sum_{i=1}^{n} X_i(\omega) = c\}$
is a tail event, since
\[
c = \lim_{n \to \infty} \frac{1}{n} \sum_{i=1}^{n} X_i = \underbrace{\lim_{n \to \infty} \frac{1}{n} X_1}_{= 0} + \lim_{n \to \infty} \frac{1}{n} \sum_{i=2}^n X_i = \ldots = \lim_{n \to \infty} \frac{1}{n} \sum_{i=k}^n X_i.
\]
\end{enumerate}
\end{example}
So $\cT$ includes all long term behaviour of $X_n, n \in \N$,
which does not depend on the realisation of the first $k$ random variables
for any $k \in \N$.
\begin{theorem}[Kolmogorov's 0-1 law]
\label{kolmogorov01}
Let $X_n, n \in \N$ be a sequence of independent random variables
and let $\cT$ denote their tail-$\sigma$-algebra.
Then $\cT$ is \vocab{$\bP$-trivial}, i.e.~$\bP[A] \in \{0,1\}$
for all $A \in \cT$.
\end{theorem}
\begin{idea}
The idea behind proving that $\cT$ is $\bP$-trivial is to show
that every $A \in \cT$ is independent of itself, i.e.
\[
\bP[A \cap A] = \bP[A] \cdot \bP[A].
\]
Since $A \cap A = A$, it follows that $\bP[A] = \bP[A]^2$, hence $\bP[A] \in \{0,1\}$.
\end{idea}
\begin{refproof}{kolmogorov01}
Let $\cF_n \coloneqq \sigma(X_1,\ldots,X_n)$
and remember that $\cT_{n} = \sigma(X_{n}, X_{n+1},\ldots)$.
The proof rests on two claims:
\begin{claim}
For all $n \ge 1$, $A \in \cF_n$ and $B \in \cT_{n+1}$
we have $\bP[A \cap B] = \bP[A]\bP[B]$.
\end{claim}
\begin{subproof}
This follows from the independence of the $X_i$.
It is
\[
\sigma\left( X_1,\ldots,X_n \right) = \sigma\left(\underbrace{\{X_{1}^{-1}(B_1) \cap \ldots \cap X_n^{-1}(B_n) | B_1,\ldots,B_n \in \cB(\R)\}}_{\text{\reflectbox{$\coloneqq$}}\cA} \right).
\]
$\cA$ is a semi-algebra, since
\begin{enumerate}[(i)]
\item $\emptyset, \Omega \in \cA$,
\item $A, B \in \cA \implies A \cap B \in \cA$,
\item for $A \in \cA$, $A^c = \bigsqcup_{i=1}^n A_i$
for disjoint sets $A_1,\ldots,A_n \in \cA$.
\end{enumerate}
Hence it suffices to show the claim for sets $A \in \cA$.
Similarly
\[
\cT_{n+1} = \sigma \left( \underbrace{ \{X_{n+1}^{-1}(M_1) \cap \ldots \cap X_{n+k}^{-1}(M_k) | k \in \N, M_1,\ldots, M_k \in \cB(\R)\}}_{\text{\reflectbox{$\coloneqq$}} \cB} \right).
\]
Again, $\cB$ is closed under intersection.
So let $A \in \cA$ and $B \in \cB$.
Then
\[
\bP[A \cap B] = \bP[A] \cdot \bP[B]
\]
by the independence of $\{X_1,\ldots,X_{n+k}\}$,
and since $A$ only depends on $\{X_1,\ldots,X_n\}$
and $B$ only on $\{X_{n+1},\ldots, X_{n+k}\}$.
\end{subproof}
\begin{claim}
$\bigcup_{n \in \N} \cF_n$ is an algebra
and
\[
\sigma\left( \bigcup_{n \in \N} \cF_n \right) = \sigma(X_1,X_2,\ldots) = \cT_1.
\]
\end{claim}
\begin{subproof}
``$\supseteq$ '' If $A_n \in \sigma(X_n)$, then $A_n \in \cF_n$.
Hence $A_n \in \bigcup_{n \in \N} \cF_n$.
Since $\sigma(X_1,X_2,\ldots)$ is generated by $\{A_n \in \sigma(X_n) : n \in \N\}$,
this also means $\sigma(X_1,X_2,\ldots) \subseteq\sigma\left( \bigcup_{n \in \N} \cF_n \right)$.
``$\subseteq$ '' Since $\cF_n = \sigma(X_1,\ldots,X_n)$
and each $X_i$, $i \le n$, is measurable with respect to $\sigma(X_1,X_2,\ldots)$,
we have $\cF_n \subseteq \sigma(X_1,X_2,\ldots)$
for all $n$.
It follows that $\bigcup_{n \in \N} \cF_n \subseteq \sigma(X_1,X_2,\ldots)$.
Hence $\sigma\left( \bigcup_{n \in \N} \cF_n \right) \subseteq\sigma(X_1,X_2,\ldots)$.
\end{subproof}
Now let $T \in \cT$.
Then $T \in \cT_{n+1}$ for any $n$.
Hence $\bP[A \cap T] = \bP[A] \bP[T]$
for all $A \in \cF_n$ by the first claim.
It follows that the same holds for all $A \in \bigcup_{n \in \N} \cF_n$,
hence for all $A \in \sigma\left( \bigcup_{n \in \N} \cF_n \right)$
(the family of sets $A$ with $\bP[A \cap T] = \bP[A]\bP[T]$ is a Dynkin system
containing the $\cap$-stable collection $\bigcup_{n \in \N} \cF_n$,
so it contains the generated $\sigma$-algebra),
and by the second claim for all $A \in \sigma(X_1,X_2,\ldots) = \cT_1$.
But since $T \in \cT$, in particular $T \in \cT_1$,
so by choosing $A = T$, we get
\[
\bP[T] = \bP[T \cap T] = \bP[T]^2
\]
hence $\bP[T] \in \{0,1\}$.
\end{refproof}