From ed642130d96bdfce0b2dd6d3f706a21934c8d97d Mon Sep 17 00:00:00 2001 From: Josia Pietsch Date: Fri, 7 Jul 2023 17:41:18 +0200 Subject: [PATCH] lecture 6 --- inputs/lecture_06.tex | 142 +++++++++++++++++++++++++++++++++++------- 1 file changed, 119 insertions(+), 23 deletions(-) diff --git a/inputs/lecture_06.tex b/inputs/lecture_06.tex index ed2e6f1..cb92fec 100644 --- a/inputs/lecture_06.tex +++ b/inputs/lecture_06.tex @@ -1,61 +1,157 @@ -\lecture{6}{}{} -\todo{Large parts of lecture 6 are missing} +\lecture{6}{}{Proof of SLLN} \begin{refproof}{lln} We want to deduce the SLLN (\autoref{lln}) from \autoref{thm2}. - W.l.o.g.~let us assume that $\bE[X_i] = 0$ (otherwise define $X'_i \coloneqq X_i - \bE[X_i]$). + W.l.o.g.~let us assume that $\bE[X_i] = 0$ + (otherwise define $X'_i \coloneqq X_i - \bE[X_i]$). We will show that $\frac{S_n}{n} \xrightarrow{a.s.} 0$. Define $Y_i \coloneqq \frac{X_i}{i}$. Then the $Y_i$ are independent and we have $\bE[Y_i] = 0$ and $\Var(Y_i) = \frac{\sigma^2}{i^2}$. Thus $\sum_{i=1}^\infty \Var(Y_i) < \infty$. - From \autoref{thm2} we obtain that $\sum_{i=1}^\infty Y_i < \infty$ a.s. + From \autoref{thm2} we obtain that $\sum_{i=1}^\infty Y_i$ converges a.s. \begin{claim} - Let $(a_n)$ be a sequence in $\R$ such that $\sum_{n=1}^{\infty} \frac{a_n}{n}$, then $\frac{a_1 + \ldots + a_n}{n} \to 0$. + Let $(a_n)$ be a sequence in $\R$ + such that $\sum_{n=1}^{\infty} \frac{a_n}{n}$ converges, + then $\frac{a_1 + \ldots + a_n}{n} \to 0$. \end{claim} \begin{subproof} Let $S_m \coloneqq \sum_{n=1}^m \frac{a_n}{n}$. By assumption, there exists $S \in \R$ - such that $S_m \to S$ as $m \to \infty$. + such that $S_m \xrightarrow{m \to \infty} S$. Note that $j \cdot (S_{j} - S_{j-1}) = a_j$. Define $S_0 \coloneqq 0$. - Then $a_1 + \ldots + a_n = (S_1 - S_0) + 2(S_2 - S_1) + 3(S_3 - S_2) + - \ldots + n (S_n - S_{n-1})$. 
- Thus $a_1 + \ldots + a_n = n S_n - (S1 $ % TODO - + Then + \begin{IEEEeqnarray*}{rCl} + a_1 + \ldots + a_n &=& + (S_1 - S_0) + 2(S_2 - S_1) + \ldots + n(S_n - S_{n-1})\\ + &=& n S_n - (S_1 + S_2 + \ldots + S_{n-1}). + \end{IEEEeqnarray*} + Thus + \begin{IEEEeqnarray*}{rCl} + \frac{a_1 + \ldots + a_n}{n} + &=& S_n - \frac{S_1 + \ldots + S_{n-1}}{n}\\ + &=& \underbrace{S_n}_{\to S} + - \underbrace{\left( \frac{n-1}{n} \right)}_{\mathclap{\to 1}} + \cdot \underbrace{\frac{S_1 + \ldots + S_{n-1}}{n-1}}_{\to S}\\ + &\to & 0, + \end{IEEEeqnarray*} + where we have used + \begin{fact} + \[ + \lim_{n \to \infty} S_n = \lim_{n \to \infty} \frac{1}{n}\sum_{i=1}^{n} S_i. + \] + \end{fact} \end{subproof} The SLLN follows from the claim. \end{refproof} -We need the fol] +In order to prove \autoref{thm2}, we need the following: \begin{theorem}[Kolmogorov's inequality] + \label{thm:kolmogorovineq} If $X_1,\ldots, X_n$ are independent with $\bE[X_i] = 0$ and $\Var(X_i) = \sigma_i^2$, then \[ - \bP\left[\max_{1 \le i \le n} \left| \sum_{j=1}^{i} X_j \right| > \epsilon \right] \le \frac{1}{\epsilon ^2} \sum_{i=1}^m \sigma_i^2 % TODO + \bP\left[\max_{1 \le i \le n} \left| \sum_{j=1}^{i} X_j \right| > \epsilon \right] + \le \frac{1}{\epsilon^2} \sum_{i=1}^n \sigma_i^2. + \] \end{theorem} \begin{proof} - Let $A_1 \coloneqq \{\omega : |X_1(\omega)| > \epsilon\}, \ldots, - A_i := \{\omega: |X_1(\omega)| \le \epsilon, |X_1(\omega) + X_2(\omega)| \le \epsilon, \ldots, |X_1(\omega) + \ldots + X_{i-1}(\omega)| \le \epsilon, - |X_1(\omega) + \ldots + X_i(\omega)| > \epsilon\}$. 
+ Let + \begin{IEEEeqnarray*}{rCl} + A_1 &\coloneqq& \{\omega : |X_1(\omega)| > \epsilon\},\\ + A_2 &\coloneqq & \{\omega: |X_1(\omega)| \le \epsilon, + |X_1(\omega) + X_2(\omega)| > \epsilon \},\\ + \ldots\\ + A_i &\coloneqq& \{\omega: |X_1(\omega)| \le \epsilon, + |X_1(\omega) + X_2(\omega)| \le \epsilon, \ldots, + |X_1(\omega) + \ldots + X_{i-1}(\omega)| \le \epsilon, + |X_1(\omega) + \ldots + X_i(\omega)| > \epsilon\}. + \end{IEEEeqnarray*} + It is clear that the $A_i$ are disjoint. We are interested in $\bigcup_{1 \le i \le n} A_i$. We have \begin{IEEEeqnarray*}{rCl} - &&\int_{A_i} (\underbrace{X_1 + \ldots + X_i}_C + \underbrace{X_{i+1} + \ldots + X_n}_D)^2 d \bP\\ - &=& \int_{A_i} C^2 d\bP + \underbrace{\int_{A_i} D^2 d \bP}_{\ge 0} + 2 \int_{A_i} CD d\bP\\ - &\ge & \int_{A_i} \underbrace{C^2}_{\ge \epsilon^2} d \bP + 2 \int \underbrace{\One_{A_i} (X_1 + \ldots + X_i)}_E \underbrace{(X_{i+1} + \ldots + X_n)}_D d \bP\\ - &\ge& \int_{A_i} \epsilon^2 d\bP + &&\int_{A_i} (\underbrace{X_1 + \ldots + X_i}_C + + \underbrace{X_{i+1} + \ldots + X_n}_D)^2 d \bP\\ + &=& \int_{A_i} C^2 d\bP + + \underbrace{\int_{A_i} D^2 d \bP}_{\ge 0} + + 2 \int_{A_i} CD d\bP\\ + &\ge& \int_{A_i} \underbrace{C^2}_{\ge \epsilon^2} d \bP + + 2 \int \underbrace{\One_{A_i} (X_1 + \ldots + X_i)}_E \underbrace{(X_{i+1} + \ldots + X_n)}_D d \bP\\ + &\ge& \int_{A_i} \epsilon^2 d\bP, \end{IEEEeqnarray*} - (By the independence of $X_1,\ldots, X_n$ and therefore that of $E$ and $D$ and $\bE(X_{i+1}) = \ldots = \bE(X_n) = 0$ we have $\int D E d\bP = 0$.) - - % TODO + since by the independence of $E$ and $D$, + and $\bE(X_{i+1}) = \ldots = \bE(X_n) = 0$ we have $\int D E d\bP = 0$. + Hence + \[ + \bP(A_i) + \le \frac{1}{\epsilon^2} \int_{A_i} (X_1 + \ldots + X_n)^2 \dif \bP. 
+ \] + Since the $A_i$ are disjoint, we obtain + \begin{IEEEeqnarray*}{rCl} + \bP\left( \bigcup_{i \in \N} A_i \right) + &\le & \frac{1}{\epsilon^2} + \int_{\bigcup_{i \in \N} A_i} (X_1 + \ldots + X_n)^2 \dif \bP\\ + &\le & \frac{1}{\epsilon^2} + \int_{\Omega} (X_1 + \ldots + X_n)^2 \dif \bP\\ + &\overset{\text{independence}}{=}& + \frac{1}{\epsilon^2}(\bE[X_1^2] + \ldots + \bE[X_n^2])\\ + &\overset{\bE[X_i] = 0}{=}& \frac{1}{\epsilon^2} + \left( \Var(X_1) + \ldots + \Var(X_n)\right). + \end{IEEEeqnarray*} \end{proof} \begin{refproof}{thm2} - % TODO + Let $S_n \coloneqq X_1 + \ldots + X_n$. + We'll show that $\{S_n(\omega)\}_{n \in \N}$ + is a Cauchy sequence + for almost every $\omega$. + Let + \[ + a_m(\omega) \coloneqq \sup_{k \in \N} + \{ | S_{ m+k}(\omega) - S_m(\omega)|\} + \] + and + \[ + a(\omega) \coloneqq \inf_{m \in \N} a_m(\omega). + \] + Then $\{S_n(\omega)\}_{n \in \N}$ + is a Cauchy sequence iff $a(\omega) = 0$. + + We want to show that $\bP[a(\omega) > 0] = 0$. + For this, it suffices to show that $\bP[a(\omega) > \epsilon] = 0$ + for all $\epsilon > 0$. + For a fixed $\epsilon > 0$, we obtain: + \begin{IEEEeqnarray*}{rCl} + \bP[a_m > \epsilon] + &=& \bP[ \sup_{k \in \N} | S_{m+k} - S_m| > \epsilon]\\ + &=& \lim_{l \to \infty} \bP[% + \underbrace{\sup_{k \le l} |S_{m+k} - S_m| > \epsilon}_{% + \text{\reflectbox{$\coloneqq$}} B_l \uparrow% + B \coloneqq \{\sup_{k \in \N} |S_{m+k} - S_m| > \epsilon\}}% + ] + \end{IEEEeqnarray*} + + Now, + \begin{IEEEeqnarray*}{rCl} + &&\bP\left[\max \{|S_{m+1} - S_m|, |S_{m+2} - S_m|, \ldots, |S_{m+l} - S_m|\} > \epsilon\right]\\ + &=& \bP\left[\max \{|X_{m+1}|, |X_{m+1} + X_{m+2}|, \ldots, |X_{m+1} + X_{m+2} + \ldots + X_{m+l}|\} > \epsilon\right]\\ + &\overset{\text{\autoref{thm:kolmogorovineq}}}{\le}& + \frac{1}{\epsilon^2} \sum_{i=m+1}^{m+l} \Var(X_i)\\ + &\le & \frac{1}{\epsilon^2} \sum_{i=m+1}^\infty \Var(X_i) + \xrightarrow{m \to \infty} 0, + \end{IEEEeqnarray*} + since by our assumption, $\sum_{i \in \N} \Var(X_i) < \infty$. 
+ + Hence + \[ + \bP[a_m > \epsilon] \xrightarrow{m \to \infty} 0. + \] + It follows that $\bP[a > \epsilon] = 0$, + as claimed. \end{refproof}