\todo{Large parts of lecture 6 are missing}
\begin{refproof}{lln}
We want to deduce the SLLN (\autoref{lln}) from \autoref{thm2}.
W.l.o.g.~let us assume that $\bE[X_i] = 0$ (otherwise define $X'_i \coloneqq X_i - \bE[X_i]$).
We will show that $\frac{S_n}{n} \xrightarrow{a.s.} 0$.

Define $Y_i \coloneqq \frac{X_i}{i}$.
Then the $Y_i$ are independent and we have $\bE[Y_i] = 0$
and $\Var(Y_i) = \frac{\sigma^2}{i^2}$.
Thus $\sum_{i=1}^\infty \Var(Y_i) = \sigma^2 \sum_{i=1}^\infty \frac{1}{i^2} < \infty$.
From \autoref{thm2} we obtain that $\sum_{i=1}^\infty Y_i$ converges a.s.

\begin{claim}
Let $(a_n)$ be a sequence in $\R$ such that $\sum_{n=1}^{\infty} \frac{a_n}{n}$ converges.
Then $\frac{a_1 + \ldots + a_n}{n} \to 0$.
\end{claim}
\begin{subproof}
Let $S_m \coloneqq \sum_{n=1}^m \frac{a_n}{n}$.
By assumption, there exists $S \in \R$
such that $S_m \to S$ as $m \to \infty$.
Note that $j \cdot (S_{j} - S_{j-1}) = a_j$.
Define $S_0 \coloneqq 0$.
Then $a_1 + \ldots + a_n = (S_1 - S_0) + 2(S_2 - S_1) + 3(S_3 - S_2) +
\ldots + n (S_n - S_{n-1})$.
Thus $a_1 + \ldots + a_n = n S_n - (S_1 + S_2 + \ldots + S_{n-1})$,
and hence
\[
\frac{a_1 + \ldots + a_n}{n} = S_n - \frac{n-1}{n} \cdot \frac{S_1 + \ldots + S_{n-1}}{n - 1} \xrightarrow{n \to \infty} S - S = 0,
\]
since the Ces\`aro means of a convergent sequence converge to the same limit.
\end{subproof}
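
For example, take $a_n \coloneqq (-1)^n$: the series
\[
\sum_{n=1}^{\infty} \frac{(-1)^n}{n} = -\log 2
\]
converges, and indeed $a_1 + \ldots + a_n$ is $0$ for even $n$ and $-1$ for odd $n$, so $\frac{a_1 + \ldots + a_n}{n} \to 0$.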

The SLLN now follows from the claim: for a.e.~$\omega$ the series $\sum_{n=1}^{\infty} \frac{X_n(\omega)}{n}$ converges, so applying the claim to $a_n = X_n(\omega)$ gives $\frac{S_n(\omega)}{n} \to 0$.
\end{refproof}

We need the following inequality:

\begin{theorem}[Kolmogorov's inequality]
If $X_1,\ldots, X_n$ are independent with $\bE[X_i] = 0$
and $\Var(X_i) = \sigma_i^2$, then
\[
\bP\left[\max_{1 \le i \le n} \left| \sum_{j=1}^{i} X_j \right| > \epsilon \right] \le \frac{1}{\epsilon^2} \sum_{i=1}^{n} \sigma_i^2.
\]
\end{theorem}
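
Note that for $n = 1$ Kolmogorov's inequality reduces to Chebyshev's inequality
\[
\bP\left[|X_1| > \epsilon\right] \le \frac{\sigma_1^2}{\epsilon^2};
\]
the point is that the maximum over all $n$ partial sums obeys the same bound that Chebyshev gives for the final sum $S_n$ alone.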

\begin{proof}
Let $A_1 \coloneqq \{\omega : |X_1(\omega)| > \epsilon\}$ and, for $2 \le i \le n$,
$A_i \coloneqq \{\omega: |X_1(\omega)| \le \epsilon, |X_1(\omega) + X_2(\omega)| \le \epsilon, \ldots, |X_1(\omega) + \ldots + X_{i-1}(\omega)| \le \epsilon,
|X_1(\omega) + \ldots + X_i(\omega)| > \epsilon\}$,
i.e.~$A_i$ is the event that the partial sums first exceed $\epsilon$ in absolute value at step $i$.
The $A_i$ are pairwise disjoint, and $\bigcup_{1 \le i \le n} A_i$ is precisely the event $\left\{\max_{1 \le i \le n} \left|\sum_{j=1}^{i} X_j\right| > \epsilon\right\}$ whose probability we want to bound.

We have
\begin{IEEEeqnarray*}{rCl}
\int_{A_i} (\underbrace{X_1 + \ldots + X_i}_C + \underbrace{X_{i+1} + \ldots + X_n}_D)^2 d \bP &=& \int_{A_i} C^2 d\bP + \underbrace{\int_{A_i} D^2 d \bP}_{\ge 0} + 2 \int_{A_i} CD d\bP\\
&\ge & \int_{A_i} \underbrace{C^2}_{\ge \epsilon^2 \text{ on } A_i} d \bP + 2 \int \underbrace{\One_{A_i} (X_1 + \ldots + X_i)}_E \underbrace{(X_{i+1} + \ldots + X_n)}_D d \bP\\
&\ge& \int_{A_i} \epsilon^2 d\bP = \epsilon^2 \bP(A_i).
\end{IEEEeqnarray*}
(Here $\int E D d\bP = 0$, since $E$ is a function of $X_1,\ldots,X_i$ and $D$ is a function of $X_{i+1},\ldots,X_n$, so $E$ and $D$ are independent by the independence of $X_1,\ldots,X_n$, and $\bE[D] = \bE[X_{i+1}] + \ldots + \bE[X_n] = 0$.)

Summing over $i$ and using that the $A_i$ are disjoint, we obtain
\begin{IEEEeqnarray*}{rCl}
\epsilon^2 \bP\left[\bigcup_{1 \le i \le n} A_i\right] &=& \sum_{i=1}^{n} \epsilon^2 \bP(A_i) \le \sum_{i=1}^{n} \int_{A_i} (X_1 + \ldots + X_n)^2 d\bP\\
&\le& \int (X_1 + \ldots + X_n)^2 d\bP = \Var(X_1 + \ldots + X_n) = \sum_{i=1}^{n} \sigma_i^2,
\end{IEEEeqnarray*}
which is the claimed inequality.
\end{proof}
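
For instance, if the $X_i$ are independent fair coin flips with values $\pm 1$, then $\sigma_i^2 = 1$, and choosing $\epsilon = c\sqrt{n}$ gives
\[
\bP\left[\max_{1 \le i \le n} \left|\sum_{j=1}^{i} X_j\right| > c\sqrt{n}\right] \le \frac{1}{c^2},
\]
a bound on the maximal fluctuation of a simple random walk which is uniform in $n$.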

\begin{refproof}{thm2}
% TODO
\end{refproof}

\subsubsection{Application: Renewal Theorem}

\begin{theorem}[Renewal theorem]
Let $X_1,X_2,\ldots$ be i.i.d.~random variables with $X_i \ge 0$ and $\bE[X_i] = m > 0$.
Let $S_n \coloneqq \sum_{i=1}^n X_i$.
For all $t > 0$ let \[
N_t \coloneqq \sup \{n : S_n \le t\}.
\]
Then $\frac{N_t}{t} \xrightarrow{a.s.} \frac{1}{m}$ as $t \to \infty$.
\end{theorem}

The $X_i$ can be thought of as waiting times:
$S_i$ is the time at which the $i$-th event occurs,
and $N_t$ counts the events that have occurred up to time $t$.
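
For example, if the $X_i$ are $\mathrm{Exp}(\lambda)$-distributed, then $m = \bE[X_i] = \frac{1}{\lambda}$ and $(N_t)_{t > 0}$ is a Poisson process with rate $\lambda$; the theorem then says that the long-run rate of events is $\frac{N_t}{t} \xrightarrow{a.s.} \lambda$.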

\begin{proof}
By the SLLN, $\frac{S_n}{n} \xrightarrow{a.s.} m$ as $n \to \infty$.
Note that $N_t \uparrow \infty$ a.s.~as $t \to \infty$ $(\ast\ast)$:
we have $\{N_t \ge n\} = \{X_1 + \ldots + X_n \le t\}$, and since $S_n < \infty$ a.s.,
for a.e.~$\omega$ and every $n$ we get $N_t(\omega) \ge n$ as soon as $t \ge S_n(\omega)$.
\begin{claim}
|
|
$\bP[\frac{S_n}{n} \xrightarrow{n \to \infty} m , N_t \xrightarrow{t \to \infty} \infty] = 1$.
|
|
\end{claim}
|
|
\begin{subproof}
|
|
Let $A \coloneqq \{\omega: \frac{S_n(\omega)}{n} \xrightarrow{n \to \infty} m\}$ and $B \coloneqq \{\omega : N_t(\omega \xrightarrow{t \to \infty} \infty\}$.
|
|
By the SLLN, we have $\bP(A^C) = 0$ and $\ast\ast \implies \bP(B^C) = 0$.
|
|
\end{subproof}

Consequently, $\bP\left[ \frac{S_{N_t}}{N_t} \xrightarrow{t \to \infty} m, \frac{S_{N_t + 1}}{N_t + 1} \xrightarrow{t \to \infty} m \right] = 1$.

By the definition of $N_t$, we have $S_{N_t} \le t \le S_{N_t + 1}$.
Then
\[
\frac{S_{N_t}}{N_t} \le \frac{t}{N_t} \le \frac{S_{N_t + 1}}{N_t} = \frac{S_{N_t + 1}}{N_t + 1} \cdot \frac{N_t + 1}{N_t}.
\]
Since both bounds converge to $m$ a.s.~(using that $\frac{N_t + 1}{N_t} \to 1$), we conclude that $\frac{t}{N_t} \xrightarrow{a.s.} m$, i.e.~$\frac{N_t}{t} \xrightarrow{a.s.} \frac{1}{m}$.
\end{proof}