\lecture{5}{2023-04-21}{Laws of large numbers}
\subsection{The Laws of Large Numbers}
We want to show laws of large numbers, i.e.~statements of the form
\[
\frac{X_1 + \ldots + X_n}{n} \xrightarrow{n \to \infty} \bE[X_1].
\]
The LHS is random and represents the empirical average.
The RHS is a constant, which we can compute explicitly from the distribution of the $X_i$.
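For instance, if the $X_i$ are i.i.d.~fair coin tosses with $\bP[X_i = 1] = \bP[X_i = 0] = \frac{1}{2}$,
then $\bE[X_i] = \frac{1}{2}$, and the statement is that the relative frequency of heads among
the first $n$ tosses converges to $\frac{1}{2}$.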
We fix a probability space $(\Omega, \cF, \bP)$ once and for all.
\begin{theorem}
\label{lln}
Let $X_1, X_2,\ldots$ be i.i.d.~random variables on $(\R, \cB(\R))$
and $m = \bE[X_i] < \infty$
and $\sigma^{2} = \Var(X_i) = \bE[ (X_i - \bE(X_i))^2] = \bE[X_i^2] - \bE[X_i]^2 < \infty$.
Then
\begin{enumerate}[(a)]
\item $\frac{X_1 + \ldots + X_n}{n} \xrightarrow{n \to \infty} m$
in probability (\vocab{weak law of large numbers}, WLLN),
\item $\frac{X_1 + \ldots + X_n}{n} \xrightarrow{n \to \infty} m$
almost surely (\vocab{strong law of large numbers}, SLLN).
\end{enumerate}
\end{theorem}
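Note that almost sure convergence implies convergence in probability, so (b) is a stronger statement than (a).
Nevertheless, (a) has a short direct proof using only Chebyshev's inequality: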
\begin{refproof}{lln}
\begin{enumerate}[(a)]
\item Given $\epsilon > 0$, we need to show that
\[
\bP\left[
\left| \frac{X_1 + \ldots + X_n}{n} - m\right| > \epsilon
\right] \xrightarrow{n \to \infty} 0.
\]
Let $S_n \coloneqq X_1 + \ldots + X_n$.
Then $\bE[S_n] = \bE[X_1] + \ldots + \bE[X_n] = nm$.
We have
\begin{IEEEeqnarray*}{rCl}
\bP\left[ \left| \frac{X_1 + \ldots + X_n}{n} - m\right| > \epsilon\right]
&=& \bP\left[\left|\frac{S_n}{n}-m\right| > \epsilon\right]\\
&\overset{\text{Chebyshev}}{\le }&
\frac{\Var\left( \frac{S_n}{n} \right) }{\epsilon^2}
= \frac{1}{n} \frac{\Var(X_1)}{\epsilon^2}
\xrightarrow{n \to \infty} 0
\end{IEEEeqnarray*}
since, by independence,
\[\Var\left(\frac{S_n}{n}\right)
= \frac{1}{n^2} \Var\left(S_n\right)
= \frac{1}{n^2} \cdot n \Var(X_1).\]
\end{enumerate}
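Here, Chebyshev's inequality was applied to $Y = \frac{S_n}{n}$ in the form
\[
\bP\left[ \left| Y - \bE[Y] \right| > \epsilon \right] \le \frac{\Var(Y)}{\epsilon^2}.
\]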
\end{refproof}
For the proof of (b) we need the following general result:
\begin{theorem}
\label{thm2}
Let $X_1, X_2, \ldots$ be independent (but not necessarily identically distributed) random variables with $\bE[X_i] = 0$ for all $i$
and
\[\sum_{i=1}^{\infty} \Var(X_i) < \infty.\]
Then $\sum_{n \ge 1} X_n$ converges almost surely.
\end{theorem}
We will prove this later. \todo{Move proof}
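For example, let $X_n$ take the values $\frac{1}{n}$ and $-\frac{1}{n}$ with probability $\frac{1}{2}$ each, independently.
Then $\bE[X_n] = 0$ and
\[
\sum_{n \ge 1} \Var(X_n) = \sum_{n \ge 1} \frac{1}{n^2} < \infty,
\]
so the theorem shows that $\sum_{n \ge 1} X_n$ (a harmonic series with random signs) converges almost surely,
although $\sum_{n \ge 1} \frac{1}{n} = \infty$.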
\begin{question}
Does the converse hold? I.e.~if $\sum_{n \ge 1} X_n$ converges a.s.,
does it follow that $\sum_{n \ge 1} \Var(X_n) < \infty$?
\end{question}
This does not hold.
Consider the following:
\begin{example}
Let $X_1,X_2,\ldots$ be independent random variables,
where $X_n$ has distribution
$\frac{1}{n^2} \delta_n + \frac{1}{n^2} \delta_{-n} + (1-\frac{2}{n^2}) \delta_0$
for $n \ge 2$ (and, say, $X_1 \equiv 0$).
We have $\bP[X_n \neq 0] = \frac{2}{n^2}$ for $n \ge 2$.
Since this is summable, Borel-Cantelli yields
\[
\bP[X_{n} \neq 0 \text{ for infinitely many $n$}] = 0.
\]
In particular, almost surely only finitely many of the $X_n$ are nonzero,
so $\sum_{n \ge 1} X_n$ converges almost surely.
However, $\Var(X_n) = \bE[X_n^2] = 2$ for $n \ge 2$, so $\sum_{n \ge 1} \Var(X_n) = \infty$.
\end{example}
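Recall the form of the Borel-Cantelli lemma used above: if $A_1, A_2, \ldots$ are events with $\sum_{n \ge 1} \bP[A_n] < \infty$, then
\[
\bP[A_n \text{ occurs for infinitely many } n] = 0;
\]
here it is applied to $A_n = \{X_n \neq 0\}$.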