% Lecture 13 2023-05
%The difficult part is to show \autoref{levycontinuity}.
%This is the last lecture, where we will deal with independent random variables.
We have seen that
if $ X _ 1 , X _ 2 , \ldots $ are i.i.d.~with $ \mu = \bE [ X _ 1 ] $ ,
$ \sigma ^ 2 = \Var ( X _ 1 ) $ ,
then $ \frac { \sum _ { i = 1 } ^ { n } ( X _ i - \mu ) } { \sigma \sqrt { n } } \xrightarrow { ( d ) } \cN ( 0 , 1 ) $ .
\begin { question}
What happens if $ X _ 1 , X _ 2 , \ldots $ are independent, but not identically distributed? Do we still have a CLT?
\end { question}
\begin { theorem} [Lindeberg CLT]
\label { lindebergclt}
Assume $X_1, X_2, \ldots$ are independent (but not necessarily identically distributed) with $\mu_i = \bE[X_i] < \infty$ and $\sigma_i^2 = \Var(X_i) < \infty$.
Let $ S _ n = \sqrt { \sum _ { i = 1 } ^ { n } \sigma _ i ^ 2 } $
and assume that
\[ \lim_{n \to \infty} \frac{1}{S_n^2} \sum_{i=1}^{n} \bE\left[(X_i - \mu_i)^2 \One_{|X_i - \mu_i| > \epsilon S_n}\right] = 0 \]
for all $ \epsilon > 0 $
(\vocab { Lindeberg condition} \footnote { ``The truncated variance is negligible compared to the variance.''} ).
Then the CLT holds, i.e.~
\[
\frac { \sum _ { i=1} ^ n (X_ i - \mu _ i)} { S_ n} \xrightarrow { (d)} \cN (0,1).
\]
\end { theorem}
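\begin{example}
A simple situation in which the Lindeberg condition is easy to check is that of uniformly bounded summands:
suppose in addition that $|X_i - \mu_i| \le M$ almost surely for all $i$, for some constant $M < \infty$,
and that $S_n \to \infty$.
Given $\epsilon > 0$, for all $n$ with $\epsilon S_n > M$ the indicator $\One_{|X_i - \mu_i| > \epsilon S_n}$
vanishes almost surely for every $i \le n$, so
\[
\frac{1}{S_n^2} \sum_{i=1}^{n} \bE\left[(X_i - \mu_i)^2 \One_{|X_i - \mu_i| > \epsilon S_n}\right] = 0
\]
for all such $n$, and the Lindeberg condition holds.
\end{example}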
\begin { theorem} [Lyapunov condition]
\label { lyapunovclt}
Let $ X _ 1 , X _ 2 , \ldots $ be independent, $ \mu _ i = \bE [ X _ i ] < \infty $ ,
$ \sigma _ i ^ 2 = \Var ( X _ i ) < \infty $
and $ S _ n \coloneqq \sqrt { \sum _ { i = 1 } ^ n \sigma _ i ^ 2 } $ .
Assume that, for some $\delta > 0$,
\[
\lim_{n \to \infty} \frac{1}{S_n^{2 + \delta}} \sum_{i=1}^{n} \bE\left[|X_i - \mu_i|^{2 + \delta}\right] = 0
\]
(\vocab { Lyapunov condition} ).
Then the CLT holds.
\end { theorem}
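\begin{example}
For instance, if $\sup_i \bE[|X_i - \mu_i|^3] \le M < \infty$ and $\inf_i \sigma_i^2 \ge c > 0$,
then $S_n^2 \ge cn$, and the Lyapunov condition holds with $\delta = 1$:
\[
\frac{1}{S_n^3} \sum_{i=1}^{n} \bE\left[|X_i - \mu_i|^3\right] \le \frac{nM}{(cn)^{\frac{3}{2}}} = \frac{M}{c^{\frac{3}{2}} \sqrt{n}} \xrightarrow{n \to \infty} 0.
\]
\end{example}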
\begin { remark}
The Lyapunov condition implies the Lindeberg condition.
(Exercise).
\end { remark}
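One way to see this: on the event $|X_i - \mu_i| > \epsilon S_n$ we have
$1 \le \left(\frac{|X_i - \mu_i|}{\epsilon S_n}\right)^{\delta}$, hence
\[
\frac{1}{S_n^2} \sum_{i=1}^{n} \bE\left[(X_i - \mu_i)^2 \One_{|X_i - \mu_i| > \epsilon S_n}\right]
\le \frac{1}{\epsilon^{\delta} S_n^{2+\delta}} \sum_{i=1}^{n} \bE\left[|X_i - \mu_i|^{2+\delta}\right]
\xrightarrow{n \to \infty} 0.
\]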
We will not prove \autoref{lindebergclt} or \autoref{lyapunovclt}
in this lecture. However, they are quite important.
We will now sketch the proof of \autoref{levycontinuity};
the details can be found in the notes.\todo{Complete this}
A generalized version of \autoref { levycontinuity} is the following:
\begin{theorem}[Generalized version of Levy's continuity theorem, \autoref{levycontinuity}]
\label { genlevycontinuity}
Suppose we have random variables $ ( X _ n ) _ n $ such that
$ \bE [ e ^ { \i t X _ n } ] \xrightarrow { n \to \infty } \phi ( t ) $ for all $ t \in \R $
for some function $ \phi $ on $ \R $ .
Then the following are equivalent:
\begin { enumerate} [(a)]
\item The family of distributions of $(X_n)_n$ is \vocab[Distribution!tight]{tight} (German: ``straff''),
i.e.~$ \lim _ { a \to \infty } \sup _ { n \in \N } \bP [ |X _ n| > a ] = 0 $ .
\item $ X _ n \xrightarrow { ( d ) } X $ for some real-valued random variable $ X $ .
\item $\phi$ is the characteristic function of some real-valued random variable $X$.
\item $ \phi $ is continuous on all of $ \R $ .
\item $ \phi $ is continuous at $ 0 $ .
\end { enumerate}
\end { theorem}
\begin { example}
Let $ Z \sim \cN ( 0 , 1 ) $ and $ X _ n \coloneqq n Z $ .
We have $\phi_{X_n}(t) = \bE[e^{\i t X_n}] = e^{-\frac{1}{2} t^2 n^2} \xrightarrow{n \to \infty} \One_{\{t = 0\}}$.
Since $\One_{\{t = 0\}}$ is not continuous at $0$, \autoref{genlevycontinuity} shows that $X_n$ cannot converge in distribution to a real-valued random variable.
Exercise: $ X _ n \xrightarrow { ( d ) } \overline { X } $ ,
where $ \bP [ \overline { X } = \infty ] = \frac { 1 } { 2 } = \bP [ \overline { X } = - \infty ] $ .
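To see where this comes from, note that for every fixed $x \in \R$
\[
\bP[X_n \le x] = \bP\left[Z \le \frac{x}{n}\right] \xrightarrow{n \to \infty} \bP[Z \le 0] = \frac{1}{2} = \bP[\overline{X} \le x].
\]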
Similar examples are $ \mu _ n \coloneqq \delta _ n $ and
$ \mu _ n \coloneqq \frac { 1 } { 2 } \delta _ n + \frac { 1 } { 2 } \delta _ { - n } $ .
\end { example}
\begin { example}
Suppose that $X_1, X_2, \ldots$ are i.i.d.~with $\bE[X_1] = 0$ and $\sigma^2 \coloneqq \Var(X_1) \in (0, \infty)$,
and let $S_n \coloneqq X_1 + \ldots + X_n$.
Then the family of distributions of $\left(\frac{S_n}{\sigma \sqrt{n}}\right)_n$ is tight:
\begin{IEEEeqnarray*}{rCl}
\bE\left[ \left( \frac{S_n}{\sigma \sqrt{n}} \right)^2 \right] &=& \frac{1}{\sigma^2 n} \bE\left[(X_1 + \ldots + X_n)^2\right] \\
&=& \frac{1}{\sigma^2 n} \sum_{i=1}^{n} \bE[X_i^2] = 1,
\end{IEEEeqnarray*}
since the mixed terms vanish: $\bE[X_i X_j] = \bE[X_i] \bE[X_j] = 0$ for $i \neq j$ by independence.
For $a > 0$, Chebyshev's inequality % TODO
then gives
\[
\sup_{n \in \N} \bP\left[ \left| \frac{S_n}{\sigma \sqrt{n}} \right| > a \right] \leq \frac{1}{a^2} \xrightarrow{a \to \infty} 0,
\]
verifying condition (a) of \autoref{genlevycontinuity}.
\end { example}
\begin { example}
Suppose $C$ is a random variable which is \vocab[Cauchy distribution]{Cauchy distributed}, i.e.~$C$
has probability density $f_C(x) = \frac{1}{\pi} \frac{1}{1 + x^2}$.
\begin { figure} [H]
\centering
\begin { tikzpicture}
\begin { axis} [samples=100, smooth]
\addplot [] { (1/3.14159265358979323846) * (1 / ( 1 + x * x))} ;
\end { axis}
\end { tikzpicture}
\caption { Probability density function of $ C $ }
\end { figure}
We know that $ \bE [ |C| ] = \infty $ .
We have $ \phi _ C ( t ) = \bE [ e ^ { \i t C } ] = e ^ { - |t| } $ .
Suppose $ C _ 1 , C _ 2 , \ldots , C _ n $ are i.i.d.~Cauchy distributed
and let $ S _ n \coloneqq C _ 1 + \ldots + C _ n $ .
Exercise: $ \phi _ { \frac { S _ n } { n } } ( t ) = e ^ { - |t| } = \phi _ { C _ 1 } ( t ) $ , thus $ \frac { S _ n } { n } \sim C $ .
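Indeed, by independence,
\[
\phi_{\frac{S_n}{n}}(t) = \bE\left[e^{\i t \frac{S_n}{n}}\right] = \prod_{i=1}^{n} \phi_{C_i}\left(\frac{t}{n}\right) = \left(e^{-\frac{|t|}{n}}\right)^n = e^{-|t|},
\]
so $\frac{S_n}{n}$ has the same distribution as $C_1$; in particular, $\frac{S_n}{n}$ does not converge to a constant.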
\end { example}
We will prove \autoref{levycontinuity} assuming
\autoref{lec10_thm1}.
\autoref{lec10_thm1} will be shown in the notes.\todo{TODO}
We will need the following:
\begin { lemma}
\label { lec13_ lem1}
Given a sequence $ ( F _ n ) _ n $ of probability distribution functions,
there exist a subsequence $(F_{n_k})_k$ of $(F_n)_n$
and a right-continuous, non-decreasing function $F$
such that $F_{n_k}(x) \to F(x)$ at all continuity points $x$ of $F$.
(We do not yet claim that $F$ is a probability distribution function,
since we say nothing about $\lim_{x \to \infty} F(x)$ and $\lim_{x \to -\infty} F(x)$ for now.)
\end { lemma}
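(A standard way to prove this is a diagonal argument: enumerate the rationals as $q_1, q_2, \ldots$,
extract nested subsequences such that $G(q_j) \coloneqq \lim_{k} F_{n_k}(q_j)$ exists for every $j$,
and set $F(x) \coloneqq \inf\{G(q) : q \text{ rational}, q > x\}$;
this $F$ is non-decreasing and right continuous, and $F_{n_k}(x) \to F(x)$ at every continuity point $x$ of $F$.)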
\begin { lemma}
\label { s7e1}
Let $ \mu \in M _ 1 ( \R ) $ , $ A > 0 $ and $ \phi $ the characteristic function of $ \mu $ .
Then $ \mu \left ( ( - A,A ) \right ) \ge \frac { A } { 2 } \left | \int _ { - \frac { 2 } { A } } ^ { \frac { 2 } { A } } \phi ( t ) d t \right | - 1 $ .
\end { lemma}
\begin { refproof} { s7e1}
Exercise.\todo { TODO}
\end { refproof}
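One possible route to this estimate (sketch): by Fubini,
\[
\int_{-\frac{2}{A}}^{\frac{2}{A}} \phi(t) \,dt
= \int_{\R} \int_{-\frac{2}{A}}^{\frac{2}{A}} e^{\i t x} \,dt \,\mu(dx)
= \int_{\R} \frac{2 \sin\left(\frac{2x}{A}\right)}{x} \,\mu(dx),
\]
where the integrand on the right (read as $\frac{4}{A}$ at $x = 0$) is bounded by $\frac{4}{A}$ everywhere, since $|\sin y| \le |y|$,
and by $\frac{2}{A}$ on $\{|x| \ge A\}$, since $|\sin y| \le 1$. Hence
\[
\frac{A}{2} \left| \int_{-\frac{2}{A}}^{\frac{2}{A}} \phi(t) \,dt \right|
\le 2 \mu\left((-A,A)\right) + \left(1 - \mu\left((-A,A)\right)\right) = 1 + \mu\left((-A,A)\right).
\]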
\begin { refproof} { levycontinuity}
``$\implies$'': If $\mu_n \implies \mu$, then $\int f \,d\mu_n \to \int f \,d\mu$
for all $f \in C_b$; since $x \mapsto e^{\i t x}$ is continuous and bounded for every fixed $t$,
this yields $\phi_{\mu_n}(t) \to \phi_{\mu}(t)$ for all $t$.

``$\impliedby$'':
% Step 1:
\begin { claim}
\label { levyproofc1}
Given $ \epsilon > 0 $ there exists $ A > 0 $ such that
$ \liminf _ n \mu _ n \left ( ( - A,A ) \right ) \ge 1 - 2 \epsilon $ .
\end { claim}
\begin { refproof} { levyproofc1}
If $f$ is continuous, then
\[
\frac{1}{2\eta} \int_{x - \eta}^{x + \eta} f(t) \,dt \xrightarrow{\eta \downarrow 0} f(x).
\]
Applying this to $\phi$ at the point $0$ and using $\phi(0) = 1$, one can choose $A > 0$ such that
\begin { equation}
\left | \frac { A} { 4} \int _ { -\frac { 2} { A} } ^ { \frac { 2} { A} } \phi (t) dt - 1 \right | < \frac { \epsilon } { 2}
\label { levyproofc1eqn1}
\end { equation}
\begin { claim}
For $ n $ large enough, we have
\begin { equation}
\left | \frac { A} { 4} \int _ { -\frac { 2} { A} } ^ { \frac { 2} { A} } \phi _ n(t) d t - 1\right | < \epsilon .
\label { levyproofc1eqn2}
\end { equation}
\end { claim}
\begin { subproof}
Apply dominated convergence: since $\phi_n \to \phi$ pointwise and $|\phi_n| \le 1$,
we get $\int_{-\frac{2}{A}}^{\frac{2}{A}} \phi_n(t) \,dt \xrightarrow{n \to \infty} \int_{-\frac{2}{A}}^{\frac{2}{A}} \phi(t) \,dt$;
combined with \autoref{levyproofc1eqn1}, this gives the claim.
\end { subproof}
So, to prove $\mu_n\left((-A,A)\right) \ge 1 - 2\epsilon$ for $n$ large enough,
we apply \autoref{s7e1}:
it suffices to show that
\[
\frac { A} { 2} \left | \int _ { -\frac { 2} { A} } ^ { \frac { 2} { A} } \phi _ n(t) dt\right | - 1 \ge 1 - 2\epsilon
\]
or, equivalently,
\[
1 - \frac { A} { 4} \left |\int _ { -\frac { 2} { A} } ^ { \frac { 2} { A} } \phi _ n(t) dt \right | \le \epsilon ,
\]
which follows from \autoref { levyproofc1eqn2} .
\end { refproof}
% Step 2
By \autoref{lec13_lem1},
there exist a right-continuous, non-decreasing function $F$
and a subsequence $(F_{n_k})_k$ of $(F_n)_n$, where $F_n$ denotes
the probability distribution function of $\mu_n$,
such that $F_{n_k}(x) \to F(x)$ at all continuity points $x$ of $F$.
\begin { claim}
\[
\lim_{x \to -\infty} F(x) = 0
\]
and
\[
\lim_{x \to \infty} F(x) = 1,
\]
i.e.~$ F $ is a probability distribution function.\footnote { This does not hold in general!}
\end { claim}
\begin { subproof}
We have
\[
\mu _ { n_ k} \left ( (- \infty , x] \right ) = F_ { n_ k} (x) \to F(x).
\]
Again, given $\epsilon > 0$, there exists $A > 0$ such that
$\liminf_k \mu_{n_k}\left((-A,A)\right) \ge 1 - 2\epsilon$ (\autoref{levyproofc1}).
Hence $F(x) \ge 1 - 2\epsilon$ for $x > A$
and $F(x) \le 2\epsilon$ for $x < -A$.
This proves the claim.
\end { subproof}
Since $ F $ is a probability distribution function, there exists
a probability measure $ \nu $ on $ \R $ such that $ F $ is the distribution
function of $ \nu $ .
Since $F_{n_k}(x) \to F(x)$ at all continuity points $x$ of $F$,
\autoref{lec10_thm1} yields
$\mu_{n_k} \overset{k \to \infty}{\implies} \nu$.
Hence
$\phi_{\mu_{n_k}}(t) \to \phi_\nu(t)$ for all $t$, by the ``$\implies$'' direction shown above.
But by assumption,
$\phi_{\mu_{n_k}}(\cdot) \to \phi_{\mu}(\cdot)$, so $\phi_{\mu}(\cdot) = \phi_{\nu}(\cdot)$.
By \autoref { charfuncuniqueness} , we get $ \mu = \nu $ .
We have shown that $\mu_{n_k} \implies \mu$ along a subsequence.
We still need to show that $ \mu _ n \implies \mu $ .
\begin { fact}
Suppose $(a_n)_n$ is a bounded sequence in $\R$
such that every convergent subsequence of $(a_n)_n$ converges to the same limit $a \in \R$.
Then $a_n \to a$.
\end { fact}
\begin { subproof}
\todo { in the notes}
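(Sketch: if $a_n \not\to a$, there are $\epsilon > 0$ and a subsequence $(a_{n_k})_k$ with $|a_{n_k} - a| \ge \epsilon$ for all $k$;
by Bolzano--Weierstrass, $(a_{n_k})_k$ has a convergent further subsequence,
whose limit is at distance $\ge \epsilon$ from $a$, contradicting the assumption.)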
\end { subproof}
Assume now that $\mu_n$ does not converge weakly to $\mu$.
By \autoref{lec10_thm1}, there is then a continuity point $x_0$ of $F$ (the distribution function of $\mu$)
such that $F_n(x_0) \not\to F(x_0)$.
Pick $\delta > 0$ and a subsequence $F_{n_1}(x_0), F_{n_2}(x_0), \ldots$
whose values all lie outside $(F(x_0) - \delta, F(x_0) + \delta)$.
Still, $\phi_{n_1}, \phi_{n_2}, \ldots \to \phi_{\mu}$ pointwise,
so by the argument above there exists a further subsequence $G_1, G_2, \ldots$ of $(F_{n_i})_i$
(and hence of $(F_n)_n$) which converges to $F$ at all continuity points of $F$.
However, $G_1, G_2, \ldots$ cannot converge to $F$,
as this fails at the continuity point $x_0$. This is a contradiction.
\end { refproof}
% IID is over now
\subsection { Summary}
What we have learned:
\begin { itemize}
\item How to construct product measures
\item WLLN and SLLN
\item Kolmogorov's three series theorem
\item Fourier transform, weak convergence and CLT
\end { itemize}