\lecture{7}{}{Kolmogorov's three series theorem}
\begin{goal}
We want to drop our assumptions on finite mean or variance
and say something about the behaviour of $\sum_{n \ge 1} X_n$
when the $X_n$ are independent.
\end{goal}
\begin{theorem}[Kolmogorov's three-series theorem] % Theorem 3
\yalabel{Kolmogorov's Three-Series Theorem}{3 Series}{thm:kolmogorovthreeseries}
\label{thm3}
Let $X_n$ be a family of independent random variables.
\begin{enumerate}[(a)]
\item Suppose for some $C \ge 0$, the following three series
of numbers converge:
\begin{itemize}
\item $\sum_{n \ge 1} \bP(|X_n| > C)$,
\item $\sum_{n \ge 1} \underbrace{\int_{|X_n| \le C} X_n \dif\bP}_{\text{\vocab{truncated mean}}}$,
\item $\sum_{n \ge 1} \underbrace{\int_{|X_n| \le C} X_n^2 \dif\bP - \left( \int_{|X_n| \le C} X_n \dif\bP \right)^2}_{\text{\vocab{truncated variance}}}$.
\end{itemize}
Then $\sum_{n \ge 1} X_n$ converges almost surely.
\item Suppose $\sum_{n \ge 1} X_n$ converges almost surely.
Then all three series above converge for every $C > 0$.
\end{enumerate}
\end{theorem}
For the proof we'll need a slight generalization of \yaref{thm2}:
\begin{theorem} % Theorem 4
\label{thm4}
Let $\{X_n\}_n$ be independent and \vocab{uniformly bounded}
(i.e.~$\exists M < \infty : \sup_n \sup_\omega |X_n(\omega)| \le M$).
Then $\sum_{n \ge 1} X_n$ converges almost surely
$\iff$ $\sum_{n \ge 1} \bE(X_n)$ and $\sum_{n \ge 1} \Var(X_n)$
converge.
\end{theorem}
\begin{refproof}{thm3}
Assume that we have already proved \yaref{thm4}.
We prove part (a) first.
Put $Y_n = X_n \cdot \One_{\{|X_n| \le C\}}$.
Since the $X_n$ are independent, the $Y_n$ are independent as well.
Furthermore, the $Y_n$ are uniformly bounded.
By our assumption, the series
$\sum_{n \ge 1} \int_{|X_n| \le C} X_n \dif\bP = \sum_{n \ge 1} \bE[Y_n]$
and $\sum_{n \ge 1} \int_{|X_n| \le C} X_n^2 \dif\bP - \left( \int_{|X_n| \le C} X_n \dif\bP \right)^2 = \sum_{n \ge 1} \Var(Y_n)$
converge.
By \yaref{thm4} it follows that $\sum_{n \ge 1} Y_n < \infty$
almost surely.
Let $A_n \coloneqq \{\omega : |X_n(\omega)| > C\}$.
Since $\sum_{n \ge 1} \bP(A_n) < \infty$ by assumption,
\yaref{thm:borelcantelli} yields $\bP[\text{infinitely many $A_n$ occur}] = 0$.
Hence, almost surely, $X_n = Y_n$ for all but finitely many $n$,
so $\sum_{n \ge 1} X_n$ converges almost surely as well.
For the proof of (b), suppose $\sum_{n\ge 1} X_n(\omega) < \infty$
for almost every $\omega$.
Fix an arbitrary $C > 0$.
Define
\[
Y_n(\omega) \coloneqq \begin{cases}
X_n(\omega) & \text{if } |X_n(\omega)| \le C,\\
C &\text{if } |X_n(\omega)| > C.
\end{cases}
\]
Then the $Y_n$ are independent and $\sum_{n \ge 1} Y_n(\omega) < \infty$
almost surely and the $Y_n$ are uniformly bounded.
By \yaref{thm4} $\sum_{n \ge 1} \bE[Y_n]$ and $\sum_{n \ge 1} \Var(Y_n)$
converge.
Define
\[
Z_n(\omega) \coloneqq \begin{cases}
X_n(\omega) &\text{if } |X_n(\omega)| \le C,\\
-C &\text{if } |X_n(\omega)| > C.
\end{cases}
\]
Then the $Z_n$ are independent, uniformly bounded and $\sum_{n \ge 1} Z_n(\omega) < \infty$
almost surely.
By \yaref{thm4} we have
$\sum_{n \ge 1} \bE(Z_n) < \infty$
and $\sum_{n \ge 1} \Var(Z_n) < \infty$.
We have
\begin{IEEEeqnarray*}{rCl}
\bE(Y_n) &=& \int_{|X_n| \le C} X_n \dif\bP + C \bP(|X_n| > C),\\
\bE(Z_n) &=& \int_{|X_n| \le C} X_n \dif\bP - C \bP(|X_n| > C).
\end{IEEEeqnarray*}
Since $\bE(Y_n) + \bE(Z_n) = 2 \int_{|X_n| \le C} X_n \dif\bP$
and both $\sum_{n \ge 1} \bE(Y_n)$ and $\sum_{n \ge 1} \bE(Z_n)$ converge,
the second series converges,
and since
$\bE(Y_n) - \bE(Z_n) = 2C \bP(|X_n| > C)$ and $\sum_{n \ge 1} \left( \bE(Y_n) - \bE(Z_n) \right)$ converges, the first series converges.
For the third series, we look at
$\sum_{n \ge 1} \Var(Y_n)$ and
$\sum_{n \ge 1} \Var(Z_n)$ to conclude that this series converges
as well.
\end{refproof}
Recall \yaref{thm2}.
We will see that the converse of \yaref{thm2} is true if the $X_n$ are uniformly bounded.
More formally:
\begin{theorem} % Theorem 5
\label{thm5}
Let $X_n$ be a sequence of independent random variables with mean $0$
that are uniformly bounded.
If $\sum_{n \ge 1} X_n < \infty$ almost surely,
then $\sum_{n \ge 1} \Var(X_n) < \infty$.
\end{theorem}
\begin{refproof}{thm4}
Assume we have proven \yaref{thm5}.

``$\impliedby$'' Assume $\{X_n\}$ are independent, uniformly bounded
and $\sum_{n \ge 1} \bE(X_n) < \infty$ as well as $\sum_{n \ge 1} \Var(X_n) < \infty$.
We need to show that $\sum_{n \ge 1} X_n < \infty$ a.s.
Let $Y_n \coloneqq X_n - \bE(X_n)$.
Then the $Y_n$ are independent, $\bE(Y_n) = 0$ and $\Var(Y_n) = \Var(X_n)$.
By \yaref{thm2} $\sum_{n \ge 1} Y_n < \infty$ a.s.
Thus $\sum_{n \ge 1} X_n < \infty$ a.s.

``$\implies$'' We assume that $\{X_n\}$ are independent, uniformly bounded
and $\sum_{n \ge 1} X_n(\omega) < \infty$ a.s.
We have to show that $\sum_{n \ge 1} \bE(X_n) < \infty$
and $\sum_{n \ge 1} \Var(X_n) < \infty$.
Consider the product space $(\Omega, \cF, \bP) \otimes (\Omega, \cF, \bP)$.
On this product space, we define
$Y_n \left( (\omega, \omega') \right) \coloneqq X_n(\omega)$
and $Z_n \left( (\omega, \omega') \right) \coloneqq X_n(\omega')$.
\begin{claim}
For every fixed $n$, $Y_n$ and $Z_n$ are independent.
\end{claim}
\begin{subproof}
This is obvious, but we will prove it carefully here.
\begin{IEEEeqnarray*}{rCl}
&&(\bP \otimes \bP) [Y_n \in (a,b), Z_n \in (a',b')]\\
&=& (\bP\otimes\bP) \left( (\omega, \omega') : X_n(\omega) \in (a,b) \land X_n(\omega') \in (a',b') \right)\\
&=& (\bP\otimes\bP)(A \times A'), \text{ where }
A \coloneqq X_n^{-1}\left( (a,b)\right) \text{ and } A' \coloneqq X_n^{-1}\left( (a',b') \right)\\
&=& \bP(A)\bP(A')
\end{IEEEeqnarray*}
\end{subproof}
Now $\bE[Y_n - Z_n] = 0$ (by definition) and $\Var(Y_n - Z_n) = 2\Var(X_n)$.
Obviously, $(Y_n - Z_n)_{n \ge 1}$ is also uniformly bounded.
\begin{claim}
$\sum_{n \ge 1} (Y_n - Z_n) < \infty$ almost surely
on $(\Omega \times \Omega, \cF \otimes \cF, \bP \otimes \bP)$.
\end{claim}
\begin{subproof}
Let $\Omega_0 \coloneqq \{\omega : \sum_{n \ge 1} X_n(\omega) < \infty\}$.
Then $\bP(\Omega_0) = 1$.
Thus $(\bP\otimes\bP)(\Omega_0 \times \Omega_0) = 1$.
Furthermore
$\sum_{n \ge 1} \left(Y_n(\omega, \omega') - Z_n(\omega, \omega') \right)= \sum_{n \ge 1} \left(X_n(\omega) - X_n(\omega')\right)$.
Thus $\sum_{n \ge 1} \left( Y_n(\omega, \omega') - Z_n(\omega, \omega') \right) < \infty$ a.s.~on $\Omega_0 \times \Omega_0$.
\end{subproof}
By \yaref{thm5}, $\sum_{n} \Var(X_n) = \frac{1}{2}\sum_{n \ge 1} \Var(Y_n - Z_n) < \infty$.
Define $U_n \coloneqq X_n - \bE(X_n)$.
Then $\bE(U_n) = 0$ and the $U_n$ are independent
and uniformly bounded.
We have $\sum_{n} \Var(U_n) = \sum_{n} \Var(X_n) < \infty$.
Thus $\sum_{n} U_n$ converges a.s.~by \yaref{thm2}.
Since by assumption $\sum_{n} X_n < \infty$ a.s.,
it follows that $\sum_{n} \bE(X_n) = \sum_{n} \left( X_n - U_n \right) < \infty$.
\end{refproof}
\begin{remark}
In the proof of \yaref{thm4},
``$\impliedby$'' is just a trivial application of \yaref{thm2}
and uniform boundedness was not used.
The idea of ``$\implies$'' will lead to coupling. % TODO ?
\end{remark}
A proof of \yaref{thm5} can be found in the notes.\notes
\begin{example}[Application of \yaref{thm4}]
The series $\sum_{n} \frac{1}{n^{\frac{1}{2} + \epsilon}}$
does not converge for $\epsilon \le \frac{1}{2}$.
However
\[
\sum_{n} X_n \frac{1}{n^{\frac{1}{2} + \epsilon}},
\]
where $\bP[X_n = 1] = \bP[X_n = -1] = \frac{1}{2}$,
converges almost surely for all $\epsilon > 0$.
And
\[
\sum_{n} X_n \frac{1}{n^{\frac{1}{2} - \epsilon}}
\]
does not converge almost surely,
since the variances $\sum_{n} \frac{1}{n^{1 - 2\epsilon}}$ diverge.
\end{example}