\lecture{18}{2023-06-20}{}
Recall our key lemma \ref{lec17l3} for supermartingales from last time:
\[
(b-a) \bE[U_N([a,b])] \le \bE[(X_N - a)^-].
\]
What happens for submartingales?
If $(X_n)_{n \in \N}$ is a submartingale, then $(-X_n)_{n \in \N}$ is a supermartingale.
Hence the same holds for submartingales, i.e.
\begin{lemma}
A (sub-/super-) martingale bounded in $L^1$ converges
a.s.~to a limit, which is a.s.~finite.
\end{lemma}
\subsection{Doob's $L^p$ Inequality}

\begin{question}
What about $L^p$ convergence of martingales?
\end{question}
\begin{example}[\vocab{Branching process}]
Let $ (Z_n)_{n \ge 1}$ be i.i.d.~$\pm 1$ with
$\bP[Z_n = 1] = p \in (0,1)$.
Fix $u > 0$. Let $X_0 = x > 0$.
Define $X_{n+1} \coloneqq u^{Z_{n+1}} X_{n}$.
\begin{exercise}
Given $u > 0$, find $p = p(u)$
such that $(X_n)_n$ is a martingale w.r.t.~the canonical filtration.
\end{exercise}
\todo{TODO}
By \autoref{doobmartingaleconvergence}, there is an
a.s.~limit $X_\infty$.
By the SLLN, we have
\[
\frac{1}{n} \sum_{k=1}^{n} Z_k \xrightarrow{a.s.} \bE[Z_1] = 2p - 1.
\]
Hence
\[
\left(\frac{X_n}{x}\right)^{\frac{1}{n}} = u^{\frac{1}{n} \sum_{k=1}^n Z_k}
\xrightarrow{a.s.} u^{2p - 1}.
\]
Since $(X_n)_{n \ge 0}$ is a martingale, we must have $\bE[u^{Z_1}] = 1$,
i.e.~$pu + (1-p)u^{-1} = 1$, which gives $p = \frac{1}{u+1}$.
In particular, $2p - 1 < 0$, because $u > 1$.
Hence, if $\epsilon > 0$ is small, there exists
$N_0(\epsilon)$ (possibly random)
such that for all $n > N_0(\epsilon)$
\[
\left( \frac{X_n}{x} \right)^{\frac{1}{n}} \le u^{2p - 1}(1 + \epsilon) %
\implies X_n \le x [\underbrace{u^{2p - 1} (1+\epsilon)}_{<1}]^n \xrightarrow{n \to \infty} 0.
\]
Hence $X_n \xrightarrow{a.s.} 0$, while $\bE[X_n] = \bE[X_0] = x > 0$ for all $n$.
Thus $(X_n)_n$ cannot converge in $L^1$.
\end{example}
$L^2$ is nice, since it is a Hilbert space. So we will first
consider $L^2$.
\begin{fact}[Martingale increments are orthogonal in $L^2$]
\label{martingaleincrementsorthogonal}
Let $(X_n)_n$ be a martingale
and let $Y_n \coloneqq X_n - X_{n-1}$
denote the \vocab{martingale increments}.
Then for all $m \neq n$ we have that
\[
\langle Y_m | Y_n\rangle_{L^2} = \bE[Y_n Y_m] = 0.
\]
\end{fact}
\begin{proof}
Since $\bE[X_n | \cF_{n-1}] = X_{n-1}$ a.s.,
by induction $\bE[X_n | \cF_{k}] = X_k$ a.s.~for all $k \le n$.
Play with conditional expectation.
\todo{Exercise}
\end{proof}
\begin{fact}[\vocab{Parallelogram identity}]
Let $X, Y \in L^2$.
Then
\[
2 \bE[X^2] + 2 \bE[Y^2] = \bE[(X+Y)^2] + \bE[(X-Y)^2].
\]
\end{fact}
\begin{theorem}\label{martingaleconvergencel2}
Suppose that $(X_n)_n$ is a martingale bounded in $L^2$,
i.e.~$\sup_n \bE[X_n^2] < \infty$.
Then there is a random variable $X_\infty$ such that
\[
X_n \xrightarrow{L^2} X_\infty.
\]
2023-06-20 17:56:38 +02:00
\end{theorem}
\begin{proof}
Let $Y_n \coloneqq X_n - X_{n-1}$ and write
\[
X_n = X_0 + \sum_{j=1}^{n} Y_j.
\]
We have
\[
\bE[X_n^2] = \bE[X_0^2] + \sum_{j=1}^{n} \bE[Y_j^2]
\]
by \autoref{martingaleincrementsorthogonal}.
% (this is known as the \vocab{parallelogram identity}). % TODO how exactly is this used here?
In particular,
\[
\sup_n \bE[X_n^2] < \infty \iff \sum_{j=1}^{\infty} \bE[Y_j^2] < \infty.
\]
Since $(X_n)_n$ is bounded in $L^2$,
there exists $X_\infty$ such that $X_n \xrightarrow{\text{a.s.}} X_\infty$
by \autoref{doob}.
It remains to show $X_n \xrightarrow{L^2} X_\infty$.
For any $r \in \N$, consider
\[\bE[(X_{n+r} - X_n)^2] = \sum_{j=n+1}^{n+r} \bE[Y_j^2] \xrightarrow{n \to \infty} 0\]
as a tail of a convergent series.
Hence $(X_n)_n$ is Cauchy, thus it converges in $L^2$.
Since, by Fatou's lemma,
\[
\bE[(X_\infty - X_n)^2] \le \sum_{j \ge n + 1} \bE[Y_j^2] \xrightarrow{n\to \infty} 0,
\]
we get $\bE[(X_\infty - X_n)^2] \xrightarrow{n\to \infty} 0$.
\end{proof}
Now let $p \ge 1$ be not necessarily $2$.
First, we need a very important inequality:
\begin{theorem}[Doob's $L^p$ inequality]
\label{dooblp}
Suppose that $(X_n)_n$ is a sub-martingale.
Let $X_n^\ast \coloneqq \max \{|X_1|, |X_2|, \ldots, |X_n|\}$
denote the \vocab{running maximum}.
\begin{enumerate}[(1)]
\item Then \[ \forall \ell > 0 .~\bP[X_n^\ast \ge \ell] \le \frac{1}{\ell} \int_{\{X_n^\ast \ge \ell\}} |X_n| \dif \bP \le \frac{1}{\ell} \bE[|X_n|]. \]
(Doob's $L^1$ inequality).
\item Fix $p > 1$. Then \[
\bE[(X_n^\ast)^p] \le \left( \frac{p}{p-1} \right)^p \bE[|X_n|^p].
\]
(Doob's $L^p$ inequality).
\end{enumerate}
\end{theorem}
In order to prove \autoref{dooblp}, we first need:
\begin{lemma}
\label{dooplplemma}
Let $p > 1$ and let $X,Y$ be non-negative random variables
such that
\[
\forall \ell > 0 .~ \bP[Y \ge \ell] \le \frac{1}{\ell} \int_{\{Y \ge \ell\} } X \dif \bP.
\]
Then
\[
\bE[Y^p] \le \left( \frac{p}{p-1} \right)^p \bE[X^p].
\]
\end{lemma}
\begin{proof}
First, assume $Y \in L^p$.
Then
\begin{IEEEeqnarray}{rCl}
\|Y\|_{L^p}^p &=& \bE[Y^p]\\
&=& \int Y(\omega)^p \dif \bP(\omega)\\
&=& \int_{\Omega} \left( \int_0^{Y(\omega)} p \ell^{p-1} \dif \ell \right) \dif \bP(\omega)\\
&\overset{\text{Fubini}}{=}& \int_0^\infty p \ell^{p-1} \underbrace{\int_\Omega \One_{Y \ge \ell} \dif \bP}_{= \bP[Y \ge \ell]} \dif \ell. \label{l18star}
\end{IEEEeqnarray}
We have
\begin{IEEEeqnarray*}{rCl}
\eqref{l18star} &\le& \int_0^\infty p \ell^{p-1} \cdot \frac{1}{\ell} \int_{\{Y \ge \ell\}} X \dif \bP \dif \ell\\
&\overset{\text{Fubini}}{=}& \int_\Omega X(\omega) \int_{0}^{Y(\omega)} p \ell^{p-2} \dif \ell \,\bP(\dif \omega)\\
&=& \frac{p}{p-1} \int X(\omega) Y (\omega)^{p-1} \bP(\dif \omega)\\
&\overset{\text{Hölder}}{\le}& \frac{p}{p-1} \|X\|_{L^p} \|Y\|_{L^p}^{p-1},
\end{IEEEeqnarray*}
where the assumption on $X$ and $Y$ was used in the first inequality.
Dividing both sides by $\|Y\|_{L^p}^{p-1} < \infty$ yields the claim.
Suppose now $Y \not\in L^p$.
Then look at $Y_M \coloneqq Y \wedge M$.
Apply the above to $Y_M \in L^p$ and use the monotone convergence theorem.
\end{proof}
\begin{refproof}{dooblp}
Let $E \coloneqq \{X_n^\ast \ge \ell\} = E_1 \sqcup \ldots \sqcup E_n$
where
\[
E_j = \{|X_1| < \ell, |X_2| < \ell, \ldots, |X_{j-1}| < \ell, |X_j| \ge \ell\}.
\]
Then
2023-06-29 22:18:23 +02:00
\begin{equation}
\bP[E_j] \overset{\text{Markov}}{\le } \frac{1}{\ell} \int_{E_j} |X_j| \dif \bP
\label{lec18eq2star}
\end{equation}
2023-06-20 17:56:38 +02:00
Since $(X_n)_n$ is a sub-martingale, $(|X_n|)_n$ is also a sub-martingale
(by \autoref{cjensen}).
% NOTE(review): \autoref{cjensen} gives this directly for martingales, since
% $x \mapsto |x|$ is convex; for a general sub-martingale the convex function
% must also be increasing — confirm the hypotheses of \autoref{cjensen}.
Hence
\begin{IEEEeqnarray*}{rCl}
\bE[\One_{E_j}(|X_n| - |X_{j}|) | \cF_j]
&=& \One_{E_j} \bE[(|X_n| - |X_{j}|)|\cF_j]\\
&\overset{\text{a.s.}}{\ge }& 0.
\end{IEEEeqnarray*}
By the law of total expectation, \autoref{totalexpectation},
it follows that
2023-06-29 22:18:23 +02:00
\begin{equation}
\bE[\One_{E_j} (|X_n| - |X_j|)] \ge 0. \label{lec18eq3star}
\end{equation}
2023-07-06 00:36:26 +02:00
2023-06-20 17:56:38 +02:00
Now
\begin{IEEEeqnarray*}{rCl}
\bP(E) &=& \sum_{j=1}^n \bP(E_j)\\
&\overset{\eqref{lec18eq2star}, \eqref{lec18eq3star}}{\le }& \frac{1}{\ell} \left( \int_{E_1} |X_n| \dif \bP + \ldots + \int_{E_n} |X_n| \dif \bP \right)\\
&=& \frac{1}{\ell} \int_E |X_n| \dif \bP.
\end{IEEEeqnarray*}
This proves the first part.
For the second part, we apply the first part and
\autoref{dooplplemma} (choose $Y \coloneqq X_n^\ast$ and $X \coloneqq |X_n|$).
\end{refproof}