lecture 19

Josia Pietsch 2023-06-22 17:50:06 +02:00
parent 559fc6dd27
commit f775ff03c3
Signed by: josia
GPG key ID: E70B571D66986A2D
3 changed files with 247 additions and 8 deletions


@@ -51,7 +51,7 @@ Hence the same holds for submartingales, i.e.
$N_0(\epsilon)$ (possibly random)
such that for all $n > N_0(\epsilon)$
\[
\left( \frac{X_n}{x} \right)^{\frac{1}{n}} \le u^{2p - 1}(1 + \epsilon) \implies x [\underbrace{u^{2p - 1} (1+\epsilon)}_{<1}]^n \xrightarrow{n \to \infty} 0.
\]
Hence it can not converge in $L^1$.
% TODO Confusion
@@ -132,7 +132,7 @@ First, we need a very important inequality:
Let $X_n^\ast \coloneqq \max \{|X_1|, |X_2|, \ldots, |X_n|\}$
denote the \vocab{running maximum}.
\begin{enumerate}[(1)]
\item Then \[ \forall \ell > 0 .~\bP[X_n^\ast \ge \ell] \le \frac{1}{\ell} \int_{\{X_n^\ast \ge \ell\}} |X_n| \dif \bP \le \frac{1}{\ell} \bE[|X_n|]. \]
(Doob's $L^1$ inequality).
\item Fix $p > 1$. Then \[
\bE[(X_n^\ast)^p] \le \left( \frac{p}{p-1} \right)^p \bE[|X_n|^p].
@@ -167,7 +167,7 @@ We first need
We have
\begin{IEEEeqnarray*}{rCl}
\eqref{l18star} &\le & \int_0^\infty \frac{1}{\ell} \left( \int_{\{Y(\omega) \ge \ell\}} X(\omega) \bP(\dif \omega) \right) p \ell^{p-1} \dif \ell\\
&\overset{\text{Fubini}}{=}& \int_\Omega X(\omega) \int_{0}^{Y(\omega)} p \ell^{p-2} \dif \ell\bP(\dif \omega)\\
&=& \frac{p}{p-1} \int X(\omega) Y (\omega)^{p-1} \bP(\dif \omega)\\
&\overset{\text{Hölder}}{\le}& \frac{p}{p-1} \|X\|_{L^p} \|Y\|_{p}^{p-1},

inputs/lecture_19.tex Normal file (238 additions)

@@ -0,0 +1,238 @@
\lecture{19}{2023-06-22}{}
\subsection{Uniform integrability}
\begin{example}
Let $\Omega = [0,1]$, $\cF = \cB$
and $\bP = \lambda \defon{[0,1]}$.
Consider $X_n \coloneqq n \One_{(0,\frac{1}{n})}$.
We know that $X_n \xrightarrow{n \to \infty} 0$ a.s.,
however $\bE[X_n] = \bE[|X_n|] = 1$,
hence $X_n$ does not converge in $L^1(\bP)$.
Let $\mu_n(\cdot ) = \bP[X_n \in \cdot ]$.
Intuitively, for a sequence that converges in probability,
$L^1$-convergence can only hold if we somehow make sure
that the laws $\mu_n$ do not assign mass far away from $0$.
This will be made precise in the notion of uniform integrability.
\end{example}
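To spell out where the mass goes (a short computation directly from the definition of $X_n$): the law of $X_n$ is
\[
\mu_n = \left( 1 - \frac{1}{n} \right) \delta_0 + \frac{1}{n} \delta_n,
\]
i.e.~$\mu_n$ places mass $\frac{1}{n}$ at the far away point $n$,
and it is exactly this escaping mass that keeps $\bE[|X_n|] = 1$ from vanishing.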
\begin{goal}
We want to show that convergence in $L^1$ is equivalent to
convergence in probability combined with uniform integrability.
\end{goal}
\begin{definition}
A sequence of random variables $(X_n)_n$ is called \vocab{uniformly integrable} (UI),
if
\[\forall \epsilon > 0 .~\exists k > 0 .~ \forall n.~\bE[|X_n| \One_{\{|X_n| > k\}}] < \epsilon.\]
Similarly, one defines uniform integrability for an arbitrary family of random variables.
\end{definition}
\begin{example}
$X_n \coloneqq n \One_{(0,\frac{1}{n})}$ is not uniformly integrable.
\end{example}
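To check this from the definition: for any fixed $k > 0$ and all $n > k$ we have $\{|X_n| > k\} = (0, \frac{1}{n})$, hence
\[
\bE\left[|X_n| \One_{\{|X_n| > k\}}\right] = n \cdot \lambda\left(\left(0, \tfrac{1}{n}\right)\right) = 1,
\]
so no choice of $k$ makes the defining condition hold for $\epsilon \le 1$.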
There is no simple characterization of uniform integrability.
However, some classes of uniformly integrable sequences are easy to describe, e.g.
\begin{fact}\label{lec19f1}
If $(X_n)_{n \ge 1}$ is a sequence bounded in $L^{1 + \delta}(\bP)$
for some $\delta > 0$ (i.e.~$\sup_n \bE[|X_n|^{1+\delta}] < \infty$),
then $(X_n)_n$ is uniformly integrable.
\end{fact}
\begin{proof}
Let $\epsilon > 0$.
Let $p \coloneqq 1 + \delta > 1$.
Choose $q$ such that $\frac{1}{p} + \frac{1}{q} = 1$.
Then, by Hölder's inequality,
\begin{IEEEeqnarray*}{rCl}
\bE[|X_n| \One_{|X_n| > k}] &\le& \bE[|X_n|^p]^{\frac{1}{p}} \bP[|X_n| > k]^{\frac{1}{q}},
\end{IEEEeqnarray*}
i.e.
\begin{IEEEeqnarray*}{rCl}
\sup_n\bE[|X_n| \One_{|X_n| > k}] &\le& \underbrace{\sup_n\bE[|X_n|^p]^{\frac{1}{p}}}_{< \infty} \sup_n \underbrace{\bP[|X_n| > k]^{\frac{1}{q}}}_{\le \left( \frac{\bE[|X_n|]}{k} \right)^{\frac{1}{q}}}
\end{IEEEeqnarray*}
where we have applied Markov's inequality. % TODO REF
Since $\sup_n \bE[|X_n|^{1+\delta}] < \infty$,
we have that $\sup_n \bE[|X_n|] < \infty$ by Jensen. % TODO REF
Hence we can choose $k$ large enough to make the right hand side
less than $\epsilon$.
\end{proof}
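To make the choice of $k$ explicit (just combining the two displays above): writing $C \coloneqq \sup_n \bE[|X_n|^p] < \infty$ and $C' \coloneqq \sup_n \bE[|X_n|] < \infty$, we get
\[
\sup_n \bE[|X_n| \One_{|X_n| > k}] \le C^{\frac{1}{p}} \left( \frac{C'}{k} \right)^{\frac{1}{q}} \xrightarrow{k \to \infty} 0,
\]
so any $k > C' \left( \frac{C^{1/p}}{\epsilon} \right)^{q}$ works.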
\begin{fact}\label{lec19f2}
If $(X_n)_n$ is uniformly integrable,
then $(X_n)_n$ is bounded in $L^1$.
\end{fact}
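A quick argument, directly from the definition: choose $k$ such that $\sup_n \bE[|X_n| \One_{\{|X_n| > k\}}] < 1$; then for every $n$
\[
\bE[|X_n|] = \bE[|X_n| \One_{\{|X_n| \le k\}}] + \bE[|X_n| \One_{\{|X_n| > k\}}] \le k + 1,
\]
so $\sup_n \bE[|X_n|] < \infty$.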
\begin{fact}\label{lec19f3}
Suppose $Y \in L^1(\bP)$ and $\sup_n |X_n(\cdot )| \le Y(\cdot )$.
Then $(X_n)_n$ is uniformly integrable.
\end{fact}
\begin{fact}\label{lec19f4}
Let $X \in L^1(\bP)$.
\begin{enumerate}[(a)]
\item $\forall \epsilon > 0 .~ \exists \delta > 0 .~\forall F \in \cF .~ \bP(F) < \delta \implies\int_F |X| \dif \bP < \epsilon$.
\item $\forall \epsilon > 0 .~ \exists k \in (0,\infty) .~ \int_{|X| > k} | X| \dif \bP < \epsilon$.
\end{enumerate}
\end{fact}
\begin{proof}
\begin{enumerate}[(a)]
\item Suppose not. Then there exists $\epsilon > 0$ such that for $\delta = 1, \frac{1}{2}, \frac{1}{2^2}, \ldots$
there exist sets $F_n \in \cF$ with $\bP(F_n) < \frac{1}{2^n}$
but $\int_{F_n} |X| \dif \bP \ge \epsilon$.
Since $\sum_{n} \bP(F_n) < \infty$,
by \autoref{borelcantelli},
\[\bP[\underbrace{\limsup_n F_n}_{\text{\reflectbox{$\coloneqq$}}F}] = 0.\]
We have
\begin{IEEEeqnarray*}{rCl}
\int_F | X| \dif \bP &=& \int |X| \One_F \dif \bP\\
&=& \int \limsup_n (|X| \One_{F_n}) \dif \bP\\
&\overset{\text{Reverse Fatou}}{\ge }&
\limsup_n \int |X| \One_{F_n} \dif \bP\\
&\ge & \epsilon
\end{IEEEeqnarray*}
where the assumption that $X$ is in $L^1$ was used to apply
the reverse of Fatou's lemma.
This yields a contradiction since $\bP(F) = 0$.
\item We want to apply part (a) to $F = \{ |X| > k\}$.
By Markov, $\bP(F) \le \frac{1}{k} \bE[|X|]$.
Since $\bE[|X|] < \infty$, we can choose $k$ large enough
to get $\bP(F) < \delta$; then part (a) gives $\int_{\{|X| > k\}} |X| \dif \bP < \epsilon$.
\end{enumerate}
\end{proof}
\begin{refproof}{lec19f3}
Fix $\epsilon > 0$.
Since $|X_n| \le Y$ and hence $\{|X_n| > k\} \subseteq \{Y > k\}$, we have
\[
\bE[|X_n| \One_{\{|X_n| > k\}}] \le \bE[Y \One_{\{Y > k\}}] < \epsilon
\]
for $k$ large enough by \autoref{lec19f4} (b).
\end{refproof}
\begin{fact}\label{lec19f5}
Let $X \in L^1(\bP)$.
Then $\bF \coloneqq \{ \bE[X | \cG] : \cG \subseteq \cF \text{ sub-$\sigma$-algebra}\}$ is uniformly integrable.
\end{fact}
\begin{proof}
Fix $\epsilon > 0$.
Choose $\delta > 0$ as in \autoref{lec19f4} (a), i.e.~such that
\[\forall F \in \cF.~ \bP(F) < \delta \implies \bE[|X| \One_F] <\epsilon. \quad (\ast)\]
Let $Y = \bE[X | \cG]$ for some sub-$\sigma$-algebra $\cG$.
Then, by \autoref{condjensen}, $|Y| \le \bE[ |X| | \cG]$.
Hence $\bE[|Y|] \le \bE[|X|]$.
By Markov's inequality,
$\bP[|Y| > k] \le \frac{1}{k} \bE[|Y|] \le \frac{1}{k} \bE[|X|] < \delta$
for $k$ suitably large, since $\bE[|X|] < \infty$.
Note that $\{|Y| > k\} \in \cG$.
We have
\begin{IEEEeqnarray*}{rCl}
\bE[|Y| \One_{\{|Y| > k\} }] &<& \epsilon
\end{IEEEeqnarray*}
by $(\ast)$, since $\bP[|Y| > k] < \delta$.
\end{proof}
\begin{theorem}
Assume that $X_n \in L^1$ for all $n$ and $X \in L^1$.
Then the following are equivalent:
\begin{enumerate}[(1)]
\item $X_n \to X$ in $L^1$.
\item $(X_n)_n$ is uniformly integrable and $X_n \to X$ in probability.
\end{enumerate}
\end{theorem}
\begin{proof}
(2) $\implies$ (1):
Fix $\epsilon > 0$ and let $k > 0$ (to be chosen below).
Define
\begin{IEEEeqnarray*}{rCl}
\phi(x) &\coloneqq & \begin{cases}
-k, & x \le -k\\
x, & x \in (-k,k)\\
k, & x \ge k.
\end{cases}
\end{IEEEeqnarray*}
$\phi$ is $1$-Lipschitz. % TODO
We have
\begin{IEEEeqnarray*}{rCl}
\int |X_n - X| \dif \bP
&\le & \int |X_n - \phi(X_n)| \dif \bP
+ \int |\phi(X) - X| \dif \bP
+ \int |\phi(X_n) - \phi(X)| \dif \bP\\
\end{IEEEeqnarray*}
We have
\[
\int |X_n - \phi(X_n)| \dif \bP
= \int_{\{|X_n| > k\}} \underbrace{|X_n - \phi(X_n)|}_{\le |X_n| + |\phi(X_n)| \le 2 |X_n|} \dif \bP
\le 2 \int_{\{|X_n| > k\}} |X_n| \dif \bP \le \epsilon
\]
uniformly in $n$, for $k$ large enough, by uniform integrability.
Similarly, $\int |X - \phi(X)| \dif \bP = \int_{\{|X| > k\}} |X - \phi(X)| \dif \bP < \epsilon$
for $k$ large enough by \autoref{lec19f4} part (b).
Since $\phi$ is Lipschitz,
$X_n \xrightarrow{\bP} X$ implies $\phi(X_n) \xrightarrow{\bP} \phi(X)$.
Since $|\phi(X_n)| \le k$ for all $n$, the bounded convergence theorem % TODO
yields $\int | \phi(X_n) - \phi(X)| \dif \bP \xrightarrow{n \to \infty} 0$.
Hence $\limsup_n \int |X_n - X| \dif \bP \le 2 \epsilon$, and since $\epsilon > 0$ was arbitrary, $X_n \to X$ in $L^1$.

(1) $\implies$ (2):
$X_n \xrightarrow{L^1} X$ implies $X_n \xrightarrow{\bP} X$
by Markov's inequality.
It remains to show uniform integrability.
Fix $\epsilon > 0$ and choose $N$ such that $\int |X_n - X| \dif \bP < \epsilon$ for all $n \ge N$.
For such $n$ we have
\begin{IEEEeqnarray*}{rCl}
\bE[|X_n|] &=& \bE[|X_n - X + X|]\\
&\le & \epsilon + \bE[|X|]\\
&<& \delta k
\end{IEEEeqnarray*}
for any $\delta > 0$, provided $k$ is chosen large enough.
Hence $\bP[|X_n| > k] < \delta$ by Markov's inequality.
Choosing $\delta$ as in \autoref{lec19f4} part (a) (applied to $X$ and $F = \{|X_n| > k\}$), it follows that
\[
\int_{|X_n| > k} |X_n| \dif \bP \le \underbrace{\int |X - X_n| \dif \bP}_{< \epsilon} + \underbrace{\int_{|X_n| > k} |X| \dif \bP}_{< \epsilon} \le 2 \epsilon
\]
for all $n \ge N$.
For the finitely many $n < N$, each $X_n \in L^1(\bP)$, so $k$ can be enlarged using \autoref{lec19f4} part (b) to handle these as well.
Hence $(X_n)_n$ is uniformly integrable.
\end{proof}
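As an illustration of how this interacts with \autoref{lec19f3}: if $X_n \xrightarrow{\bP} X$ and $\sup_n |X_n| \le Y$ for some $Y \in L^1(\bP)$,
then $(X_n)_n$ is uniformly integrable by \autoref{lec19f3}
(and passing to an a.s.~convergent subsequence shows $|X| \le Y$ a.s., so $X \in L^1$),
hence $X_n \to X$ in $L^1(\bP)$ by the theorem.
This recovers a version of the dominated convergence theorem in which a.s.~convergence is weakened to convergence in probability.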
\subsection{Martingale convergence theorems in $L^p, p \ge 1$}
Let $(\Omega, \cF, \bP)$ be a probability space as always and let $(\cF_n)_n$ be a filtration.
\begin{fact}\label{lec19f6}
Suppose that $X \in L^p$ for some $p \ge 1$.
Then $(\bE[X | \cF_n])_n$ is an $\cF_n$-martingale.
\end{fact}
\begin{proof}
Let $X_n \coloneqq \bE[X | \cF_n]$.
It is clear that $(X_n)_n$ is adapted to $(\cF_n)_n$,
and $X_n \in L^1$ since $\bE[|X_n|] \le \bE[\bE[|X| | \cF_n]] = \bE[|X|] < \infty$ by conditional Jensen.
Consider
\begin{IEEEeqnarray*}{rCl}
\bE[X_n - X_{n-1} | \cF_{n-1}]
&=& \bE[\bE[X | \cF_n] - \bE[X | \cF_{n-1}] | \cF_{n-1}]\\
&\overset{\text{tower}}{=}& \bE[X | \cF_{n-1}] - \bE[X | \cF_{n-1}]\\
&=& 0.
\end{IEEEeqnarray*}
\end{proof}
\begin{theorem}
Let $X \in L^p$ for some $p \ge 1$.
Then $X_n \coloneqq \bE[X | \cF_n]$ defines a martingale which converges
to $X$ in $L^p$.
\end{theorem}
\begin{proof}
\end{proof}
\begin{theorem}
Let $p > 1$.
Let $(X_n)_n$ be a martingale bounded in $L^p$.
Then there exists a random variable $X \in L^p$, such that
$X_n = \bE[X | \cF_n]$ for all $n$.
\end{theorem}


@@ -42,6 +42,7 @@
\input{inputs/lecture_16.tex}
\input{inputs/lecture_17.tex}
\input{inputs/lecture_18.tex}
\input{inputs/lecture_19.tex}
\cleardoublepage