From 559fc6dd271c8673d8dd9c182bfcb005063755ae Mon Sep 17 00:00:00 2001
From: Josia Pietsch
Date: Tue, 20 Jun 2023 17:56:38 +0200
Subject: [PATCH] lecture 18

---
 inputs/lecture_17.tex  |  13 +--
 inputs/lecture_18.tex  | 214 +++++++++++++++++++++++++++++++++++++++++
 probability_theory.tex |   1 +
 3 files changed, 217 insertions(+), 11 deletions(-)
 create mode 100644 inputs/lecture_18.tex

diff --git a/inputs/lecture_17.tex b/inputs/lecture_17.tex
index 0bda9c9..164472e 100644
--- a/inputs/lecture_17.tex
+++ b/inputs/lecture_17.tex
@@ -48,7 +48,7 @@ Stop playing when $X_n \ge b$.
 I.e.~define
 \begin{itemize}
 	\item $C_1 \coloneqq 0$,
-	\item $C_n \coloneqq \One_{\{C_{n-1} = 1\}} \cdot \One_{\{X_{n-1} \le b\}} + \One_{\{C_{n-1} = 0\} } \One_{\{X_{n-1}\} < a}$.
+	\item $C_n \coloneqq \One_{\{C_{n-1} = 1\}} \cdot \One_{\{X_{n-1} \le b\}} + \One_{\{C_{n-1} = 0\}} \One_{\{X_{n-1} < a\}}$.
 \end{itemize}
 
 \begin{definition}
@@ -139,6 +139,7 @@ Hence there exists a random variable $X_\infty$ such that $X_n \xrightarrow{a.s.
 We have thus shown
 \begin{theorem}[Doob's martingale convergence theorem]
 	\label{doobmartingaleconvergence}
+	\label{doob}
 	Any supermartingale bounded in $L^1$ converges almost surely
 	to a random variable, which is almost surely finite.
 	In particular, any non-negative supermartingale converges a.s.~to a finite random variable.
@@ -151,16 +152,6 @@ The second part follows from
 		We need to show $\sup_n \bE(|X_n|) < \infty$.
 		Since the supermartingale is non-negative, we have $\bE[|X_n|] = \bE[X_n]$
 		and since it is a supermartingale $\bE[X_n] \le \bE[X_0]$.
-	\end{subproof} \todo{rearrange proof}
-
-
-
-\begin{example}[Branching process]
-	% TODO
-
-\end{example}
-
-
diff --git a/inputs/lecture_18.tex b/inputs/lecture_18.tex
new file mode 100644
index 0000000..138d532
--- /dev/null
+++ b/inputs/lecture_18.tex
@@ -0,0 +1,214 @@
+\lecture{18}{2023-06-20}{}
+
+Recall our key lemma for supermartingales from last time:
+\[
+	(b-a) \bE[U_N([a,b])] \le \bE[(X_N - a)^-].
+\]
+% TODO Ref
+
+What happens for submartingales?
+If $(X_n)_{n \in \N}$ is a submartingale, then $(-X_n)_{n \in \N}$ is a supermartingale.
+Hence the convergence theorem holds for submartingales as well, i.e.
+\begin{lemma}
+	A (sub-/super-)martingale bounded in $L^1$ converges
+	a.s.~to a limit, which is a.s.~finite.
+\end{lemma}
+
+\begin{question}
+	What about $L^p$ convergence of martingales?
+\end{question}
+
+\begin{example}[Branching process]
+	Let $(Z_n)_{n \ge 1}$ be i.i.d.~$\pm 1$ with
+	$\bP[Z_n = 1] = p \in (0,1)$.
+
+	Fix $u > 1$. Let $X_0 = x > 0$.
+	Define $X_{n+1} \coloneqq u^{Z_{n+1}} X_{n}$.
+
+	\paragraph{Exercise}
+	Given $u > 1$, find $p = p(u)$
+	such that $(X_n)_n$ is a martingale w.r.t.~the canonical filtration
+	(a solution sketch is given at the end of this example).
+
+	Since $(X_n)_n$ is a non-negative martingale,
+	by \autoref{doobmartingaleconvergence} there is an
+	a.s.~limit $X_\infty$.
+	By the SLLN, we have
+	\[
+		\frac{1}{n} \sum_{k=1}^{n} Z_k \xrightarrow{a.s.} \bE[Z_1] = 2p - 1.
+	\]
+	Hence
+	\[
+		\left(\frac{X_n}{x}\right)^{\frac{1}{n}} = u^{\frac{1}{n} \sum_{k=1}^n Z_k}
+		\xrightarrow{a.s.} u^{2p - 1}.
+	\]
+	Since $(X_n)_{n \ge 0}$ is a martingale, we must have $\bE[u^{Z_1}] = 1$;
+	for $u > 1$ this forces $p < \frac{1}{2}$, i.e.~$2p - 1 < 0$,
+	and therefore $u^{2p - 1} < 1$.
+
+	Hence, if $\epsilon > 0$ is small enough that $u^{2p - 1}(1+\epsilon) < 1$,
+	there exists $N_0(\epsilon)$ (possibly random)
+	such that for all $n > N_0(\epsilon)$
+	\[
+		\left( \frac{X_n}{x} \right)^{\frac{1}{n}} \le u^{2p - 1}(1 + \epsilon),
+	\]
+	and hence
+	\[
+		X_n \le x \bigl[\underbrace{u^{2p - 1} (1+\epsilon)}_{< 1}\bigr]^n \xrightarrow{n \to \infty} 0.
+	\]
+	Thus $X_\infty = 0$ a.s., while $\bE[X_n] = \bE[X_0] = x > 0$ for all $n$.
+	Hence $(X_n)_n$ cannot converge in $L^1$.
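+
+	\emph{Solution sketch for the exercise}
+	(a direct computation; here $\cF_n = \sigma(Z_1, \ldots, Z_n)$ is the canonical filtration):
+	since $Z_{n+1}$ is independent of $\cF_n$,
+	\[
+		\bE[X_{n+1} \mid \cF_n] = X_n \bE[u^{Z_{n+1}}] = X_n \left( p u + (1-p) u^{-1} \right),
+	\]
+	so $(X_n)_n$ is a martingale iff $p u + (1-p) u^{-1} = 1$,
+	i.e.~iff $p(u^2 - 1) = u - 1$, i.e.~iff $p = p(u) = \frac{1}{1+u}$.
+	For $u > 1$ this gives $p < \frac{1}{2}$, consistent with $2p - 1 < 0$ above.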
+	% TODO Confusion
+\end{example}
+
+$L^2$ is nice, since it is a Hilbert space. So we will first
+consider $L^2$.
+
+\begin{fact}[Martingale increments are orthogonal in $L^2$]
+	\label{martingaleincrementsorthogonal}
+	Let $(X_n)_n$ be a martingale with $X_n \in L^2$ for all $n$,
+	and let $Y_n \coloneqq X_n - X_{n-1}$
+	denote the \vocab{martingale increments}.
+	Then for all $m \neq n$ we have
+	\[
+		\langle Y_m | Y_n\rangle_{L^2} = \bE[Y_n Y_m] = 0.
+	\]
+\end{fact}
+\begin{proof}
+	Since $\bE[X_n | \cF_{n-1}] = X_{n-1}$ a.s.,
+	induction (tower property) gives $\bE[X_n | \cF_{k}] = X_k$ a.s.~for all $k \le n$.
+	Hence, for $m < n$, since $Y_m$ is $\cF_m$-measurable,
+	\[
+		\bE[Y_n Y_m]
+		= \bE[\bE[Y_n Y_m \mid \cF_m]]
+		= \bE[Y_m \underbrace{\bE[Y_n \mid \cF_m]}_{= X_m - X_m = 0}]
+		= 0.
+	\]
+\end{proof}
+
+\begin{fact}[Parallelogram identity]
+	For $X, Y \in L^2$,
+	\[
+		\|X + Y\|_{L^2}^2 + \|X - Y\|_{L^2}^2 = 2 \|X\|_{L^2}^2 + 2 \|Y\|_{L^2}^2.
+	\]
+\end{fact}
+
+\begin{theorem}
+	Suppose that $(X_n)_n$ is a martingale bounded in
+	$L^2$, i.e.~$\sup_n \bE[X_n^2] < \infty$.
+	Then there is a random variable $X_\infty$ such that
+	\[
+		X_n \xrightarrow{L^2} X_\infty.
+	\]
+\end{theorem}
+\begin{proof}
+	Let $Y_n \coloneqq X_n - X_{n-1}$ and write
+	\[
+		X_n = X_0 + \sum_{j=1}^{n} Y_j.
+	\]
+	We have
+	\[
+		\bE[X_n^2] = \bE[X_0^2] + \sum_{j=1}^{n} \bE[Y_j^2]
+	\]
+	by \autoref{martingaleincrementsorthogonal}
+	(Pythagoras in $L^2$).
+	In particular,
+	\[
+		\sup_n \bE[X_n^2] < \infty \iff \sum_{j=1}^{\infty} \bE[Y_j^2] < \infty.
+	\]
+
+	Since $(X_n)_n$ is bounded in $L^2$, it is bounded in $L^1$,
+	hence there exists $X_\infty$ such that $X_n \xrightarrow{\text{a.s.}} X_\infty$
+	by \autoref{doob}.
+
+	It remains to show $X_n \xrightarrow{L^2} X_\infty$.
+	For any $r \in \N$,
+	\[
+		\bE[(X_{n+r} - X_n)^2] = \sum_{j=n+1}^{n+r} \bE[Y_j^2] \xrightarrow{n \to \infty} 0
+	\]
+	as a tail of a convergent series.
+	Hence $(X_n)_n$ is Cauchy in $L^2$, thus it converges in $L^2$.
+	Moreover, letting $r \to \infty$, Fatou's lemma gives
+	\[
+		\bE[(X_\infty - X_n)^2] \le \sum_{j \ge n + 1} \bE[Y_j^2] \xrightarrow{n\to \infty} 0,
+	\]
+	so indeed $X_n \xrightarrow{L^2} X_\infty$.
+\end{proof}
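+
+As an illustration of this theorem, here is a standard application (sketch):
+\begin{example}
+	Let $(\xi_j)_{j \ge 1}$ be independent random variables with
+	$\bE[\xi_j] = 0$ and $\sum_{j \ge 1} \bE[\xi_j^2] < \infty$.
+	Then $X_n \coloneqq \sum_{j=1}^{n} \xi_j$ defines a martingale
+	(w.r.t.~the canonical filtration) with
+	\[
+		\bE[X_n^2] = \sum_{j=1}^{n} \bE[\xi_j^2],
+	\]
+	so $(X_n)_n$ is bounded in $L^2$.
+	Hence the series $\sum_{j \ge 1} \xi_j$ converges a.s.~and in $L^2$.
+\end{example}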
+
+Now let $p \ge 1$ (not necessarily $p = 2$).
+First, we need a very important inequality:
+\begin{theorem}[Doob's $L^p$ inequality]
+	\label{dooblp}
+	Suppose that $(X_n)_n$ is a martingale
+	(or a non-negative sub-martingale).
+	Let $X_n^\ast \coloneqq \max \{|X_1|, |X_2|, \ldots, |X_n|\}$
+	denote the \vocab{running maximum}.
+	\begin{enumerate}[(1)]
+		\item Then
+			\[
+				\forall \ell > 0 .~\bP[X_n^\ast \ge \ell] \le \frac{1}{\ell} \int_{\{X_n^\ast \ge \ell\}} |X_n| \dif \bP \le \frac{1}{\ell} \bE[|X_n|].
+			\]
+			(Doob's $L^1$ inequality).
+		\item Fix $p > 1$. Then
+			\[
+				\bE[(X_n^\ast)^p] \le \left( \frac{p}{p-1} \right)^p \bE[|X_n|^p].
+			\]
+			(Doob's $L^p$ inequality).
+	\end{enumerate}
+\end{theorem}
+
+We first need
+\begin{lemma}
+	\label{dooplplemma}
+	Let $p > 1$ and let $X, Y$ be non-negative random variables
+	such that
+	\[
+		\forall \ell > 0 .~ \bP[Y \ge \ell] \le \frac{1}{\ell} \int_{\{Y \ge \ell\}} X \dif \bP.
+	\]
+	Then
+	\[
+		\bE[Y^p] \le \left( \frac{p}{p-1} \right)^p \bE[X^p].
+	\]
+\end{lemma}
+\begin{proof}
+	First, assume $Y \in L^p$.
+	Then
+	\begin{IEEEeqnarray}{rCl}
+		\|Y\|_{L^p}^p &=& \bE[Y^p] = \int_\Omega Y(\omega)^p \dif \bP(\omega)\nonumber\\
+		&=& \int_{\Omega} \left( \int_0^{Y(\omega)} p \ell^{p-1} \dif \ell \right) \dif \bP(\omega)\nonumber\\
+		&\overset{\text{Fubini}}{=}& \int_0^\infty p \ell^{p-1} \underbrace{\int_\Omega \One_{\{Y \ge \ell\}} \dif \bP}_{= \bP[Y \ge \ell]} \dif \ell. \label{l18star}
+	\end{IEEEeqnarray}
+
+	We have
+	\begin{IEEEeqnarray*}{rCl}
+		\eqref{l18star} &\le & \int_0^\infty p \ell^{p-1} \cdot \frac{1}{\ell} \int_{\{Y \ge \ell\}} X \dif \bP \dif \ell\\
+		&\overset{\text{Fubini}}{=}& \int_\Omega X(\omega) \int_{0}^{Y(\omega)} p \ell^{p-2} \dif \ell \, \bP(\dif \omega)\\
+		&=& \frac{p}{p-1} \int X(\omega) Y (\omega)^{p-1} \bP(\dif \omega)\\
+		&\overset{\text{Hölder}}{\le}& \frac{p}{p-1} \|X\|_{L^p} \|Y\|_{L^p}^{p-1},
+	\end{IEEEeqnarray*}
+	where the first step uses the assumption on $X$ and $Y$,
+	and the last step is Hölder's inequality with exponents $p$ and $\frac{p}{p-1}$.
+	If $\|Y\|_{L^p} = 0$ the claim is trivial;
+	otherwise, dividing by $\|Y\|_{L^p}^{p-1} < \infty$ yields
+	$\|Y\|_{L^p} \le \frac{p}{p-1} \|X\|_{L^p}$, which is the claim.
+
+	Suppose now $Y \not\in L^p$.
+	Then look at $Y_M \coloneqq Y \wedge M \in L^p$.
+	Note that $Y_M$ still satisfies the assumption:
+	for $\ell \le M$ we have $\{Y_M \ge \ell\} = \{Y \ge \ell\}$,
+	and for $\ell > M$ we have $\bP[Y_M \ge \ell] = 0$.
+	Apply the case $Y \in L^p$ to $Y_M$
+	and let $M \to \infty$ using the monotone convergence theorem.
+\end{proof}
+
+\begin{refproof}{dooblp}
+	Fix $\ell > 0$ and let $E \coloneqq \{X_n^\ast \ge \ell\} = E_1 \sqcup \ldots \sqcup E_n$,
+	where
+	\[
+		E_j \coloneqq \{|X_1| < \ell, |X_2| < \ell, \ldots, |X_{j-1}| < \ell, |X_j| \ge \ell\}
+	\]
+	is the event that the process first reaches $\ell$ at time $j$
+	(the strict inequalities make the $E_j$ disjoint).
+	Since $|X_j| \ge \ell$ on $E_j$,
+	\[
+		\bP[E_j] \overset{\text{Markov}}{\le } \frac{1}{\ell} \int_{E_j} |X_j| \dif \bP. \tag{$\ast\ast$}
+	\]
+	Since $(X_n)_n$ is a martingale (or a non-negative sub-martingale),
+	$(|X_n|)_n$ is a sub-martingale (by \autoref{jensen}).
+	Hence, as $E_j \in \cF_j$,
+	\begin{IEEEeqnarray*}{rCl}
+		\bE[\One_{E_j}(|X_n| - |X_{j}|) | \cF_j]
+		&=& \One_{E_j} \bE[(|X_n| - |X_{j}|)|\cF_j]\\
+		&\overset{\text{a.s.}}{\ge }& 0.
+	\end{IEEEeqnarray*}
+	By the law of total expectation, \autoref{totalexpectation},
+	it follows that
+	\[
+		\bE[\One_{E_j} (|X_n| - |X_j|)] \ge 0. \tag{$\ast\ast\ast$}
+	\]
+
+	Now
+	\begin{IEEEeqnarray*}{rCl}
+		\bP(E) &=& \sum_{j=1}^n \bP(E_j)\\
+		&\overset{(\ast\ast), (\ast\ast\ast)}{\le }& \frac{1}{\ell} \left( \int_{E_1} |X_n| \dif \bP + \ldots + \int_{E_n} |X_n| \dif \bP \right)\\
+		&=& \frac{1}{\ell} \int_E |X_n| \dif \bP \le \frac{1}{\ell} \bE[|X_n|].
+	\end{IEEEeqnarray*}
+	This proves the first part.
+
+	For the second part, we apply the first part and
+	\autoref{dooplplemma} with $Y \coloneqq X_n^\ast$ and $X \coloneqq |X_n|$.
+\end{refproof}
diff --git a/probability_theory.tex b/probability_theory.tex
index fc9e70f..0e25050 100644
--- a/probability_theory.tex
+++ b/probability_theory.tex
@@ -41,6 +41,7 @@
 \input{inputs/lecture_15.tex}
 \input{inputs/lecture_16.tex}
 \input{inputs/lecture_17.tex}
+\input{inputs/lecture_18.tex}
 
 \cleardoublepage