\lecture { 20} { 2023-06-27} { }
\begin { refproof} { ceismartingale}
By the \yaref { cetower}
it is clear that $ ( \bE [ X | \cF _ n ] ) _ n $
is a martingale.
First step:
Assume that $ X $ is bounded.
Then, by \yaref { cjensen} , $ |X _ n| \le \bE [ |X| | \cF _ n ] $ ,
hence $ \sup _ { \substack { n \in \N \\ \omega \in \Omega } } | X _ n ( \omega ) | < \infty $ .
Thus $ ( X _ n ) _ n $ is a martingale in $ L ^ { \infty } \subseteq L ^ 2 $ .
By the convergence theorem for martingales in $ L ^ 2 $
(\yaref { martingaleconvergencel2} )
there exists a random variable $ Y $ ,
such that $ X _ n \xrightarrow { L ^ 2 } Y $ .
Fix $ m \in \N $ and $ A \in \cF _ m $ .
Then
\begin { IEEEeqnarray*} { rCl}
\int _ A Y \dif \bP
& =& \lim _ { n \to \infty } \int _ A X_ n \dif \bP \\
& =& \lim _ { n \to \infty } \bE [X_n \One_A] \\
& =& \lim _ { n \to \infty } \bE [\bE[X | \cF_n] \One _ A]\\
&\overset{A \in \cF_m \subseteq \cF_n}{=}& \lim_{\substack{n \to \infty \\ n \ge m}} \bE[X \One_A] = \bE[X \One_A].
\end{IEEEeqnarray*}
Hence $ \int _ A Y \dif \bP = \int _ A X \dif \bP $ for all $ m \in \N , A \in \cF _ m $ .
Since $\bigcup_n \cF_n$ is a $\cap$-stable generator of $\cF_\infty \coloneqq \sigma\left( \bigcup_n \cF_n \right)$
and $\sigma(X) \subseteq \cF_\infty$,
this holds for all $A \in \cF_\infty$.
As $X$ and $Y$ are both $\cF_\infty$-measurable, it follows that $X = Y$ a.s., so $X_n \xrightarrow{L^2} X$.
Since $ ( X _ n ) _ n $ is uniformly bounded, this also means
$ X _ n \xrightarrow { L ^ p } X $ .
Second step:
Now let $ X \in L ^ p $ be general and define
\[
X'(\omega ) \coloneqq \begin { cases}
X(\omega )& \text { if } |X(\omega )| \le M,\\
0& \text { otherwise}
\end { cases}
\]
for some $ M > 0 $ .
Then $ X' \in L ^ \infty $ and
\begin { IEEEeqnarray*} { rCl}
\int | X - X'|^ p \dif \bP & =& \int _ { \{ |X| > M\} } |X|^ p \dif \bP \xrightarrow { M \to \infty } 0
\end { IEEEeqnarray*}
by dominated convergence, since $|X|^p \One_{\{|X| > M\}} \le |X|^p \in L^1$
and $\One_{\{|X| > M\}} \to 0$ a.s.~as $M \to \infty$.
Take some $\epsilon > 0$ and $M$ large enough such that
\[
    \|X - X'\|_{L^p} < \epsilon.
\]
Let $ ( X _ n' ) _ n $ be the martingale given by $ ( \bE [ X' | \cF _ n ] ) _ n $ .
Then $ X _ n' \xrightarrow { L ^ p } X' $ by the first step.
It is
\begin{IEEEeqnarray*}{rCl}
    \|X_n - X_n'\|_{L^p}^p
    &=& \bE[|\bE[X - X' | \cF_n]|^{p}]\\
    &\overset{\text{Jensen}}{\le}& \bE[\bE[|X - X'|^p | \cF_n]]\\
    &=& \|X - X'\|_{L^p}^p\\
    &<& \epsilon^p.
\end{IEEEeqnarray*}
Hence, for $n$ large enough such that $\|X_n' - X'\|_{L^p} < \epsilon$,
\[
    \|X_n - X\|_{L^p}
    \le \|X_n - X_n'\|_{L^p} + \|X_n' - X'\|_{L^p} + \|X' - X\|_{L^p}
    < 3 \epsilon.
\]
Thus $ X _ n \xrightarrow { L ^ p } X $ .
\end { refproof}
For the proof of \yaref { martingaleisce} ,
we need the following theorem, which we won't prove here:
\begin { theorem} [Banach Alaoglu]
\yalabel { Banach Alaoglu} { Banach Alaoglu} { banachalaoglu}
Let $ X $ be a normed vector space and $ X ^ \ast $ its
continuous dual.
Then the closed unit ball in $ X ^ \ast $ is compact
w.r.t.~the $ { \text { weak } } ^ \ast $ topology.
\end { theorem}
\begin { fact}
We have $L^p \cong (L^q)^\ast$ for $1 < p < \infty$, $\frac{1}{p} + \frac{1}{q} = 1$,
via
\begin { IEEEeqnarray*} { rCl}
L^ p & \longrightarrow & (L^ q)^ \ast \\
f & \longmapsto & (g \mapsto \int g f \dif \bP )
\end { IEEEeqnarray*}
We also have $ ( L ^ 1 ) ^ \ast \cong L ^ \infty $ ,
however $ ( L ^ \infty ) ^ \ast \not \cong L ^ 1 $ .
\end { fact}
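\begin{example}+
    For $p = q = 2$ this is the Riesz representation of the Hilbert space $L^2$:
    every continuous linear functional on $L^2$ is of the form
    $g \mapsto \langle g, f \rangle_{L^2} = \int g f \dif \bP$
    for a unique $f \in L^2$.
\end{example}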
\begin { refproof} { martingaleisce}
Since $ ( X _ n ) _ n $ is bounded in $ L ^ p $ , by \yaref { banachalaoglu} ,
there exists $ X \in L ^ p $ and a subsequence
$ ( X _ { n _ k } ) _ k $ such that for all $ Y \in L ^ q $ ($ \frac { 1 } { p } + \frac { 1 } { q } = 1 $ )
\[
\int X_ { n_ k} Y \dif \bP \to \int XY \dif \bP
\]
(Note that this argument does not work for $ p = 1 $ ,
because $ ( L ^ \infty ) ^ \ast \not \cong L ^ 1 $ ).
Let $ A \in \cF _ m $ for some fixed $ m $
and choose $ Y = \One _ A $ .
Then
\begin { IEEEeqnarray*} { rCl}
\int _ A X \dif \bP
& =& \lim _ { k \to \infty } \int _ A X_ { n_ k} \dif \bP \\
& =& \lim _ { k \to \infty } \bE [X_{n_k} \One_A] \\
&\overset{\text{martingale, } n_k \ge m}{=}& \bE[X_m \One_A].
\end { IEEEeqnarray*}
Hence $X_m = \bE[X | \cF_m]$ for all $m$ by the uniqueness of conditional expectation
and by \yaref { ceismartingale} ,
we get the convergence.
\end { refproof}
\begin { example} +[\vocab { Branching Process} ; Exercise 10.1, 12.4]
Let $(Y_{n,k})_{n \in \N_0, k \in \N}$ be i.i.d.~with values in $\N_0$
such that $0 < \bE[Y_{n,k}] = m < \infty$, and write $\sigma^2 \coloneqq \Var(Y_{n,k})$.
Define
\[
S_ 0 \coloneqq 1, S_ n \coloneqq \sum _ { k=1} ^ { S_ { n-1} } Y_ { n-1,k}
\]
and let $ M _ n \coloneqq \frac { S _ n } { m ^ n } $ .
$ S _ n $ models the size of a population.
\begin { claim}
$ M _ n $ is a martingale.
\end { claim}
\begin { subproof}
We have
\begin{IEEEeqnarray*}{rCl}
    \bE[M_{n+1} - M_n | \cF_n]
    &=& \frac{1}{m^{n}} \left( \frac{1}{m} \sum_{k=1}^{S_{n}} \bE[Y_{n,k}] - S_n \right)\\
    &=& \frac{1}{m^n} (S_n - S_n) = 0,
\end{IEEEeqnarray*}
using that $S_n$ is $\cF_n$-measurable and the $(Y_{n,k})_{k \in \N}$ are independent of $\cF_n$.
\end { subproof}
\begin { claim}
$ ( M _ n ) _ { n \in \N } $ is bounded in $ L ^ 2 $ iff $ m > 1 $ .
\end { claim}
\todo { TODO}
\begin { claim}
If $ m > 1 $ and $ M _ n \to M _ \infty $ ,
then
\[
\Var (M_ \infty ) = \sigma ^ 2(m(m-1))^ { -1} .
\]
\end { claim}
\todo { TODO}
\end { example}
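\begin{remark}+
    A possible route to the last two claims, assuming additionally $\sigma^2 < \infty$:
    since $\bE[S_n] = m^n$, conditioning on $\cF_n$ gives
    \begin{IEEEeqnarray*}{rCl}
        \Var(S_{n+1}) &=& \bE[\Var(S_{n+1} | \cF_n)] + \Var(\bE[S_{n+1} | \cF_n])\\
        &=& \sigma^2 \bE[S_n] + m^2 \Var(S_n)
        = \sigma^2 m^n + m^2 \Var(S_n),
    \end{IEEEeqnarray*}
    hence $\Var(M_{n+1}) = \Var(M_n) + \sigma^2 m^{-(n+2)}$ and
    \[
        \Var(M_n) = \sigma^2 \sum_{j=2}^{n+1} m^{-j}.
    \]
    As $\bE[M_n] = 1$ for all $n$, $(M_n)_n$ is bounded in $L^2$ iff the variances stay bounded,
    which (for $\sigma^2 > 0$) is the case iff $m > 1$;
    in that case $\Var(M_n) \uparrow \sigma^2 (m(m-1))^{-1}$.
\end{remark}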
\subsection { Stopping Times}
\begin { definition} [Stopping time]
\label { def:stopping-time}
A random variable $ T: \Omega \to \N _ 0 \cup \{ \infty \} $ on a filtered probability space $ ( \Omega , \cF , \{ \cF _ n \} _ n, \bP ) $ is called a \vocab { stopping time} ,
if
\[
\{ T \le n\} \in \cF _ n
\]
for all $n \in \N_0$.
Equivalently, $\{T = n\} \in \cF_n$ for all $n \in \N_0$,
since $\{T = n\} = \{T \le n\} \setminus \{T \le n-1\}$
and $\{T \le n\} = \bigcup_{k=0}^{n} \{T = k\}$.
\end { definition}
\begin { example}
A constant random variable $T = c$ is a stopping time,
since $\{T \le n\}$ is either $\emptyset$ or $\Omega$.
\end { example}
\begin { example} [Hitting times]
For an adapted process $ ( X _ n ) _ n $
with values in $ \R $ and $ A \in \cB ( \R ) $ , the \vocab { hitting time}
\[
T \coloneqq \inf \{ n \in \N : X_ n \in A\}
\]
is a stopping time,
as
\[
\{ T \le n \} = \bigcup _ { k=1} ^ n \{ X_ k \in A\} \in \cF _ n.
\]
However, the last exit time
\[
    T \coloneqq \sup \{n \in \N : X_n \in A\}
\]
is in general not a stopping time,
as deciding whether $T \le n$ requires knowledge of $(X_k)_{k > n}$.
\end { example}
\begin { example}
Consider the simple random walk, i.e.
$ X _ n $ i.i.d.~with $ \bP [ X _ n = 1 ] = \bP [ X _ n = - 1 ] = \frac { 1 } { 2 } $ .
Set $S_n \coloneqq \sum_{i=1}^{n} X_i$.
Then, for constants $A, B \in \R$,
\[
    T \coloneqq \inf \{n \in \N : S_n \ge A \lor S_n \le B\}
\]
is a stopping time,
since $\{T \le n\} = \bigcup_{k=1}^{n} \left( \{S_k \ge A\} \cup \{S_k \le B\} \right) \in \cF_n$.
\end { example}
\begin { fact}
If $ T _ 1 , T _ 2 $ are stopping times with respect to the same filtration,
then
\begin { itemize}
\item $ T _ 1 + T _ 2 $ ,
\item $ \min \{ T _ 1 , T _ 2 \} $ and
\item $ \max \{ T _ 1 , T _ 2 \} $
\end { itemize}
are stopping times.
\end { fact}
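\begin{remark}+
    A quick way to check this:
    \begin{IEEEeqnarray*}{rCl}
        \{\min\{T_1, T_2\} \le n\} &=& \{T_1 \le n\} \cup \{T_2 \le n\} \in \cF_n,\\
        \{\max\{T_1, T_2\} \le n\} &=& \{T_1 \le n\} \cap \{T_2 \le n\} \in \cF_n,\\
        \{T_1 + T_2 \le n\} &=& \bigcup_{k=0}^{n} \left( \{T_1 = k\} \cap \{T_2 \le n-k\} \right) \in \cF_n,
    \end{IEEEeqnarray*}
    where $\{T_1 = k\} \in \cF_k \subseteq \cF_n$ and $\{T_2 \le n-k\} \in \cF_{n-k} \subseteq \cF_n$.
\end{remark}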
\begin { warning}
Note that $T_1 - T_2$ is in general not a stopping time.
\end { warning}
\begin { remark}
There are two ways to look at the interaction between a stopping time $ T $
and a stochastic process $ ( X _ n ) _ n $ :
\begin { itemize}
\item The behaviour of $ X _ n $ until $ T $ , i.e.
\[
X^ T \coloneqq \left (X_ { T \wedge n} \right )_ { n \in \N }
\]
is called the \vocab { stopped process} .
\item The value of $(X_n)_n$ at time $T$,
i.e.~looking at $ X _ T $ .
\end { itemize}
\end { remark}
\begin { example}
If we look at a process
\[ S _ n = \sum _ { i = 1 } ^ { n } X _ i \]
for some $ ( X _ n ) _ n $ ,
then
\[ S ^ T = ( \sum _ { i = 1 } ^ { T \wedge n } X _ i ) _ n \]
and
\[ S _ T = \sum _ { i = 1 } ^ { T } X _ i. \]
\end { example}
\begin { theorem}
If $ ( X _ n ) _ n $ is a supermartingale and $ T $ is a stopping time,
then $ X ^ T $ is also a supermartingale,
and we have $ \bE [ X _ { T \wedge n } ] \le \bE [ X _ 0 ] $ for all $ n $ .
If $ ( X _ n ) _ n $ is a martingale, then so is $ X ^ T $
and $ \bE [ X _ { T \wedge n } ] = \bE [ X _ 0 ] $ .
\end { theorem}
\begin { proof}
First, we need to show that $ X ^ T $ is adapted.
This is clear since
\begin{IEEEeqnarray*}{rCl}
    X^T_n &=& X_T \One_{\{T < n\}} + X_n \One_{\{T \ge n\}}\\
    &=& \sum_{k=0}^{n-1} X_k \One_{\{T = k\}} + X_n \One_{\{T \ge n\}}.
\end{IEEEeqnarray*}
It is also clear that $X^T_n$ is integrable, since
\[
    \bE[|X^T_n|] \le \sum_{k=0}^{n} \bE[|X_k|] < \infty.
\]
We have
\begin{IEEEeqnarray*}{rCl}
    && \bE[X^T_n - X^T_{n-1} | \cF_{n-1}]\\
    &=& \bE\left[X_n \One_{\{T \ge n\}} + \sum_{k=0}^{n-1} X_k \One_{\{T = k\}}
        - X_{n-1} (\One_{\{T \ge n\}} + \One_{\{T = n-1\}})\right.\\
    && \left.- \sum_{k=0}^{n-2} X_k \One_{\{T = k\}} \middle| \cF_{n-1}\right]\\
    &=& \bE[(X_n - X_{n-1}) \One_{\{T \ge n\}} | \cF_{n-1}]\\
    &\overset{\{T \ge n\} \in \cF_{n-1}}{=}& \One_{\{T \ge n\}} \left(\bE[X_n | \cF_{n-1}] - X_{n-1}\right)
    \begin{cases}
        \le 0,\\
        = 0 \text{ if $(X_n)_n$ is a martingale}.
    \end{cases}
\end{IEEEeqnarray*}
In particular, $\bE[X_{T \wedge n}] = \bE[X^T_n] \le \bE[X^T_0] = \bE[X_0]$,
with equality if $(X_n)_n$ is a martingale.
\end { proof}
\begin { remark}
\label { roptionalstoppingi}
We now want a similar statement for $ X _ T $ .
In the case that $ T \le M $ is bounded,
we get from the above that
\[
\bE [X_T] \overset { n \ge M} { =} \bE [X^T_n] \begin { cases}
\le \bE [X_0] & \text { supermartingale} ,\\
= \bE [X_0] & \text { martingale} .
\end { cases}
\]
However if $ T $ is not bounded, this does not hold in general.
\end { remark}
\begin { example}
Let $ ( S _ n ) _ n $ be the simple random walk
and take $ T \coloneqq \inf \{ n : S _ n = 1 \} $ .
Then $\bP[T < \infty] = 1$, but
\[
    1 = \bE[S_T] \neq \bE[S_0] = 0.
\]
Note that here $T$ is unbounded, $\bE[T] = \infty$, and $(S_n)_n$ is not uniformly bounded.
\end { example}
\begin { theorem} [Optional Stopping]
\yalabel { Optional Stopping Theorem} { Optional Stopping} { optionalstopping}
Let $ ( X _ n ) _ n $ be a supermartingale
and let $ T $ be a stopping time
taking values in $ \N $ .
If one of the following holds
\begin { enumerate} [(i)]
\item $ T \le M $ is bounded,
\item $ ( X _ n ) _ n $ is uniformly bounded
and $ T < \infty $ a.s.,
\item $ \bE [ T ] < \infty $
and $ |X _ n ( \omega ) - X _ { n - 1 } ( \omega ) | \le K $
for all $ n \in \N , \omega \in \Omega $ and
some $ K > 0 $ ,
\end { enumerate}
then $ \bE [ X _ T ] \le \bE [ X _ 0 ] $ .
If $ ( X _ n ) _ n $ even is a martingale, then
under the same conditions
$ \bE [ X _ T ] = \bE [ X _ 0 ] $ .
\end { theorem}
\begin { proof}
(i) was already done in \yaref { roptionalstoppingi} .
(ii): Since $(X_n)_n$ is uniformly bounded and $T < \infty$ a.s., dominated convergence gives
\begin{IEEEeqnarray*}{rCl}
    \bE[X_T - X_0] &=& \lim_{n \to \infty} \bE[X_{T \wedge n} - X_0]
    \overset{\text{part (i)}}{\le} 0,
\end{IEEEeqnarray*}
as each $T \wedge n$ is a bounded stopping time.
(iii): It is
\begin{IEEEeqnarray*}{rCl}
    |X_{T \wedge n} - X_0| &=& \left| \sum_{k=1}^{T \wedge n} (X_k - X_{k-1}) \right|\\
    &\le& (T \wedge n) \cdot K\\
    &\le& T \cdot K,
\end{IEEEeqnarray*}
and $T \cdot K$ is integrable since $\bE[T] < \infty$.
Hence, we can apply dominated convergence and obtain
\begin{IEEEeqnarray*}{rCl}
    \bE[X_T - X_0] &=& \lim_{n \to \infty} \bE[X_{T \wedge n} - X_0] \le 0,
\end{IEEEeqnarray*}
again using part (i) for the bounded stopping times $T \wedge n$.
The statement about martingales follows from
applying this to $ ( X _ n ) _ n $ and $ ( - X _ n ) _ n $ ,
which are both supermartingales.
\end { proof}
\begin { remark} +
Let $ ( X _ n ) _ n $ be a supermartingale and $ T $ a stopping time.
If $(X_n)_n$ itself is not uniformly bounded,
but $T$ ensures boundedness,
i.e.~$T < \infty$ a.s.~and $(X_{T \wedge n})_n$
is uniformly bounded,
then \yaref{optionalstopping} can still be applied
to the stopped supermartingale $X^T$, as
\[
\bE [X_T] = \bE [X_{T \wedge T}]
\overset { \yaref { optionalstopping} } { \le } \bE [X_{T \wedge 0}]
= \bE [X_0] .
\]
\end { remark}
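\begin{example}+
    A standard application of this remark (sketch):
    let $(S_n)_n$ be the simple random walk as above, fix $a, b \in \N$
    and let
    \[
        T \coloneqq \inf \{n \in \N : S_n = a \lor S_n = -b\}.
    \]
    Then $T < \infty$ a.s.~and $|S_{T \wedge n}| \le \max\{a, b\}$,
    so arguing as in the remark (for the martingale $(S_n)_n$ and its negative)
    \[
        0 = \bE[S_0] = \bE[S_T] = a \bP[S_T = a] - b \bP[S_T = -b],
    \]
    which together with $\bP[S_T = a] + \bP[S_T = -b] = 1$ gives
    $\bP[S_T = a] = \frac{b}{a+b}$.
\end{example}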