From a9bb7ae3c783945bfb405d3b2c1dde52f5ce11d1 Mon Sep 17 00:00:00 2001
From: Josia Pietsch
Date: Tue, 27 Jun 2023 18:08:38 +0200
Subject: [PATCH] lecture 20

---
 inputs/lecture_19.tex  |   3 +-
 inputs/lecture_20.tex  | 201 ++++++++++++++++++++++++++++++++++++++++-
 probability_theory.tex |   1 +
 3 files changed, 201 insertions(+), 4 deletions(-)

diff --git a/inputs/lecture_19.tex b/inputs/lecture_19.tex
index 7b55c42..bedf09c 100644
--- a/inputs/lecture_19.tex
+++ b/inputs/lecture_19.tex
@@ -219,7 +219,8 @@ Let $(\Omega, \cF, \bP)$ as always and let $(\cF_n)_n$ always be a filtration.
 
 \begin{theorem}
   \label{ceismartingale}
-  Let $X \in L^p$ for some $p \ge 1$.
+  Let $X \in L^p$ for some $p \ge 1$
+  and $\cF = \sigma\left( \bigcup_n \cF_n \right)$.
   Then $X_n \coloneqq \bE[X | \cF_n]$ defines a martingale
   which converges to $X$ in $L^p$.
 \end{theorem}
diff --git a/inputs/lecture_20.tex b/inputs/lecture_20.tex
index ab378d8..711f6a7 100644
--- a/inputs/lecture_20.tex
+++ b/inputs/lecture_20.tex
@@ -22,8 +22,8 @@
     &\overset{A \in \cF_n}{=}& \lim_{\substack{n \to \infty\\n \ge m}} \bE[X \One_A]\\
   \end{IEEEeqnarray*}
   Hence $\int_A Y \dif \bP = \int_A X \dif \bP$ for all $m \in \N, A \in \cF_m$.
-  Since $\cF = \sigma\left( \bigcup \cF_n \right)$
-  this holds for all $A \in \cF$.
+  Since $\sigma(X) \subseteq \sigma\left( \bigcup_n \cF_n \right)$,
+  this holds for all $A \in \sigma(X)$.
   Hence $X = Y$ a.s., so $X_n \xrightarrow{L^2} X$.
   Since $(X_n)_n$ is uniformly bounded,
   this also means $X_n \xrightarrow{L^p} X$.
@@ -111,8 +111,203 @@ we need the following theorem, which we won't prove here:
   Hence $X_n = \bE[X | \cF_m]$ by the uniqueness of conditional expectation
   and by \autoref{ceismartingale}, we get the convergence.
-
 \end{refproof}
+
+\subsection{Stopping times}
+
+\begin{definition}[Stopping time]
+  A random variable $T: \Omega \to \N \cup \{\infty\}$ on a filtered
+  probability space $(\Omega, \cF, \{\cF_n\}_n, \bP)$ is called a
+  \vocab{stopping time} if
+  \[
+    \{T \le n\} \in \cF_n
+  \]
+  for all $n \in \N$.
+  Equivalently, $\{T = n\} \in \cF_n$ for all $n \in \N$.
+\end{definition}
+
+\begin{example}
+  A constant random variable $T = c$ is a stopping time.
+\end{example}
+
+\begin{example}[Hitting times]
+  For an adapted process $(X_n)_n$
+  with values in $\R$ and $A \in \cB(\R)$, the \vocab{hitting time}
+  \[
+    T \coloneqq \inf \{n \in \N : X_n \in A\}
+  \]
+  is a stopping time,
+  as
+  \[
+    \{T \le n\} = \bigcup_{k=1}^n \{X_k \in A\} \in \cF_n.
+  \]
+
+  However, the last exit time
+  \[
+    T \coloneqq \sup \{n \in \N : X_n \in A\}
+  \]
+  is in general not a stopping time:
+  deciding whether $\{T \le n\}$ has occurred requires knowledge
+  of the future of the process.
+\end{example}
+\begin{example}
+  Consider the simple random walk, i.e.~let $(X_n)_n$ be i.i.d.~with
+  $\bP[X_n = 1] = \bP[X_n = -1] = \frac{1}{2}$
+  and set $S_n \coloneqq \sum_{i=1}^{n} X_i$.
+  Then, for any $A, B \in \R$,
+  \[
+    T \coloneqq \inf \{n \in \N : S_n \ge A \text{ or } S_n \le B\}
+  \]
+  is a stopping time.
+\end{example}
+
+\begin{example}
+  If $T_1, T_2$ are stopping times with respect to the same filtration,
+  then
+  \begin{itemize}
+    \item $T_1 + T_2$,
+    \item $\min \{T_1, T_2\}$ and
+    \item $\max \{T_1, T_2\}$
+  \end{itemize}
+  are stopping times.
+
+  Note that $T_1 - T_2$ is in general not a stopping time.
+\end{example}
+
+\begin{remark}
+  There are two ways to interpret the interaction between a stopping time $T$
+  and a stochastic process $(X_n)_n$.
+  \begin{itemize}
+    \item The behaviour of $X_n$ until $T$,
+      i.e.~looking at the \vocab{stopped process}
+      \[
+        X^T \coloneqq \left(X_{T \wedge n}\right)_{n \in \N}.
+      \]
+    \item The value of $(X_n)_n$ at time $T$,
+      i.e.~looking at $X_T$.
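+      Note that $X_T$ is only defined on the event $\{T < \infty\}$.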
+  \end{itemize}
+\end{remark}
+\begin{example}
+  If we look at a process
+  \[
+    S_n = \sum_{i=1}^{n} X_i
+  \]
+  for some $(X_n)_n$, then
+  \[
+    S^T = \left( \sum_{i=1}^{T \wedge n} X_i \right)_n
+  \]
+  and
+  \[
+    S_T = \sum_{i=1}^{T} X_i.
+  \]
+\end{example}
+
+\begin{theorem}
+  If $(X_n)_n$ is a supermartingale and $T$ is a stopping time,
+  then $X^T$ is also a supermartingale,
+  and we have $\bE[X_{T \wedge n}] \le \bE[X_0]$ for all $n$.
+  If $(X_n)_n$ is a martingale, then so is $X^T$,
+  and $\bE[X_{T \wedge n}] = \bE[X_0]$ for all $n$.
+\end{theorem}
+\begin{proof}
+  First, we need to show that $X^T$ is adapted.
+  This is clear since
+  \begin{IEEEeqnarray*}{rCl}
+    X^T_n &=& X_T \One_{\{T < n\}} + X_n \One_{\{T \ge n\}}\\
+    &=& \sum_{k=1}^{n-1} X_k \One_{\{T = k\}} + X_n \One_{\{T \ge n\}}.
+  \end{IEEEeqnarray*}
+
+  It is also clear that $X^T_n$ is integrable, since
+  \[
+    \bE[|X^T_n|] \le \sum_{k=1}^{n} \bE[|X_k|] < \infty.
+  \]
+
+  We have
+  \begin{IEEEeqnarray*}{rCl}
+    \bE[X^T_n - X^T_{n-1} | \cF_{n-1}]
+    &=& \bE[X_n \One_{\{T \ge n\}} + \sum_{k=1}^{n-1} X_k \One_{\{T = k\}}
+        - X_{n-1}(\One_{\{T \ge n\}} + \One_{\{T = n-1\}})
+        - \sum_{k=1}^{n-2} X_k \One_{\{T = k\}} | \cF_{n-1}]\\
+    &=& \bE[(X_n - X_{n-1}) \One_{\{T \ge n\}} | \cF_{n-1}]\\
+    &=& \One_{\{T \ge n\}} (\bE[X_n | \cF_{n-1}] - X_{n-1})\\
+    && \begin{cases}
+      \le 0 & \text{if $(X_n)_n$ is a supermartingale},\\
+      = 0 & \text{if $(X_n)_n$ is a martingale}.
+    \end{cases}
+  \end{IEEEeqnarray*}
+  Here we used that $\{T \ge n\} = \{T \le n-1\}^c \in \cF_{n-1}$,
+  so the indicator can be pulled out of the conditional expectation.
+  Since $X^T_0 = X_0$, this also gives
+  $\bE[X_{T \wedge n}] = \bE[X^T_n] \le \bE[X^T_0] = \bE[X_0]$
+  in the supermartingale case, with equality in the martingale case.
+\end{proof}
+
+\begin{remark}
+  \label{roptionalstoppingi}
+  We now want a similar statement for $X_T$.
+  In the case that $T \le M$ is bounded,
+  we get from the above that
+  \[
+    \bE[X_T] \overset{n \ge M}{=} \bE[X^T_n] \begin{cases}
+      \le \bE[X_0] & \text{supermartingale},\\
+      = \bE[X_0] & \text{martingale}.
+    \end{cases}
+  \]
+
+  However, if $T$ is not bounded, this does not hold in general.
+\end{remark}
+\begin{example}
+  Let $(S_n)_n$ be the simple random walk
+  and take $T \coloneqq \inf \{n : S_n = 1\}$.
+  Then $\bP[T < \infty] = 1$, but
+  \[
+    1 = \bE[S_T] \neq \bE[S_0] = 0.
+  \]
+\end{example}
+
+\begin{theorem}[Optional Stopping]
+  \label{optionalstopping}
+  Let $(X_n)_n$ be a supermartingale
+  and let $T$ be a stopping time
+  taking values in $\N \cup \{\infty\}$.
+
+  If one of the following holds:
+  \begin{enumerate}[(i)]
+    \item $T \le M$ is bounded,
+    \item $(X_n)_n$ is uniformly bounded
+      and $T < \infty$ a.s.,
+    \item $\bE[T] < \infty$
+      and $|X_n(\omega) - X_{n-1}(\omega)| \le K$
+      for all $n \in \N, \omega \in \Omega$ and
+      some $K > 0$,
+  \end{enumerate}
+  then $\bE[X_T] \le \bE[X_0]$.
+
+  If $(X_n)_n$ even is a martingale, then
+  under the same conditions
+  $\bE[X_T] = \bE[X_0]$.
+\end{theorem}
+\begin{proof}
+  (i) was dealt with in \autoref{roptionalstoppingi}.
+
+  (ii): Since $(X_n)_n$ is uniformly bounded and $T < \infty$ a.s.,
+  we get that
+  \begin{IEEEeqnarray*}{rCl}
+    \bE[X_T - X_0] &\overset{\text{dominated convergence}}{=}& \lim_{n \to \infty} \bE[X_{T \wedge n} - X_0]\\
+    &\overset{\text{part (i)}}{\le}& 0,
+  \end{IEEEeqnarray*}
+  where part (i) applies to the bounded stopping time $T \wedge n$.
+
+  (iii): It is
+  \begin{IEEEeqnarray*}{rCl}
+    |X_{T \wedge n} - X_0| &=& \left| \sum_{k=1}^{T \wedge n} (X_k - X_{k-1}) \right|\\
+    &\le& (T \wedge n) \cdot K\\
+    &\le& T \cdot K,
+  \end{IEEEeqnarray*}
+  and $T \cdot K$ is integrable since $\bE[T] < \infty$.
+  Hence, we can apply dominated convergence and obtain
+  \begin{IEEEeqnarray*}{rCl}
+    \bE[X_T - X_0] &=& \lim_{n \to \infty} \bE[X_{T \wedge n} - X_0]
+    \overset{\text{part (i)}}{\le} 0.
+  \end{IEEEeqnarray*}
+
+  The statement about martingales follows from
+  applying this to $(X_n)_n$ and $(-X_n)_n$,
+  which are both supermartingales.
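+  Indeed, applying the supermartingale bound to both processes gives
+  \[
+    \bE[X_T] \le \bE[X_0] \quad \text{and} \quad \bE[-X_T] \le \bE[-X_0],
+  \]
+  hence $\bE[X_T] = \bE[X_0]$.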
+\end{proof}
diff --git a/probability_theory.tex b/probability_theory.tex
index 8cd381b..fb2b4f9 100644
--- a/probability_theory.tex
+++ b/probability_theory.tex
@@ -43,6 +43,7 @@
 \input{inputs/lecture_17.tex}
 \input{inputs/lecture_18.tex}
 \input{inputs/lecture_19.tex}
+\input{inputs/lecture_20.tex}
 \cleardoublepage