yaref

parent 25e248c605
commit 296c7b2f55

23 changed files with 267 additions and 198 deletions
@@ -66,7 +66,7 @@ Define $\cF = \bigcup_{n \in \N} \cF_n$. Then $\cF$ is an algebra.
 We'll show that if we define $\lambda: \cF \to [0,1]$ with
 $\lambda(A) = \lambda_n(A)$ for any $n$ where this is well defined,
 then $\lambda$ is countably additive on $\cF$.
-Using \autoref{caratheodory}, $\lambda$ will extend uniquely to a probability measure on $\sigma(\cF)$.
+Using \yaref{caratheodory}, $\lambda$ will extend uniquely to a probability measure on $\sigma(\cF)$.

 We want to prove:
 \begin{claim}
@@ -107,7 +107,7 @@ We want to prove:
 thus $\cF \subseteq \cB_\infty$. Since $\cB_\infty$ is a $\sigma$-algebra,
 $\sigma(\cF) \subseteq \cB_\infty$.
 \end{refproof}
-For the proof of \autoref{claim:lambdacountadd},
+For the proof of \yaref{claim:lambdacountadd},
 we are going to use the following:
 \begin{fact}
 \label{fact:finaddtocountadd}

@@ -1,6 +1,6 @@
 \lecture{4}{}{End of proof of Kolmogorov's consistency theorem}

-To finish the proof of \autoref{claim:lambdacountadd},
+To finish the proof of \yaref{claim:lambdacountadd},
 we need the following:
 \begin{fact}
 \label{lec4fact1}
@@ -91,7 +91,7 @@ we need the following:
 so $\{x_k^{(n)}\}_n$ is bounded.
 \end{itemize}

-By \autoref{lec4fact1},
+By \yaref{lec4fact1},
 there is an infinite set $S \subseteq \N$,
 such that $\{x_k^{(n)}\}_{n \in S}$
 converges for every $k$.
@@ -115,7 +115,7 @@ we need the following:
 \end{refproof}

 \begin{refproof}{claim:lambdacountadd}
-In order to apply \autoref{fact:finaddtocountadd},
+In order to apply \yaref{fact:finaddtocountadd},
 we need the following:
 \begin{claim}
 For any sequence $B_n \in \cF$
@@ -163,7 +163,7 @@ we need the following:
 \[
 \bigcap_{k=1}^n L_k^\ast \neq \emptyset.
 \]
-By \autoref{lem:intersectioncompactsets},
+By \yaref{lem:intersectioncompactsets},
 it follows that
 \[
 \bigcap_{k \in \N} L_k^\ast \neq \emptyset.
@@ -190,7 +190,7 @@ hence
 \end{IEEEeqnarray*}

 For the definition of $\lambda$
-as well as the proof of \autoref{claim:lambdacountadd}
+as well as the proof of \yaref{claim:lambdacountadd}
 we have only used that $(\lambda_n)_{n \in \N}$
 is a consistent family.
-Hence we have in fact shown \autoref{thm:kolmogorovconsistency}.
+Hence we have in fact shown \yaref{thm:kolmogorovconsistency}.

@@ -9,7 +9,7 @@ The RHS is constant, which we can explicitly compute from the distribution of th
 We fix a probability space $(\Omega, \cF, \bP)$ once and for all.

 \begin{theorem}
-\label{lln}
+\label{lln} % TODO - yaref
 Let $X_1, X_2,\ldots$ be i.i.d.~random variables on $(\R, \cB(\R))$
 and $m = \bE[X_i] < \infty$
 and $\sigma^{2} = \Var(X_i) = \bE[ (X_i - \bE(X_i))^2] = \bE[X_i^2] - \bE[X_i]^2 < \infty$.
@@ -37,7 +37,7 @@ We fix a probability space $(\Omega, \cF, \bP)$ once and for all.
 \begin{IEEEeqnarray*}{rCl}
 \bP\left[ \left| \frac{X_1 + \ldots + X_n}{n} - m\right| > \epsilon\right]
 &=& \bP\left[\left|\frac{S_n}{n}-m\right| > \epsilon\right]\\
-&\overset{\text{Chebyshev}}{\le }&
+&\overset{\yaref{thm:chebyshev}}{\le }&
 \frac{\Var\left( \frac{S_n}{n} \right) }{\epsilon^2}
 = \frac{1}{n} \frac{\Var(X_1)}{\epsilon^2}
 \xrightarrow{n \to \infty} 0
@@ -69,7 +69,7 @@ Consider the following:
 where $X_n$ has distribution
 $\frac{1}{n^2} \delta_n + \frac{1}{n^2} \delta_{-n} + (1-\frac{2}{n^2}) \delta_0$.
 We have $\bP[X_n \neq 0] = \frac{2}{n^2}$.
-Since this is summable, Borel-Cantelli yields
+Since this is summable, \yaref{thm:borelcantelli} yields
 \[
 \bP[X_{n} \neq 0 \text{ for infinitely many $n$}] = 0.
 \]
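For reference, the summability invoked above is explicit (a standard fact):
\[
\sum_{n \ge 1} \bP[X_n \neq 0] = \sum_{n \ge 1} \frac{2}{n^2} = \frac{\pi^2}{3} < \infty,
\]
so the Borel-Cantelli lemma indeed applies.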
@@ -1,6 +1,6 @@
 \lecture{6}{}{Proof of SLLN}
 \begin{refproof}{lln}
-We want to deduce the SLLN (\autoref{lln}) from \autoref{thm2}.
+We want to deduce the SLLN (\yaref{lln}) from \yaref{thm2}.
 W.l.o.g.~let us assume that $\bE[X_i] = 0$
 (otherwise define $X'_i \coloneqq X_i - \bE[X_i]$).
 We will show that $\frac{S_n}{n} \xrightarrow{a.s.} 0$.
@@ -8,7 +8,7 @@
 Then the $Y_i$ are independent and we have $\bE[Y_i] = 0$
 and $\Var(Y_i) = \frac{\sigma^2}{i^2}$.
 Thus $\sum_{i=1}^\infty \Var(Y_i) < \infty$.
-From \autoref{thm2} we obtain that $\sum_{i=1}^\infty Y_i$ converges a.s.
+From \yaref{thm2} we obtain that $\sum_{i=1}^\infty Y_i$ converges a.s.
 \begin{claim}
 Let $(a_n)$ be a sequence in $\R$
 such that $\sum_{n=1}^{\infty} \frac{a_n}{n}$ converges,
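The claim being introduced here is a form of Kronecker's lemma. For reference, assuming $Y_i = \frac{X_i}{i}$ as the surrounding proof suggests, it is applied with $a_i = X_i$:
\[
\sum_{i=1}^{\infty} \frac{X_i}{i} \text{ converges a.s.} \implies \frac{S_n}{n} = \frac{1}{n} \sum_{i=1}^{n} X_i \xrightarrow{\text{a.s.}} 0.
\]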
@@ -45,9 +45,9 @@
 The SLLN follows from the claim.
 \end{refproof}

-In order to prove \autoref{thm2}, we need the following:
+In order to prove \yaref{thm2}, we need the following:
 \begin{theorem}[Kolmogorov's inequality]
-\label{thm:kolmogorovineq}
+\yalabel{Kolmogorov's Inequality}{Kolmogorov}{thm:kolmogorovineq}
 If $X_1,\ldots, X_n$ are independent with $\bE[X_i] = 0$
 and $\Var(X_i) = \sigma_i^2$, then
 \[
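The display is cut off at the hunk boundary; for reference, the standard form of Kolmogorov's inequality, consistent with its use in the next hunk, reads
\[
\bP\left[\max_{1 \le k \le n} |S_k| \ge \epsilon\right] \le \frac{1}{\epsilon^2} \sum_{i=1}^{n} \sigma_i^2, \qquad S_k \coloneqq X_1 + \ldots + X_k.
\]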
@@ -139,7 +139,7 @@ In order to prove \autoref{thm2}, we need the following:
 \begin{IEEEeqnarray*}{rCl}
 &&\max \{|S_{m+1} - S_m|, |S_{m+2} - S_m|, \ldots, |S_{m+l} - S_m|\}\\
 &=& \max \{|X_{m+1}|, |X_{m+1} + X_{m+2}|, \ldots, |X_{m+1} + X_{m+2} + \ldots + X_{m+l}|\}\\
-&\overset{\text{\autoref{thm:kolmogorovineq}}}{\le}&
+&\overset{\yaref{thm:kolmogorovineq}}{\le}&
 \frac{1}{\epsilon^2} \sum_{i=m}^{l} \Var(X_i)\\
 &\le & \frac{1}{\epsilon^2} \sum_{i=m}^\infty \Var(X_i)
 \xrightarrow{m \to \infty} 0,
@@ -200,6 +200,3 @@
 Hence $\frac{t}{N_t} \to m$.

 \end{proof}
-
-
-

@@ -5,7 +5,7 @@
 when the $X_n$ are independent.
 \end{goal}
 \begin{theorem}[Kolmogorov's three-series theorem] % Theorem 3
-\label{thm:kolmogorovthreeseries}
+\yalabel{Kolmogorov's Three-Series Theorem}{3 Series}{thm:kolmogorovthreeseries}
 \label{thm3}
 Let $X_n$ be a family of independent random variables.
 \begin{enumerate}[(a)]
@@ -21,7 +21,7 @@
 Then all three series above converge for every $C > 0$.
 \end{enumerate}
 \end{theorem}
-For the proof we'll need a slight generalization of \autoref{thm2}:
+For the proof we'll need a slight generalization of \yaref{thm2}:
 \begin{theorem} %[Theorem 4]
 \label{thm4}
 Let $\{X_n\}_n$ be independent and \vocab{uniformly bounded}
@@ -31,7 +31,7 @@ For the proof we'll need a slight generalization of \autoref{thm2}:
 converge.
 \end{theorem}
 \begin{refproof}{thm3}
-Assume, that we have already proved \autoref{thm4}.
+Assume that we have already proved \yaref{thm4}.
 We prove part (a) first.
 Put $Y_n = X_n \cdot \One_{\{|X_n| \le C\}}$.
 Since the $X_n$ are independent, the $Y_n$ are independent as well.
@@ -40,11 +40,11 @@
 $\sum_{n \ge 1} \int_{|X_n| \le C} X_n \dif\bP = \sum_{n \ge 1} \bE[Y_n]$
 and $\sum_{n \ge 1} \int_{|X_n| \le C} X_n^2 \dif\bP - \left( \int_{|X_n| \le C} X_n \dif\bP \right)^2 = \sum_{n \ge 1} \Var(Y_n)$
 converge.
-By \autoref{thm4} it follows that $\sum_{n \ge 1} Y_n < \infty$
+By \yaref{thm4} it follows that $\sum_{n \ge 1} Y_n < \infty$
 almost surely.
 Let $A_n \coloneqq \{\omega : |X_n(\omega)| > C\}$.
 Since $\sum_{n \ge 1} \bP(A_n) < \infty$ by assumption,
-Borel-Cantelli yields $\bP[\text{infinitely many $A_n$ occur}] = 0$.
+\yaref{thm:borelcantelli} yields $\bP[\text{infinitely many $A_n$ occur}] = 0$.


 For the proof of (b), suppose $\sum_{n\ge 1} X_n(\omega) < \infty$
@@ -59,7 +59,7 @@ For the proof we'll need a slight generalization of \autoref{thm2}:
 \]
 Then the $Y_n$ are independent and $\sum_{n \ge 1} Y_n(\omega) < \infty$
 almost surely and the $Y_n$ are uniformly bounded.
-By \autoref{thm4} $\sum_{n \ge 1} \bE[Y_n]$ and $\sum_{n \ge 1} \Var(Y_n)$
+By \yaref{thm4} $\sum_{n \ge 1} \bE[Y_n]$ and $\sum_{n \ge 1} \Var(Y_n)$
 converge.
 Define
 \[
@@ -70,7 +70,7 @@ For the proof we'll need a slight generalization of \autoref{thm2}:
 \]
 Then the $Z_n$ are independent, uniformly bounded and $\sum_{n \ge 1} Z_n(\omega) < \infty$
 almost surely.
-By \autoref{thm4} we have
+By \yaref{thm4} we have
 $\sum_{n \ge 1} \bE(Z_n) < \infty$
 and $\sum_{n \ge 1} \Var(Z_n) < \infty$.

@@ -88,8 +88,8 @@
 $\sum_{n \ge 1} \Var(Z_n)$ to conclude that this series converges
 as well.
 \end{refproof}
-Recall \autoref{thm2}.
-We will see, that the converse of \autoref{thm2} is true if the $X_n$ are uniformly bounded.
+Recall \yaref{thm2}.
+We will see that the converse of \yaref{thm2} is true if the $X_n$ are uniformly bounded.
 More formally:
 \begin{theorem}[Theorem 5]
 \label{thm5}
@@ -99,14 +99,14 @@ More formally:
 then $\sum_{n \ge 1} \Var(X_n) < \infty$.
 \end{theorem}
 \begin{refproof}{thm4}
-Assume we have proven \autoref{thm5}.
+Assume we have proven \yaref{thm5}.

 ``$\impliedby$'' Assume $\{X_n\}$ are independent, uniformly bounded
 and $\sum_{n \ge 1} \bE(X_n) < \infty$ as well as $\sum_{n \ge 1} \Var(X_n) < \infty$.
 We need to show that $\sum_{n \ge 1} X_n < \infty$ a.s.
 Let $Y_n \coloneqq X_n - \bE(X_n)$.
 Then the $Y_n$ are independent, $\bE(Y_n) = 0$ and $\Var(Y_n) = \Var(X_n)$.
-By \autoref{thm2} $\sum_{n \ge 1} Y_n < \infty$ a.s.
+By \yaref{thm2} $\sum_{n \ge 1} Y_n < \infty$ a.s.
 Thus $\sum_{n \ge 1} X_n < \infty$ a.s.

 ``$\implies$'' We assume that $\{X_n\}$ are independent, uniformly bounded
@@ -145,23 +145,23 @@ More formally:
 $\sum_{n \ge 1} \left(Y_n(\omega, \omega') - Z_n(\omega, \omega') \right)= \sum_{n \ge 1} \left(X_n(\omega) - X_n(\omega')\right)$.
 Thus $\sum_{n \ge 1} \left( Y_n(\omega, \omega') - Z_n(\omega, \omega') \right) < \infty$ a.s.~on $\Omega_0\otimes\Omega_0$.
 \end{subproof}
-By \autoref{thm5}, $\sum_{n} \Var(X_n) = \frac{1}{2}\sum_{n \ge 1} \Var(Y_n - Z_n) < \infty$ a.s.
+By \yaref{thm5}, $\sum_{n} \Var(X_n) = \frac{1}{2}\sum_{n \ge 1} \Var(Y_n - Z_n) < \infty$ a.s.
 Define $U_n \coloneqq X_n - \bE(X_n)$.
 Then $\bE(U_n) = 0$ and the $U_n$ are independent
 and uniformly bounded.
 We have $\sum_{n} \Var(U_n) = \sum_{n} \Var(X_n) < \infty$.
-Thus $\sum_{n} U_n$ converges a.s.~by \autoref{thm2}.
+Thus $\sum_{n} U_n$ converges a.s.~by \yaref{thm2}.
 Since by assumption $\sum_{n} X_n < \infty$ a.s.,
 it follows that $\sum_{n} \bE(X_n) < \infty$.
 \end{refproof}
 \begin{remark}
-In the proof of \autoref{thm4}
-``$\impliedby$'' is just a trivial application of \autoref{thm2}
+In the proof of \yaref{thm4}
+``$\impliedby$'' is just a trivial application of \yaref{thm2}
 and uniform boundedness was not used.
 The idea of ``$\implies$'' will lead to coupling. % TODO ?
 \end{remark}
-A proof of \autoref{thm5} can be found in the notes.\notes
-\begin{example}[Application of \autoref{thm4}]
+A proof of \yaref{thm5} can be found in the notes.\notes
+\begin{example}[Application of \yaref{thm4}]
 The series $\sum_{n} \frac{1}{n^{\frac{1}{2} + \epsilon}}$
 does not converge for $\epsilon < \frac{1}{2}$.
 However
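As a worked instance of the three-series theorem (a standard example, not taken from the notes; take $C = 1$): let $X_n = \frac{\epsilon_n}{n}$ with independent signs $\bP[\epsilon_n = \pm 1] = \frac{1}{2}$. Then
\[
\sum_{n} \bP[|X_n| > 1] = 0, \qquad \sum_{n} \bE[X_n \One_{\{|X_n| \le 1\}}] = 0, \qquad \sum_{n} \Var(X_n \One_{\{|X_n| \le 1\}}) = \sum_{n} \frac{1}{n^2} < \infty,
\]
so part (a) gives that $\sum_n \frac{\epsilon_n}{n}$ converges almost surely.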
@@ -59,7 +59,7 @@ which does not depend on the realisation of the first $k$ random variables
 for any $k \in \N$.

 \begin{theorem}[Kolmogorov's 0-1 law]
-\label{kolmogorov01}
+\yalabel{Kolmogorov's 0-1 Law}{0-1 Law}{kolmogorov01}
 Let $X_n, n \in \N$ be a sequence of independent random variables
 and let $\cT$ denote their tail-$\sigma$-algebra.
 Then $\cT$ is \vocab{$\bP$-trivial}, i.e.~$\bP[A] \in \{0,1\}$
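For reference, a typical tail event to keep in mind here: for independent $(X_n)_n$,
\[
A = \left\{\omega : \sum_{n} X_n(\omega) \text{ converges}\right\} \in \cT,
\]
since convergence is unaffected by changing finitely many terms; the theorem then forces $\bP[A] \in \{0,1\}$.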
@@ -1,7 +1,7 @@
 \lecture{9}{}{Percolation, Introduction to characteristic functions}
 \subsubsection{Application: Percolation}

-We will now discuss another application of Kolmogorov's $0-1$-law, percolation.
+We will now discuss another application of \yaref{kolmogorov01}, percolation.

 \begin{definition}[\vocab{Percolation}]
 Consider the graph with nodes $\Z^d$, $d \ge 2$, where edges from the lattice are added with probability $p$. The added edges are called \vocab[Percolation!Edge!open]{open};
@@ -178,7 +178,7 @@ We have
 \end{remark}

 \begin{theorem}[Inversion formula] % thm1
-\label{inversionformula}
+\yalabel{Inversion Formula}{Inversion Formula}{inversionformula}
 Let $(\R, \cB(\R), \bP)$ be a probability space.
 Let $F$ be the distribution function of $\bP$
 (i.e.~$F(x) = \bP((-\infty, x])$ for all $x \in \R$).
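The statement itself is cut off at the hunk boundary; for reference, the standard form of the inversion formula, consistent with the computation carried out below, is
\[
\frac{F(b) + F(b^-)}{2} - \frac{F(a) + F(a^-)}{2} = \lim_{T \to \infty} \frac{1}{2\pi} \int_{-T}^{T} \frac{e^{-\i t a} - e^{-\i t b}}{\i t} \phi(t) \dif t \qquad (a < b).
\]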
@@ -193,7 +193,7 @@ We have
 We will prove this later.

 \begin{theorem}[Uniqueness theorem] % thm2
-\label{charfuncuniqueness}
+\yalabel{Uniqueness Theorem}{Uniqueness}{charfuncuniqueness}
 Let $\bP$ and $\Q$ be two probability measures on $(\R, \cB(\R))$.
 Then $\phi_\bP = \phi_\Q \implies \bP = \Q$.

@@ -202,20 +202,20 @@ We will prove this later.
 from $\phi$.
 \end{theorem}
 \begin{refproof}{charfuncuniqueness}
-Assume that we have already shown \autoref{inversionformula}.
+Assume that we have already shown the \yaref{inversionformula}.
 Suppose that $F$ and $G$ are the distribution functions of $\bP$ and $\Q$.
 Let $a,b \in \R$ with $a < b$.
 Assume that $a$ and $b$ are continuity points of both $F$ and $G$.
-By \autoref{inversionformula} we have
+By the \yaref{inversionformula} we have
 \begin{IEEEeqnarray*}{rCl}
 F(b) - F(a) = G(b) - G(a) \label{eq:charfuncuniquefg}
 \end{IEEEeqnarray*}

-Since $F$ and $G$ are monotonic, \autoref{eq:charfuncuniquefg}
+Since $F$ and $G$ are monotonic, \yaref{eq:charfuncuniquefg}
 holds for all $a < b$ outside a countable set.

 Take $a_n$ outside this countable set, such that $a_n \ssearrow -\infty$.
-Then, \autoref{eq:charfuncuniquefg} implies that
+Then, \yaref{eq:charfuncuniquefg} implies that
 $F(b) - F(a_n) = G(b) - G(a_n)$; letting $n \to \infty$ yields $F(b) = G(b)$.
 Since $F$ and $G$ are right-continuous, it follows that $F = G$.
 \end{refproof}

@@ -14,7 +14,7 @@ where $\mu = \bP X^{-1}$.


 \begin{refproof}{inversionformula}
-We will prove that the limit in the RHS of \autoref{invf}
+We will prove that the limit in the RHS of \yaref{invf}
 exists and is equal to the LHS.
 Note that the term on the RHS is integrable, as
 \[
@@ -31,7 +31,7 @@ where $\mu = \bP X^{-1}$.
 &=& \lim_{T \to \infty} \frac{1}{2 \pi} \int_{\R} \underbrace{\int_{-T}^T \left[ \frac{\cos(t (x-b)) - \cos(t(x-a))}{-\i t}\right] \dif t}_{=0 \text{, as the function is odd}} \bP(\dif x) \\
 && + \lim_{T \to \infty} \frac{1}{2\pi} \int_{\R}\int_{-T}^T \frac{\sin(t ( x - b)) - \sin(t(x-a))}{-t} \dif t \bP(\dif x)\\
 &=& \lim_{T \to \infty} \frac{1}{\pi} \int_\R \int_{0}^T \frac{\sin(t(x-a)) - \sin(t(x-b))}{t} \dif t \bP(\dif x)\\
-&\overset{\substack{\text{\autoref{fact:sincint},}\\\text{dominated convergence}}}{=}&
+&\overset{\substack{\yaref{fact:sincint},\\\text{dominated convergence}}}{=}&
 \frac{1}{\pi} \int -\frac{\pi}{2} \One_{x < a} + \frac{\pi}{2} \One_{x > a}
 - (- \frac{\pi}{2} \One_{x < b} + \frac{\pi}{2} \One_{x > b}) \bP(\dif x)\\
 &=& \frac{1}{2} \bP(\{a\} ) + \frac{1}{2} \bP(\{b\}) + \bP((a,b))\\
@@ -103,7 +103,7 @@ where $\mu = \bP X^{-1}$.
 \bP\left( (a,b] \right) = \int_a^b f(x) \dif x.\label{thm10_3eq1}
 \]
 Let $F$ be the distribution function of $\bP$.
-It is enough to prove \autoref{thm10_3eq1}
+It is enough to prove \yaref{thm10_3eq1}
 for all continuity points $a$ and $b$ of $F$.
 We have
 \begin{IEEEeqnarray*}{rCl}
@@ -112,12 +112,14 @@ where $\mu = \bP X^{-1}$.
 &=& \frac{1}{2\pi} \int_{\R} \phi(t) \left( \frac{e^{-\i t b} - e^{-\i t a}}{- \i t} \right) \dif t\\
 &\overset{\text{dominated convergence}}{=}& \lim_{T \to \infty} \frac{1}{2\pi} \int_{-T}^{T} \phi(t) \left( \frac{e^{-\i t b} - e^{- \i t a}}{- \i t} \right) \dif t
 \end{IEEEeqnarray*}
-By \autoref{inversionformula}, the RHS is equal to $F(b) - F(a) = \bP\left( (a,b] \right)$.
+By the \yaref{inversionformula},
+the RHS is equal to $F(b) - F(a) = \bP\left( (a,b] \right)$.
 \end{refproof}

 However, Fourier analysis is not only useful for continuous probability density functions:

-\begin{theorem}[Bochner's formula for the mass at a point]\label{bochnersformula} % Theorem 4
+\begin{theorem}[Bochner's formula for the mass at a point]
+\yalabel{Bochner's Formula for the Mass at a Point}{Bochner}{bochnersformula} % Theorem 4
 Let $\bP \in M_1(\lambda)$.
 Then
 \[
@@ -174,13 +176,14 @@ However, Fourier analysis is not only useful for continuous probability density
 &=& \int_{\R} \left| \sum_{l} c_l e^{\i t_l x}\right|^2 \bP(\dif x) \ge 0
 \end{IEEEeqnarray*}
 \end{refproof}
-\begin{theorem}[Bochner's theorem]\label{thm:bochner}
-The converse to \autoref{thm:lec_10thm5} holds, i.e.~any
-$\phi: \R \to \C$ satisfying (a) and (b) of \autoref{thm:lec_10thm5}
+\begin{theorem}[Bochner's theorem]
+\yalabel{Bochner's Theorem for Positive Definite Functions}{Bochner's Theorem}{thm:bochner}%
+The converse to \yaref{thm:lec_10thm5} holds, i.e.~any
+$\phi: \R \to \C$ satisfying (a) and (b) of \yaref{thm:lec_10thm5}
 must be the Fourier transform of a probability measure $\bP$
 on $(\R, \cB(\R))$.
 \end{theorem}
-Unfortunately, we won't prove \autoref{thm:bochner} in this lecture.
+Unfortunately, we won't prove \yaref{thm:bochner} in this lecture.


 \begin{definition}[Convergence in distribution / weak convergence]
@@ -325,7 +328,8 @@ for all $f \in C_b(\R)$.
 % \end{itemize}
 %
 % \end{proof}
-\begin{theorem}[Levy's continuity theorem]\label{levycontinuity}
+\begin{theorem}[Levy's continuity theorem]
+\yalabel{Levy's Continuity Theorem}{Levy}{levycontinuity}
 % Theorem 2
 $X_n \xrightarrow{\text{d}} X$ iff
 $\phi_{X_n}(t) \to \phi_X(t)$ for all $t \in \R$.
@@ -52,7 +52,8 @@ In order to make things nicer, we do the following:
 Then $\bE[\frac{S_n - \bE[S_n]}{\sqrt{\Var(S_n)}}] = 0$
 and $\Var(\frac{S_n - \bE[S_n]}{\sqrt{\Var(S_n)}}) = 1$.

-\begin{theorem}[Central limit theorem, 1920s, Lindeberg and Levy]\label{clt}
+\begin{theorem}[Central limit theorem, 1920s, Lindeberg and Levy]%
+\yalabel{Central Limit Theorem}{CLT}{clt}
 Let $X_1,X_2,\ldots$ be i.i.d.~random variables
 with $\bE[X_1] = \mu$ and $\Var(X_1) = \sigma^2 \in (0, \infty)$.

@@ -81,9 +82,9 @@ There exists a special case of this theorem, which was proved earlier:
 Let $X_1, X_2,\ldots$ i.i.d.~with $X_1 \sim \Ber(p)$.
 Then $\bE[X_1] = p$ and $\Var(X_1) = p(1-p)$.
 Furthermore $\sum_{i=1}^n X_i \sim \Bin(n,p)$,
-and the special case follows from \autoref{clt}.
+and the special case follows from \yaref{clt}.
 \end{proof}
-\autoref{preclt} is a useful tool for approximating the Binomial distribution with the normal distribution.
+\yaref{preclt} is a useful tool for approximating the Binomial distribution with the normal distribution.
 If $S_n \sim \Bin(n,p)$ and $[a,b] \subseteq \R$, we have
 \[\bP[a \le S_n \le b] = \bP\left[\frac{a - np}{\sqrt{np(1-p)}} \le \frac{S_n -np}{\sqrt{n p (1-p)}} \le \frac{b - np}{\sqrt{n p (1-p)} }\right] \approx \Phi(b') - \Phi(a').\]

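Here $a' = \frac{a - np}{\sqrt{np(1-p)}}$ and $b' = \frac{b - np}{\sqrt{np(1-p)}}$ are the standardized endpoints implicit in the display above. A quick numerical instance: for $S_n \sim \Bin(100, \frac{1}{2})$, so $np = 50$ and $\sqrt{np(1-p)} = 5$,
\[
\bP[45 \le S_n \le 55] \approx \Phi(1) - \Phi(-1) \approx 0.68.
\]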
@@ -105,7 +106,7 @@ If $S_n \sim \Bin(n,p)$ and $[a,b] \subseteq \R$, we have

 More formally: Let $X_1,X_2,\ldots$ be i.i.d.~with $\bP[X_1=1] = \bP[X_1=-1] = \frac{1}{2}$ and consider $S_n \coloneqq \sum_{i=1}^n X_i$.

-Then \autoref{clt} states, that $S_n \approx \cN(0,n)$.
+Then the \yaref{clt} states that $S_n \approx \cN(0,n)$.
 \end{example}

 \begin{example}
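The ``$\approx$'' here is shorthand for the rescaled limit statement: since $\mu = 0$ and $\sigma^2 = 1$ for these $X_i$,
\[
\frac{S_n}{\sqrt{n}} \xrightarrow{\text{d}} \cN(0,1).
\]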
@@ -129,4 +130,3 @@
 We have $p\cdot (1-p) \le \frac{1}{4}$,
 thus $n \approx (1.96)^2 \cdot 100^2 \cdot \frac{1}{4} \approx 9600$ suffices.
 \end{example}
-

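For the record, the arithmetic behind the bound (the constants suggest a $95\%$ confidence level, $z = 1.96$, and accuracy $\frac{1}{100}$):
\[
(1.96)^2 \cdot 100^2 \cdot \tfrac{1}{4} = 3.8416 \cdot 2500 = 9604 \approx 9600.
\]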
@@ -1,12 +1,12 @@
 \lecture{12}{2023-05-16}{Proof of the CLT}

-We now want to prove \autoref{clt}.
+We now want to prove the \yaref{clt}.
 The plan is to do the following:
 \begin{enumerate}[1.]
 \item Identify the characteristic function of a standard normal
 \item Show that the characteristic functions of the $V_n$ converge pointwise
 to that of $\cN$.
-\item Apply \autoref{levycontinuity}
+\item Apply \yaref{levycontinuity}
 \end{enumerate}

 First, we need to prove some properties of characteristic functions.
@@ -94,7 +94,7 @@ First, we need to prove some properties of characteristic functions.
 For arbitrary $h \in \R$, we have
 \begin{IEEEeqnarray*}{rCl}
 \left|e^{\i t X} \frac{e^{\i h X} - 1}{h}\right| &\le & \left| \frac{1}{h} \left( e^{\i h X} - 1 \right)\right|\\
-&\overset{\text{\autoref{charfprop:c1}}}{\le}& \left|\frac{1}{h} \i h X\right| = |X|.
+&\overset{\yaref{charfprop:c1}}{\le}& \left|\frac{1}{h} \i h X\right| = |X|.
 \end{IEEEeqnarray*}
 Thus the dominated convergence theorem can be applied and we obtain
 \[
@@ -156,7 +156,7 @@ First, we need to prove some properties of characteristic functions.
 \end{refproof}


-Now, we can finally prove the CLT:
+Now, we can finally prove the \yaref{clt}:
 \begin{refproof}{clt}
 Let $X_1,X_2,\ldots$ be i.i.d.~random variables
 with $\bE[X_1] = \mu_1$, $\Var(X_1) = \sigma^2$.
@@ -212,7 +212,7 @@ Now, we can finally prove the CLT:
 \[
 \phi_n(t) \xrightarrow{n \to \infty} e^{-\frac{t^2}{2}} = \phi_{\cN(0,1)}(t).
 \]
-Using \autoref{levycontinuity}, we obtain \autoref{clt}.
+Using \yaref{levycontinuity}, we obtain the \yaref{clt}.
 \end{refproof}

 \begin{remark}
@@ -1,5 +1,5 @@
 \lecture{13}{2023-05}{}
-%The difficult part is to show \autoref{levycontinuity}.
+%The difficult part is to show \yaref{levycontinuity}.
 %This is the last lecture, where we will deal with independent random variables.

 We have seen that
@@ -12,7 +12,7 @@ if $X_1, X_2,\ldots$ are i.i.d.~with $ \mu = \bE[X_1]$,
 \end{question}

 \begin{theorem}[Lindeberg CLT]
-\label{lindebergclt}
+\yalabel{Lindeberg's CLT}{Lindeberg CLT}{lindebergclt}
 Assume $X_1, X_2, \ldots,$ are independent (but not necessarily identically distributed) with $\mu_i = \bE[X_i] < \infty$ and $\sigma_i^2 = \Var(X_i) < \infty$.
 Let $S_n = \sqrt{\sum_{i=1}^{n} \sigma_i^2}$
 and assume that
@@ -29,7 +29,7 @@ if $X_1, X_2,\ldots$ are i.i.d.~with $ \mu = \bE[X_1]$,
 \end{theorem}

 \begin{theorem}[Lyapunov condition]
-\label{lyapunovclt}
+\yalabel{Lyapunov's CLT}{Lyapunov CLT}{lyapunovclt}
 Let $X_1, X_2,\ldots$ be independent, $\mu_i = \bE[X_i] < \infty$,
 $\sigma_i^2 = \Var(X_i) < \infty$
 and $S_n \coloneqq \sqrt{\sum_{i=1}^n \sigma_i^2}$.
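The condition itself lies outside this hunk; for reference, the standard Lyapunov condition, in the notation $S_n = \sqrt{\sum_{i=1}^n \sigma_i^2}$ fixed above, is: there exists $\delta > 0$ such that
\[
\lim_{n \to \infty} \frac{1}{S_n^{2+\delta}} \sum_{i=1}^{n} \bE\left[|X_i - \mu_i|^{2+\delta}\right] = 0.
\]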
@@ -45,10 +45,10 @@ if $X_1, X_2,\ldots$ are i.i.d.~with $ \mu = \bE[X_1]$,
 The Lyapunov condition implies the Lindeberg condition.
 (Exercise).
 \end{remark}
-We will not prove the \autoref{lindebergclt} or \autoref{lyapunovclt}
+We will not prove \yaref{lindebergclt} or \yaref{lyapunovclt}
 in this lecture. However, they are quite important.

-We will now sketch the proof of \autoref{levycontinuity},
+We will now sketch the proof of \yaref{levycontinuity};
 details can be found in the notes.\notes
 \begin{definition}
 Let $(X_n)_n$ be a sequence of random variables.
@@ -62,8 +62,8 @@ details can be found in the notes.\notes
 \begin{example}+[Exercise 8.1]
 \todo{Copy}
 \end{example}
-A generalized version of \autoref{levycontinuity} is the following:
-\begin{theorem}[A generalized version of Levy's continuity \autoref{levycontinuity}]
+A generalized version of \yaref{levycontinuity} is the following:
+\begin{theorem}[A generalized version of \yaref{levycontinuity}]
 \label{genlevycontinuity}
 Suppose we have random variables $(X_n)_n$ such that
 $\bE[e^{\i t X_n}] \xrightarrow{n \to \infty} \phi(t)$ for all $t \in \R$
@@ -77,12 +77,12 @@ A generalized version of \autoref{levycontinuity} is the following:
 \item $\phi$ is continuous at $0$.
 \end{enumerate}
 \end{theorem}
-\todo{Proof of \autoref{genlevycontinuity} (Exercise 8.2)}
+\todo{Proof of \yaref{genlevycontinuity} (Exercise 8.2)}
 \begin{example}
 Let $Z \sim \cN(0,1)$ and $X_n \coloneqq n Z$.
 We have $\phi_{X_n}(t) = \bE[e^{\i t X_n}] = e^{-\frac{1}{2} t^2 n^2} \xrightarrow{n \to \infty} \One_{\{t = 0\} }$.
 $\One_{\{t = 0\}}$ is not continuous at $0$.
-By \autoref{genlevycontinuity}, $X_n$ can not converge to a real-valued
+By \yaref{genlevycontinuity}, $X_n$ cannot converge to a real-valued
 random variable.

 Exercise: $X_n \xrightarrow{(d)} \overline{X}$,
@@ -102,12 +102,12 @@ A generalized version of \autoref{levycontinuity} is the following:
 \frac{1}{n} \bE[ (X_1+ \ldots + X_n)^2]\\
 &=& \sigma^2
 \end{IEEEeqnarray*}
-For $a > 0$, by Chebyshev's inequality, % TODO
+For $a > 0$, by \yaref{thm:chebyshev},
 we have
 \[
 \bP\left[ \left| \frac{S_n}{\sqrt{n}} \right| > a \right] \leq \frac{\sigma^2}{a^2} \xrightarrow{a \to \infty} 0.
 \]
-verifying \autoref{genlevycontinuity}.
+verifying \yaref{genlevycontinuity}.
 \end{example}

 \begin{example}
@@ -133,9 +133,9 @@ A generalized version of \autoref{levycontinuity} is the following:
 Exercise: $\phi_{\frac{S_n}{n}}(t) = e^{-|t|} = \phi_{C_1}(t)$, thus $\frac{S_n}{n} \sim C_1$.
 \end{example}

-We will prove \autoref{levycontinuity} assuming
-\autoref{lec10_thm1}.
-\autoref{lec10_thm1} will be shown in the notes.\notes
+We will prove \yaref{levycontinuity} assuming
+\yaref{lec10_thm1}.
+\yaref{lec10_thm1} will be shown in the notes.\notes
 We will need the following:
 \begin{lemma}
 \label{lec13_lem1}
@@ -217,7 +217,7 @@ for all $t \in \R$.
 Apply dominated convergence.
 \end{subproof}
 So to prove $\mu_n\left( (-A,A) \right) \ge 1 - 2 \epsilon$,
-apply \autoref{s7e1}.
+apply \yaref{s7e1}.
 It suffices to show that
 \[
 \frac{A}{2} \left| \int_{-\frac{2}{A}}^{\frac{2}{A}} \phi_n(t) dt\right| - 1 \ge 1 - 2\epsilon
@@ -226,11 +226,11 @@ for all $t \in \R$.
 \[
 1 - \frac{A}{4} \left|\int_{-\frac{2}{A}}^{\frac{2}{A}} \phi_n(t) dt \right| \le \epsilon,
 \]
-which follows from \autoref{levyproofc1eqn2}.
+which follows from \yaref{levyproofc1eqn2}.
 \end{refproof}

 % Step 2
-By \autoref{lec13_lem1}
+By \yaref{lec13_lem1}
 there exists a right continuous, non-decreasing $F$
 and a subsequence $(F_{n_k})_k$ of $(F_n)_n$ where $F_n$ is
 the probability distribution function of $\mu_n$,
@@ -251,7 +251,7 @@ such that $F_{n_k}(x) \to F(x)$ for all $x$ where $F$ is continuous.
 \mu_{n_k}\left( (- \infty, x] \right) = F_{n_k}(x) \to F(x).
 \]
 Again, given $\epsilon > 0$, there exists $A > 0$, such that
-$\mu_{n_k}\left( (-A,A) \right) > 1 - 2 \epsilon$ (\autoref{levyproofc1}).
+$\mu_{n_k}\left( (-A,A) \right) > 1 - 2 \epsilon$ (\yaref{levyproofc1}).

 Hence $F(x) \ge 1 - 2 \epsilon$ for $x > A$
 and $F(x) \le 2\epsilon$ for $x < -A$.
@@ -262,13 +262,13 @@ Since $F$ is a probability distribution function, there exists
 a probability measure $\nu$ on $\R$ such that $F$ is the distribution
 function of $\nu$.
 Since $F_{n_k}(x) \to F(x)$ at all continuity points $x$ of $F$,
-by \autoref{lec10_thm1} we obtain that
+by \yaref{lec10_thm1} we obtain that
 $\mu_{n_k} \overset{k \to \infty}{\implies} \nu$.
 Hence
 $\phi_{\mu_{n_k}}(t) \to \phi_\nu(t)$, by the other direction of that theorem.
 But by assumption,
 $\phi_{\mu_{n_k}}(\cdot ) \to \phi_{\mu}(\cdot )$, so $\phi_{\mu}(\cdot) = \phi_{\nu}(\cdot )$.
-By \autoref{charfuncuniqueness}, we get $\mu = \nu$.
+By the \yaref{charfuncuniqueness}, we get $\mu = \nu$.

 We have shown that $\mu_{n_k} \implies \mu$ along a subsequence.
 We still need to show that $\mu_n \implies \mu$.
@@ -281,7 +281,7 @@ We still need to show that $\mu_n \implies \mu$.
 % \notes
 % \end{subproof}
 Assume that $\mu_n$ does not converge to $\mu$.
-By \autoref{lec10_thm1}, pick a continuity point $x_0$ of $F$,
+By \yaref{lec10_thm1}, pick a continuity point $x_0$ of $F$,
 such that $F_n(x_0) \not\to F(x_0)$.
 Pick $\delta > 0$ and a subsequence $F_{n_1}(x_0), F_{n_2}(x_0), \ldots$
 which are all outside $(F(x_0) - \delta, F(x_0) + \delta)$.
@@ -89,7 +89,7 @@ We now want to generalize this to arbitrary random variables.

 \subsection{Existence of Conditional Probability}

-We will give two different proves of \autoref{conditionalexpectation}.
+We will give two different proofs of \yaref{conditionalexpectation}.
 The first one will use orthogonal projections.
 The second will use the Radon-Nikodym theorem.
 We'll first do the easy proof, derive some properties
@@ -139,7 +139,7 @@ and then do the harder proof.

 $K$ is closed, since a pointwise limit of $\cG$-measurable
 functions is $\cG$-measurable (if it exists).
-By \autoref{orthproj},
+By \yaref{orthproj},
 there exists $Z \in K$ such that
 \[\bE[(X - Z)^2] = \inf \{ \bE[(X- W)^2] ~|~ W \in L^2(\cG)\}\]
 and
@@ -168,5 +168,6 @@ and then do the harder proof.

 Define $Z(\omega) \coloneqq \limsup_{n \to \infty} Z_n(\omega)$.
 Then $Z$ is $\cG$-measurable and since $Z_n \uparrow Z$,
-by MCT, $\bE(Z \One_G) = \bE(X \One_G)$ for all $G \in \cG$.
+by the \yaref{cmct},
+$\bE(Z \One_G) = \bE(X \One_G)$ for all $G \in \cG$.
 \end{refproof}

@@ -5,7 +5,7 @@ We want to derive some properties of conditional expectation.

 \begin{theorem}[Law of total expectation]
 \label{ceprop1}
-\label{totalexpectation}
+\yalabel{Law of Total Expectation}{Total Expectation}{totalexpectation}
 \[
 \bE[\bE[X | \cG ]] = \bE[X].
 \]
@@ -26,7 +26,7 @@ We want to derive some properties of conditional expectation.
 \[
 \int_A X \dif \bP \ge \frac{1}{n}\bP(A) + \int_A Y \dif \bP,
 \]
-contradicting property (b) from \autoref{conditionalexpectation}.
+contradicting property (b) from \yaref{conditionalexpectation}.
 \end{proof}

 \begin{example}
@@ -37,7 +37,7 @@ We want to derive some properties of conditional expectation.

 \begin{theorem}[Linearity]
 \label{ceprop3}
-\label{celinearity}
+\yalabel{Linearity of Conditional Expectation}{Linearity}{celinearity}
 For all $a,b \in \R$
 we have
 \[
@@ -50,7 +50,7 @@ We want to derive some properties of conditional expectation.

 \begin{theorem}[Positivity]
 \label{ceprop4}
-\label{cpositivity}
+\yalabel{Positivity of Conditional Expectation}{Positivity}{cpositivity}
 If $X \ge 0$, then $\bE[X | \cG] \ge 0$ a.s.
 \end{theorem}
 \begin{proof}
@@ -65,7 +65,7 @@ We want to derive some properties of conditional expectation.
 \end{proof}
 \begin{theorem}[Conditional monotone convergence theorem]
 \label{ceprop5}
-\label{mcmt}
+\yalabel{Conditional Monotone Convergence Theorem}{MCT}{cmct}
 Let $X_n,X \in L^1(\Omega, \cF, \bP)$.
 Suppose $X_n \ge 0$ with $X_n \uparrow X$.
 Then $\bE[X_n|\cG] \uparrow \bE[X|\cG]$.
@@ -73,7 +73,7 @@ We want to derive some properties of conditional expectation.
 \begin{proof}
 Let $Z_n$ be a version of $\bE[X_n | \cG]$.
 Since $X_n \ge 0$ and $X_n \uparrow$,
-by \autoref{cpositivity},
+by the \yaref{cpositivity},
 we have
 \[
 \bE[X_n | \cG] \overset{\text{a.s.}}{\ge } 0
@@ -100,7 +100,7 @@ We want to derive some properties of conditional expectation.

 \begin{theorem}[Conditional Fatou]
 \label{ceprop6}
-\label{cfatou}
+\yalabel{Conditional Fatou's Lemma}{Fatou}{cfatou}
 Let $X_n \in L^1(\Omega, \cF, \bP)$, $X_n \ge 0$.
 Then
 \[
@@ -112,7 +112,7 @@ We want to derive some properties of conditional expectation.
 \end{proof}
 \begin{theorem}[Conditional dominated convergence theorem]
 \label{ceprop7}
-\label{cdct}
+\yalabel{Conditional Dominated Convergence Theorem}{DCT}{cdct}
 Let $X_n,Y \in L^1(\Omega, \cF, \bP)$.
 Suppose that $\sup_n |X_n(\omega)| < Y(\omega)$ a.e.~
 and that $X_n$ converges to a pointwise limit $X$.
@@ -124,7 +124,7 @@ We want to derive some properties of conditional expectation.

 Recall
 \begin{fact}[Jensen's inequality]
-\label{jensen}
+\yalabel{Jensen's Inequality}{Jensen}{jensen}
 If $c : \R \to \R$ is convex and $\bE[|c \circ X|] < \infty$,
 then $\bE[c \circ X] \overset{\text{a.s.}}{\ge} c(\bE[X])$.
 \end{fact}
@@ -132,7 +132,7 @@ Recall
 For conditional expectation, we have
 \begin{theorem}[Conditional Jensen's inequality]
 \label{ceprop8}
-\label{cjensen}
+\yalabel{Jensen's Inequality}{Jensen}{cjensen}
 Let $X \in L^1(\Omega, \cF, \bP)$.
 If $c : \R \to \R$ is convex and $\bE[|c \circ X|] < \infty$,
 then $\bE[c \circ X | \cG] \ge c(\bE[X | \cG])$ a.s.
@@ -147,7 +147,7 @@ For conditional expectation, we have
 \]
 \end{fact}
 \begin{refproof}{cjensen}
-By \autoref{convapprox}, $c(x) \ge a_n X + b_n$
+By \yaref{convapprox}, $c(X) \ge a_n X + b_n$
 for all $n$.
 Hence
 \[
@@ -159,12 +159,13 @@ For conditional expectation, we have
 we conclude that a.s.~this happens simultaneously for all $n$.
 Hence
 \[
-\bE[c(X) | \cG] \ge \sup_n (a_n \bE[X | \cG] + b_n) \overset{\text{\autoref{convapprox}}}{=} c(\bE(X | \cG)).
+\bE[c(X) | \cG] \ge \sup_n (a_n \bE[X | \cG] + b_n) \overset{\yaref{convapprox}}{=} c(\bE(X | \cG)).
 \]
 \end{refproof}

 Recall
 \begin{fact}[Hölder's inequality]
+\yalabel{Hölder's Inequality}{Hölder}{thm:hoelder}
 Let $p,q \ge 1$ such that $\frac{1}{p} + \frac{1}{q} = 1$.
 Suppose $X \in L^p(\bP)$ and $Y \in L^q(\bP)$.
 Then
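A standard instance of conditional Jensen, for $X \in L^2$ and $c(x) = x^2$:
\[
\bE[X^2 | \cG] \ge \bE[X | \cG]^2 \quad \text{a.s.},
\]
i.e.~the conditional variance $\Var(X | \cG) \coloneqq \bE[X^2 | \cG] - \bE[X | \cG]^2$ is a.s.~non-negative.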
@@ -175,7 +176,7 @@ Recall

 \begin{theorem}[Conditional Hölder's inequality]
 \label{ceprop9}
-\label{choelder}
+\yalabel{Hölder's Inequality}{Hölder}{choelder}
 Let $p,q \ge 1$ such that $\frac{1}{p} + \frac{1}{q} = 1$.
 Suppose $X \in L^p(\bP)$ and $Y \in L^q(\bP)$.
 Then
@@ -203,7 +204,7 @@ Recall

 \begin{theorem}[Tower property]
 \label{ceprop10}
-\label{cetower}
+\yalabel{Tower Property}{Tower}{cetower}
 Suppose $\cF \supset \cG \supset \cH$ are sub-$\sigma$-algebras.
 Then
 \[
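For reference, taking $\cH = \{\emptyset, \Omega\}$ (so that $\bE[\,\cdot\, | \cH] = \bE[\,\cdot\,]$), the tower property recovers the law of total expectation:
\[
\bE[\bE[X | \cG]] = \bE[\bE[X | \cG] | \cH] = \bE[X | \cH] = \bE[X].
\]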
@@ -245,7 +246,7 @@ Assume $Y = \One_B$, then $Y$ simple, then take the limit (using that $Y$ is bou

 \begin{theorem}[Role of independence]
 \label{ceprop12}
-\label{ceroleofindependence}
+\yalabel{Role of Independence}{Independence}{ceroleofindependence}
 Let $X$ be a random variable,
 and let $\cG, \cH$ be $\sigma$-algebras.

@@ -272,10 +273,10 @@ Assume $Y = \One_B$, then $Y$ simple, then take the limit (using that $Y$ is bou

 For $\bE[S_{n+1} | \cF_n]$ we obtain
 \begin{IEEEeqnarray*}{rCl}
-\bE[S_{n+1} | \cF_n] &\overset{\text{\autoref{celinearity}}}{=}&
+\bE[S_{n+1} | \cF_n] &\overset{\yaref{celinearity}}{=}&
 \bE[S_n | \cF_n] + \bE[X_{n+1} | \cF_n]\\
 &\overset{\text{a.s.}}{=}& S_n + \bE[X_{n+1} | \cF_n]\\
-&\overset{\text{\autoref{ceprop12}}}{=}& S_{n} + \bE[X_n]\\
+&\overset{\yaref{ceroleofindependence}}{=}& S_{n} + \bE[X_{n+1}]\\
 &=& S_n.
 \end{IEEEeqnarray*}
 \end{example}

@@ -48,7 +48,7 @@ Note that in this setting, if $\mu(A) = 0$ it follows that $\nu(A) = 0$.

 The Radon-Nikodym theorem is the converse of that:
 \begin{theorem}[Radon-Nikodym]
-\label{radonnikodym}
+\yalabel{Radon-Nikodym Theorem}{Radon-Nikodym}{radonnikodym}

 Let $\mu$ and $\nu$ be two $\sigma$-finite measures
 on $(\Omega, \cF)$.
@@ -85,14 +85,14 @@ The Radon Nikodym theorem is the converse of that:
 \end{definition}


-With \autoref{radonnikodym} we get a very short proof of the existence
+With the \yaref{radonnikodym} we get a very short proof of the existence
 of conditional expectation:
-\begin{proof}[Second proof of \autoref{conditionalexpectation}]
+\begin{proof}[Second proof of \yaref{conditionalexpectation}]
 Let $(\Omega, \cF, \bP)$ as always, $X \in L^1(\bP)$ and $\cG \subseteq \cF$.
 It suffices to consider the case of $X \ge 0$.
 For all $G \in \cG$, define $\nu(G) \coloneqq \int_G X \dif \bP$.
 Obviously, $\nu \ll \bP$ on $\cG$.
-Then apply \autoref{radonnikodym}.
+Then apply the \yaref{radonnikodym}.
 \end{proof}

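Spelled out (under the assumptions of the proof above): the Radon-Nikodym density $Z \coloneqq \frac{\dif \nu}{\dif \bP}$, taken on the space $(\Omega, \cG, \bP)$, is $\cG$-measurable and satisfies
\[
\int_G Z \dif \bP = \nu(G) = \int_G X \dif \bP \quad \text{for all } G \in \cG,
\]
which is exactly the defining property of $\bE[X | \cG]$, so $Z$ is a version of it.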
@@ -212,7 +212,7 @@ Typically $\cF_n = \sigma(X_1, \ldots, X_n)$ for a sequence of random variables.
 Likewise, if $f$ is concave, then $(f(X_n))_n$ is a supermartingale.
 \end{corollary}
 \begin{proof}
-Apply \autoref{cjensen}.
+Apply \yaref{cjensen}.
 \end{proof}

 \begin{corollary}
@@ -130,9 +130,9 @@ exists pointwise.
 \end{lemma}
 \begin{proof}
 Since $C_n \ge 0$,
-by \autoref{lem:gambling-strategy} we have that $Y_n$ is a supermartingale.
+by \yaref{lem:gambling-strategy} we have that $Y_n$ is a supermartingale.
 Hence $\bE[Y_N] \le \bE[Y_1] = 0$.
-From \autoref{lec17l2} it follows that
+From \yaref{lec17l2} it follows that
 \[
 (b-a) \bE[U_N([a,b])] \le \bE[Y_N] + \bE[(X_N-a)^-] \le \bE[(X_N-a)^-].
 \]
@@ -146,7 +146,7 @@ exists pointwise.
 In particular, $\bP[U_\infty = \infty] = 0$.
 \end{corollary}
 \begin{proof}
-By \autoref{lec17l3}
+By \yaref{lec17l3}
 we have that
 \[(b-a) \bE[U_N([a,b])] \le \bE[ | X_N| ] + |a| \le \sup_n \bE[|X_n|] + |a|.\]
 Since $U_N(\cdot) \ge 0$ and $U_N(\cdot ) \uparrow U_\infty(\cdot )$,
@@ -160,8 +160,8 @@ Let us now consider the case that our process $(X_n)_{n \ge 1}$ is a supermartin
 bounded in $L^1(\bP)$.

 \begin{theorem}[Doob's martingale convergence theorem]
-\label{doobmartingaleconvergence}
-\label{doob}
+\yalabel{Doob's Martingale Convergence Theorem}{Doob}{doobmartingaleconvergence}
+\yalabel{Doob's Martingale Convergence Theorem}{Doob}{doob}
 Any supermartingale bounded in $L^1$ converges almost surely to a
 random variable, which is almost surely finite.
 In particular, any non-negative supermartingale converges a.s.~to a finite random variable.
@@ -179,8 +179,8 @@ We have
 \end{IEEEeqnarray*}

 We have $\Lambda_{a,b} \subseteq \{\omega : U_{\infty}([a,b])(\omega) = \infty\}$
-by \autoref{lec17l1}.
-By \autoref{lec17l3} we have $\bP(\Lambda_{a,b}) = 0$,
+by \yaref{lec17l1}.
+By \yaref{lec17l3} we have $\bP(\Lambda_{a,b}) = 0$,
 hence $\bP(\Lambda) = 0$.
 Thus there exists a random variable $X_\infty$ such that $X_n \xrightarrow{a.s.} X_\infty$.

@@ -192,7 +192,7 @@ Thus there exists a random variable $X_\infty$ such that $X_n \xrightarrow{a.s.}
 We have
 \begin{IEEEeqnarray*}{rCl}
 \bE[|X_\infty|] &=& \bE[\liminf_{n \to \infty} |X_n|]\\
-&\overset{\text{Fatou}}{\le }& \liminf_n \bE[|X_n|]\\
+&\overset{\yaref{cfatou}}{\le }& \liminf_n \bE[|X_n|]\\
 &\le & \sup_n \bE[|X_n|]\\
 &<& \infty.
 \end{IEEEeqnarray*}

@@ -37,7 +37,7 @@ Hence the same holds for submartingales, i.e.
 &=& X_n.
 \end{IEEEeqnarray*}

-By \autoref{doobmartingaleconvergence},
+By \yaref{doobmartingaleconvergence},
 there exists an a.s.~limit $X_\infty$.
 By the SLLN, we have almost surely
 \[
@@ -116,7 +116,7 @@ consider $L^2$.
 \[
 \bE[X_n^2] = \bE[X_0^2] + \sum_{j=1}^{n} \bE[Y_j^2]
 \]
-by \autoref{martingaleincrementsorthogonal}.
+by \yaref{martingaleincrementsorthogonal}.
 In particular,
 \[
 \sup_n \bE[X_n^2] < \infty \iff \sum_{j=1}^{\infty} \bE[Y_j^2] < \infty.
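For reference, the orthogonality behind this identity, assuming $Y_j \coloneqq X_j - X_{j-1}$ are the martingale increments as the display suggests: for $j < i$,
\[
\bE[Y_i Y_j] = \bE\left[Y_j \underbrace{\bE[Y_i | \cF_{i-1}]}_{= 0}\right] = 0,
\]
using the tower property and the martingale property of $(X_n)_n$.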
@@ -124,7 +124,7 @@ consider $L^2$.

 Since $(X_n)_n$ is bounded in $L^2$,
 there exists $X_\infty$ such that $X_n \xrightarrow{\text{a.s.}} X_\infty$
-by \autoref{doob}.
+by \yaref{doob}.

 It remains to show $X_n \xrightarrow{L^2} X_\infty$.
 For any $r \in \N$, consider
@@ -143,7 +143,7 @@ consider $L^2$.
 Now let $p \ge 1$ be arbitrary, not necessarily $2$.
 First, we need a very important inequality:
 \begin{theorem}[Doob's $L^p$ inequality]
-\label{dooblp}
+\yalabel{Doob's Martingale Inequalities}{Doob}{dooblp}
 Suppose that $(X_n)_n$ is a martingale
 or a non-negative submartingale.
 Let $X_n^\ast \coloneqq \max \{|X_1|, |X_2|, \ldots, |X_n|\}$
@@ -158,7 +158,7 @@ First, we need a very important inequality:
 \end{enumerate}
 \end{theorem}

-In order to prove \autoref{dooblp}, we first need
+In order to prove \yaref{dooblp}, we first need
 \begin{lemma}
 \label{dooplplemma}
 Let $p > 1$ and $X,Y$ non-negative random variables
@@ -182,7 +182,7 @@ In order to prove \autoref{dooblp}, we first need
 &=& \int Y(\omega)^p \dif \bP(\omega)\\
 &=&\int_{\Omega} \left( \int_0^{Y(\omega)} p \ell^{p-1} \dif \ell
 \right) \dif \bP(\omega)\\
-&\overset{\text{Fubini}}{=}&
+&\overset{\yaref{thm:fubini}}{=}&
 \int_0^\infty p \ell^{p-1}\underbrace{\int_\Omega \One_{Y \ge \ell}\dif \bP}_%
 {\bP[Y \ge \ell]} \dif\ell. \label{l18star}
 \end{IEEEeqnarray}
@@ -192,7 +192,7 @@ In order to prove \autoref{dooblp}, we first need
 \eqref{l18star}
 &\le& \int_0^\infty p \ell^{p-2}
 \int_{\{Y(\omega) \ge \ell\}} X(\omega) \bP(\dif \omega)\dif \ell\\
-&\overset{\text{Fubini}}{=}&
+&\overset{\yaref{thm:fubini}}{=}&
 \int_\Omega X(\omega) \int_{0}^{Y(\omega)} p \ell^{p-2} \dif \ell\bP(\dif \omega)\\
 &=& \frac{p}{p-1} \int_{\Omega} X(\omega) Y (\omega)^{p-1} \bP(\dif \omega)\\
 &\overset{\text{Hölder}}{\le}& \frac{p}{p-1} \|X\|_{L^p} \|Y\|_{p}^{p-1},
@@ -212,11 +212,11 @@ In order to prove \autoref{dooblp}, we first need
 \]
 Then
 \begin{equation}
-\bP[E_j] \overset{\text{Markov}}{\le } \frac{1}{\ell} \int_{E_j} |X_j| \dif \bP
+\bP[E_j] \overset{\yaref{thm:markov}}{\le } \frac{1}{\ell} \int_{E_j} |X_j| \dif \bP
 \label{lec18eq2star}
 \end{equation}
 We have that $(|X_n|)_n$ is a submartingale,
-by \autoref{cor:convexmartingale}
+by \yaref{cor:convexmartingale}
 in the case of $X_n$ being a martingale
 and trivially if $X_n$ is non-negative.
 Hence
@@ -225,7 +225,7 @@ In order to prove \autoref{dooblp}, we first need
 &=& \One_{E_j} \bE[(|X_n| - |X_{j}|)|\cF_j]\\
 &\overset{\text{a.s.}}{\ge }& 0.
 \end{IEEEeqnarray*}
-By the law of total expectation, \autoref{totalexpectation},
+By the \yaref{totalexpectation},
 it follows that
 \begin{equation}
 \bE[\One_{E_j} (|X_n| - |X_j|)] \ge 0. \label{lec18eq3star}
@@ -241,6 +241,6 @@ In order to prove \autoref{dooblp}, we first need
 This proves the first part.

 For the second part, we apply the first part and
-\autoref{dooplplemma} (choose $Y \coloneqq X_n^\ast$).
+\yaref{dooplplemma} (choose $Y \coloneqq X_n^\ast$).
 \end{refproof}
 \todo{Branching process}

@@ -57,10 +57,10 @@ However, some subsets can be easily described, e.g.
 \sup_n \underbrace{\bP[|X_n| > K]^{\frac{1}{q}}}_%
 {\le K^{-\frac{1}{q}} \bE[|X_n|]^{\frac{1}{q}}}\\
 \end{IEEEeqnarray*}
-where we have applied Markov's inequality. % TODO REF
+where we have applied \yaref{thm:markov}.

 Since $\sup_n \bE[|X_n|^{1+\delta}] < \infty$,
-we have that $\sup_n \bE[|X_n|] < \infty$ by Jensen (\autoref{jensen}).
+we have that $\sup_n \bE[|X_n|] < \infty$ by \yaref{jensen}.
 Hence for $K$ large enough the relevant term is less than $\epsilon$.
 \end{proof}

@@ -93,7 +93,7 @@ However, some subsets can be easily described, e.g.
 but $\int_{F_n} |X| \dif \bP \ge \epsilon$.

 Since $\sum_{n} \bP(F_n) < \infty$,
-by \autoref{borelcantelli},
+by \yaref{thm:borelcantelli},
 \[\bP[\underbrace{\limsup_n F_n}_{\text{\reflectbox{$\coloneqq$}}F}] = 0.\]
 We have
 \begin{IEEEeqnarray*}{rCl}
@@ -108,7 +108,7 @@ However, some subsets can be easily described, e.g.

 This yields a contradiction since $\bP(F) = 0$.
 \item We want to apply part (a) to $F = \{ |X| > k\}$.
-By Markov, $\bP(F) \le \frac{1}{k} \bE[|X|]$.
+By \yaref{thm:markov}, $\bP(F) \le \frac{1}{k} \bE[|X|]$.
 Since $\bE[|X|] < \infty$, we can choose $k$ large enough
 to get $\bP(F) \le \delta$.
 \end{enumerate}
@@ -120,7 +120,7 @@ However, some subsets can be easily described, e.g.
 \[
 \bE[|X_n| \One_{|X_n| > k}] \le \bE[|Y| \One_{|Y| > k}] < \epsilon
 \]
-for $k$ large enough by \autoref{lec19f4} (b).
+for $k$ large enough by \yaref{lec19f4} (b).
 \end{refproof}

 \begin{fact}\label{lec19f5}
@@ -135,9 +135,9 @@ However, some subsets can be easily described, e.g.
 \label{lec19eqstar}
 \end{equation}
 Let $Y = \bE[X | \cG]$ for some sub-$\sigma$-algebra $\cG$.
-Then, by \autoref{cjensen}, $|Y| \le \bE[ |X| | \cG]$.
+Then, by \yaref{cjensen}, $|Y| \le \bE[ |X| | \cG]$.
 Hence $\bE[|Y|] \le \bE[|X|]$.
-By Markov's inequality,
+By \yaref{thm:markov},
 it follows that $\bP[|Y| > k] < \delta$
 for $k > \frac{\bE[|X|]}{\delta}$.
 Note that $\{|Y| > k\} \in \cG$.
@@ -179,19 +179,19 @@ However, some subsets can be easily described, e.g.
 + \int |\phi(X_n) - \phi(X)| \dif \bP\\
 \end{IEEEeqnarray*}
 We have $\int_{|X_n| > k} \underbrace{|X_n - \phi(X_n)|}_{\le |X_n| + | \phi(X_n)| \le 2 |X_n|} \dif \bP\le \epsilon$ by uniform integrability and
-\autoref{lec19f4} part (b).
+\yaref{lec19f4} part (b).
 Similarly $\int_{|X| > k} |X - \phi(X)| \dif \bP < \epsilon$.

 Since $\phi$ is Lipschitz,
 $ X_n \xrightarrow{\bP} X \implies \phi(X_n) \xrightarrow{\bP} \phi(X)$.
-By the bounded convergence theorem, \autoref{thm:boundedconvergence},
+By the \yaref{thm:boundedconvergence},
 $|\phi(X_n)| \le k \implies \int | \phi(X_n) - \phi(X)| \dif \bP \to 0$.


 (1) $\implies$ (2)

 $X_n \xrightarrow{L^1} X \implies X_n \xrightarrow{\bP} X$
-by Markov's inequality (see \autoref{claim:convimpll1p}).
+by \yaref{thm:markov} (see \yaref{claim:convimpll1p}).

 Fix $\epsilon > 0$.
 We have
@@ -202,8 +202,8 @@ However, some subsets can be easily described, e.g.
 \end{IEEEeqnarray*}
 for all $\delta > 0$ and suitable $k$.

-Hence $\bP[|X_n| > k] < \delta$ by Markov's inequality.
-Then by \autoref{lec19f4} part (a) it follows that
+Hence $\bP[|X_n| > k] < \delta$ by \yaref{thm:markov}.
+Then by \yaref{lec19f4} part (a) it follows that
 \[
 \int_{|X_n| > k} |X_n| \dif \bP \le \underbrace{\int |X - X_n| \dif \bP}_{< \epsilon} + \int_{|X_n| > k} |X| \dif \bP \le 2 \epsilon.
 \]

@@ -1,16 +1,16 @@
 \lecture{20}{2023-06-27}{}
 \begin{refproof}{ceismartingale}
-By the tower property (\autoref{cetower})
+By the \yaref{cetower}
 it is clear that $(\bE[X | \cF_n])_n$
 is a martingale.

 First step:
 Assume that $X$ is bounded.
-Then, by \autoref{cjensen}, $|X_n| \le \bE[|X| | \cF_n]$,
+Then, by \yaref{cjensen}, $|X_n| \le \bE[|X| | \cF_n]$,
 hence $\sup_{\substack{n \in \N \\ \omega \in \Omega}} | X_n(\omega)| < \infty$.
 Thus $(X_n)_n$ is a martingale in $L^{\infty} \subseteq L^2$.
 By the convergence theorem for martingales in $L^2$
-(\autoref{martingaleconvergencel2})
+(\yaref{martingaleconvergencel2})
 there exists a random variable $Y$,
 such that $X_n \xrightarrow{L^2} Y$.

@@ -74,10 +74,10 @@
 Thus $X_n \xrightarrow{L^p} X$.
 \end{refproof}

-For the proof of \autoref{martingaleisce},
+For the proof of \yaref{martingaleisce},
 we need the following theorem, which we won't prove here:
 \begin{theorem}[Banach Alaoglu]
-\label{banachalaoglu}
+\yalabel{Banach Alaoglu}{Banach Alaoglu}{banachalaoglu}
 Let $X$ be a normed vector space and $X^\ast$ its
 continuous dual.
 Then the closed unit ball in $X^\ast$ is compact
@@ -96,7 +96,7 @@ we need the following theorem, which we won't prove here:
 \end{fact}

 \begin{refproof}{martingaleisce}
-Since $(X_n)_n$ is bounded in $L^p$, by \autoref{banachalaoglu},
+Since $(X_n)_n$ is bounded in $L^p$, by \yaref{banachalaoglu},
 there exists $X \in L^p$ and a subsequence
 $(X_{n_k})_k$ such that for all $Y \in L^q$ ($\frac{1}{p} + \frac{1}{q} = 1$)
 \[
@@ -115,7 +115,7 @@ we need the following theorem, which we won't prove here:
 &\overset{\text{for }n_k \ge m}{=}& \bE[X_m \One_A].
 \end{IEEEeqnarray*}
 Hence $X_m = \bE[X | \cF_m]$ by the uniqueness of conditional expectation
-and by \autoref{ceismartingale},
+and by \yaref{ceismartingale},
 we get the convergence.
 \end{refproof}

@@ -298,7 +298,7 @@ we need the following theorem, which we won't prove here:
 \end{example}

 \begin{theorem}[Optional Stopping]
-\label{optionalstopping}
+\yalabel{Optional Stopping Theorem}{Optional Stopping}{optionalstopping}
 Let $(X_n)_n$ be a supermartingale
 and let $T$ be a stopping time
 taking values in $\N$.
@@ -320,7 +320,7 @@ we need the following theorem, which we won't prove here:
 $\bE[X_T] = \bE[X_0]$.
 \end{theorem}
 \begin{proof}
-(i) was already done in \autoref{roptionalstoppingi}.
+(i) was already done in \yaref{roptionalstoppingi}.

 (ii): Since $(X_n)_n$ is bounded, we get that
 \begin{IEEEeqnarray*}{rCl}
@@ -345,3 +345,17 @@ we need the following theorem, which we won't prove here:
 applying this to $(X_n)_n$ and $(-X_n)_n$,
 which are both supermartingales.
 \end{proof}
+\begin{remark}+
+Let $(X_n)_n$ be a supermartingale and $T$ a stopping time.
+If $(X_n)_n$ itself is not bounded,
+but $T$ ensures boundedness,
+i.e.~$T < \infty$ a.s.~and $(X_{T \wedge n})_n$
+is uniformly bounded,
+the \yaref{optionalstopping} can still be applied, as
+\[
+\bE[X_T] = \bE[X_{T \wedge T}]
+\overset{\yaref{optionalstopping}}{\le} \bE[X_{T \wedge 0}]
+= \bE[X_0].
+\]
+\end{remark}
+
@@ -6,7 +6,7 @@ This is the last lecture relevant for the exam.

 \begin{goal}
 We want to see an application of the
-optional stopping theorem \ref{optionalstopping}.
+\yaref{optionalstopping}.
 \end{goal}

 \begin{notation}
@@ -110,7 +110,7 @@ is the unique solution to this problem.
 % We have $\sigma(\One_{X_{n+1} \in B}) \subseteq \sigma(X_{n}, \xi_{n+1})$.
 % $\sigma(X_1,\ldots,X_{n-1})$
 % is independent of $\sigma( \sigma(\One_{X_{n+1} \in B}), X_n)$.
-% Hence the claim follows from \autoref{ceroleofindependence}.
+% Hence the claim follows from \yaref{ceroleofindependence}.
 \end{subproof}
 \end{example}

@@ -4,7 +4,7 @@
 \subsubsection{Construction of iid random variables.}

 \begin{itemize}
-\item Definition of a consistent family (\autoref{def:consistentfamily})
+\item Definition of a consistent family (\yaref{def:consistentfamily})
 \item Important construction:

 Consider a distribution function $F$ and define
@@ -16,22 +16,22 @@
 \item Examples of consistent and inconsistent families
 \todo{Exercises}
 \item Kolmogorov's consistency theorem
-(\autoref{thm:kolmogorovconsistency})
+(\yaref{thm:kolmogorovconsistency})
 \end{itemize}

 \subsubsection{Limit theorems}
 \begin{itemize}
 \item Work with i.i.d.~random variables.
-\item Notions of convergence (\autoref{def:convergence})
+\item Notions of convergence (\yaref{def:convergence})
 \item Implications between different notions of convergence (very important) and counterexamples.
-(\autoref{thm:convergenceimplications})
+(\yaref{thm:convergenceimplications})

-\item Laws of large numbers: (\autoref{lln})
+\item Laws of large numbers: (\yaref{lln})
 \begin{itemize}
 \item WLLN: convergence in probability
 \item SLLN: almost sure convergence
 \end{itemize}
-\item \autoref{thm2} (building block for SLLN):
+\item \yaref{thm2} (building block for SLLN):
 Let $(X_n)$ be independent with mean $0$ and $\sum \sigma_n^2 < \infty$,
 then $ \sum X_n $ converges a.s.
 \begin{itemize}
@@ -43,12 +43,12 @@

 $\sum \frac{\pm 1}{ n^{\frac{1}{2} -\epsilon}}$ does not converge a.s.~for any $\epsilon > 0$.
 \end{itemize}
-\item Kolmogorov's inequality (\autoref{thm:kolmogorovineq})
-\item Kolmogorov's $0-1$-law. (\autoref{kolmogorov01})
+\item \yaref{thm:kolmogorovineq}
+\item \yaref{kolmogorov01}

 In particular, a series of independent random variables converges with probability $0$ or $1$.

-\item Kolmogorov's 3 series theorem. (\autoref{thm:kolmogorovthreeseries})
+\item \yaref{thm:kolmogorovthreeseries}
 \begin{itemize}
 \item What are those $3$ series?
 \item Applications
@ -59,15 +59,15 @@

\begin{itemize}
\item Definition of the Fourier transform
(\autoref{def:characteristicfunction})
(\yaref{def:characteristicfunction})
\item The Fourier transform uniquely determines the probability distribution.
It is bounded, so many theorems are easily applicable.
\item Uniqueness theorem (\autoref{charfuncuniqueness}),
inversion formula (\autoref{inversionformula}), ...
\item Levy's continuity theorem (\autoref{levycontinuity}),
(\autoref{genlevycontinuity})
\item Bochner's theorem for positive definite functions (\autoref{thm:bochner})
\item Bochner's theorem for the mass at a point (\autoref{bochnersformula})
\item \yaref{charfuncuniqueness},
\yaref{inversionformula}, ...
\item \yaref{levycontinuity},
\yaref{genlevycontinuity}
\item \yaref{thm:bochner}
\item \yaref{bochnersformula}
\item Related notions
\todo{TODO}
\begin{itemize}
@ -81,7 +81,7 @@
\paragraph{Weak convergence}
\begin{itemize}
\item Definition of weak convergence % (test against continuous, bounded functions).
(\autoref{def:weakconvergence})
(\yaref{def:weakconvergence})
\item Examples:
\begin{itemize}
\item $(\delta_{\frac{1}{n}})_n$,
@ -93,9 +93,8 @@
\item Non-examples: $(\delta_n)_n$
\item How does one prove weak convergence? How does one write this down in a clear way?
\begin{itemize}
\item \autoref{lec10_thm1},
\item Levy's continuity theorem
\ref{levycontinuity},
\item \yaref{lec10_thm1},
\item \yaref{levycontinuity},
\item Generalization of Levy's continuity theorem
\ref{genlevycontinuity}
\end{itemize}
|
|||
|
||||
\subsubsubsection{CLT}
|
||||
\begin{itemize}
|
||||
\item Statement of the CLT
|
||||
\item Statement of the \yaref{clt}
|
||||
\item Several versions:
|
||||
\begin{itemize}
|
||||
\item iid (\autoref{clt}),
|
||||
\item Lindeberg (\autoref{lindebergclt}),
|
||||
\item Lyapanov (\autoref{lyapunovclt})
|
||||
\item iid,
|
||||
\item \yaref{lindebergclt},
|
||||
\item \yaref{lyapunovclt}
|
||||
\end{itemize}
|
||||
\item How to apply this? Exercises!
|
||||
\end{itemize}
|
||||
|
@ -124,11 +123,11 @@
\subsubsection{Conditional expectation}
\begin{itemize}
\item Definition and existence of conditional expectation for $X \in L^1(\Omega, \cF, \bP)$
(\autoref{conditionalexpectation})
(\yaref{conditionalexpectation})
\item If $H = L^2(\Omega, \cF, \bP)$, then $\bE[ \cdot | \cG]$
is the (unique) projection onto the closed subspace $L^2(\Omega, \cG, \bP)$.
Why is this a closed subspace? Why is the projection orthogonal?
\item Radon-Nikodym Theorem \ref{radonnikodym}
\item \yaref{radonnikodym}
(Proof not relevant for the exam)
\item (Non-)examples of mutually absolutely continuous measures
What does singularity mean in this context? % TODO
@ -137,30 +136,30 @@
\subsubsection{Martingales}

\begin{itemize}
\item Definition of Martingales (\autoref{def:martingale})
\item Definition of Martingales (\yaref{def:martingale})
\item Doob's convergence theorem (\autoref{doobmartingaleconvergence}),
Upcrossing inequality (\autoref{lec17l1}, \autoref{lec17l2}, \autoref{lec17l3})
\item Doob's convergence theorem (\yaref{doobmartingaleconvergence}),
Upcrossing inequality (\yaref{lec17l1}, \yaref{lec17l2}, \yaref{lec17l3})
(downcrossings for submartingales)
\item Examples of Martingales converging a.s.~but not in $L^1$
(\autoref{ex:martingale-not-converging-in-l1})
(\yaref{ex:martingale-not-converging-in-l1})
\item Bounded in $L^2$ $\implies$ convergence in $L^2$
(\autoref{martingaleconvergencel2}).
(\yaref{martingaleconvergencel2}).
\item Martingale increments are orthogonal in $L^2$!
(\autoref{martingaleincrementsorthogonal})
(\yaref{martingaleincrementsorthogonal})
\item Doob's (sub-)martingale inequalities
(\autoref{dooblp}),
(\yaref{dooblp}),
\item $\bP[\sup_{k \le n} M_k \ge x]$ $\leadsto$ Look at martingale inequalities!
Estimates might come from Doob's inequalities if $(M_k)_k$ is a (sub-)martingale.
\item Doob's $L^p$ convergence theorem
(\autoref{ceismartingale}, \autoref{martingaleisce}).
(\yaref{ceismartingale}, \yaref{martingaleisce}).
\begin{itemize}
\item Why is $p > 1$ important? \textbf{Role of Banach-Alaoglu}
\item Why is $p > 1$ important? \textbf{Role of \yaref{banachalaoglu}}
\item This is an important proof.
\end{itemize}
\item Uniform integrability (\autoref{def:ui})
\item What are stopping times? (\autoref{def:stopping-time})
\item Uniform integrability (\yaref{def:ui})
\item What are stopping times? (\yaref{def:stopping-time})
\item (Non-)examples of stopping times
\item \textbf{Optional stopping theorem} (\autoref{optionalstopping})
\item \textbf{\yaref{optionalstopping}}
- be really comfortable with this.
\end{itemize}
@ -35,7 +35,7 @@ from the lecture on stochastic.
This notion of convergence was actually
defined during the course of the lecture,
but has been added here for completeness;
see \autoref{def:weakconvergence}.
see \yaref{def:weakconvergence}.
}
($X_n \xrightarrow{\text{d}} X$)
iff for every continuous, bounded $f: \R \to \R$
@ -128,7 +128,7 @@ from the lecture on stochastic.
\begin{subproof}
Let $F$ be the distribution function of $X$
and $(F_n)_n$ the distribution functions of $(X_n)_n$.
By \autoref{lec10_thm1}
By \yaref{lec10_thm1}
it suffices to show that $F_n(t) \to F(t)$ for all continuity
points $t$ of $F$.
Let $t$ be a continuity point of $F$.
@ -155,7 +155,7 @@ from the lecture on stochastic.
\label{claim:convimplpl1}
$X_n \xrightarrow{\bP} X \notimplies X_n \xrightarrow{L^1} X$.%
\footnote{Note that the implication holds under certain assumptions,
see \autoref{thm:l1iffuip}.}
see \yaref{thm:l1iffuip}.}
\end{claim}
\begin{subproof}
Take $([0,1], \cB([0,1]), \lambda)$
@ -170,7 +170,7 @@ from the lecture on stochastic.
$X_n \xrightarrow{\text{a.s.}} X \notimplies X_n \xrightarrow{L^1} X$.
\end{claim}
\begin{subproof}
We can use the same counterexample as in \autoref{claim:convimplpl1}:
We can use the same counterexample as in \yaref{claim:convimplpl1}:

$\bP[\lim_{n \to \infty} X_n = 0] \ge \bP[X_n = 0] = 1 - \frac{1}{n} \to 1$,
hence $X_n \xrightarrow{\text{a.s.}} 0$.
We have already seen that $X_n$ does not converge in $L^1$.
|
|||
\end{refproof}
|
||||
|
||||
\begin{theorem}[Bounded convergence theorem]
|
||||
\label{thm:boundedconvergence}
|
||||
\yalabel{Bounded Convergence Theorem}{Bounded convergence}{thm:boundedconvergence}
|
||||
Suppose that $X_n \xrightarrow{\bP} X$ and there exists
|
||||
some $K$ such that $|X_n| \le K$ for all $n$.
|
||||
Then $X_n \xrightarrow{L^1} X$.
|
||||
|
@ -262,7 +262,7 @@ from the lecture on stochastic.
|
|||
|
||||
\begin{theorem}+[Riemann-Lebesgue]
|
||||
%\footnote{see exercise 3.3}
|
||||
\label{riemann-lebesgue}
|
||||
\yalabel{Riemann-Lebesgue}{Riemann-Lebesgue}{riemann-lebesgue}
|
||||
Let $f: \R \to \R$ be integrable.
|
||||
Then
|
||||
\[
|
||||
|
@ -271,6 +271,7 @@ from the lecture on stochastic.
|
|||
\end{theorem}
|
||||
|
||||
\begin{theorem}+[Fubini-Tonelli]
|
||||
\yalabel{Fubuni-Tonelli}{Fubini}{thm:fubini}
|
||||
%\footnote{exercise sheet 1}
|
||||
Let $(\Omega_{i}, \cF_i, \bP_i), i \in \{0,1\}$
|
||||
be probability spaces and $\Omega \coloneqq \Omega_0 \otimes \Omega_1$,
|
||||
|
@ -296,6 +297,7 @@ from the lecture on stochastic.
|
|||
This is taken from section 6.1 of the notes on Stochastik.
|
||||
|
||||
\begin{theorem}[Markov's inequality]
|
||||
\yalabel{Markov's Inequality}{Markov}{thm:markov}
|
||||
Let $X$ be a random variable and $a > 0$.
|
||||
Then
|
||||
\[
|
||||
|
@ -311,6 +313,7 @@ This is taken from section 6.1 of the notes on Stochastik.
|
|||
\end{proof}
|
||||
|
||||
\begin{theorem}[Chebyshev's inequality]
|
||||
\yalabel{Chebyshev's Inequality}{Chebyshev}{thm:chebyshev}
|
||||
Let $X$ be a random variable and $a > 0$.
|
||||
Then
|
||||
\[
|
||||
|
@ -322,14 +325,14 @@ This is taken from section 6.1 of the notes on Stochastik.
|
|||
\begin{IEEEeqnarray*}{rCl}
|
||||
\bP[|X-\bE(X)| \ge a]
|
||||
&=& \bP[|X - \bE(X)|^2 \ge a^2]\\
|
||||
&\overset{\text{Markov}}{\le}& \frac{\bE[|X - \bE(X)|^2]}{a^2}.
|
||||
&\overset{\yaref{thm:markov}}{\le}& \frac{\bE[|X - \bE(X)|^2]}{a^2}.
|
||||
\end{IEEEeqnarray*}
|
||||
\end{proof}
|

How do we prove that something happens almost surely?
The first thing that should come to mind is:
\begin{lemma}[Borel-Cantelli]
\label{borelcantelli}
\yalabel{Borel-Cantelli}{Borel-Cantelli}{thm:borelcantelli}
If we have a sequence of events $(A_n)_{n \ge 1}$
such that $\sum_{n \ge 1} \bP(A_n) < \infty$,
then $\bP[A_n \text{ for infinitely many $n$}] = 0$
jrpie-yaref.sty (new file, 49 lines)
@ -0,0 +1,49 @@
\NeedsTeXFormat{LaTeX2e}
\ProvidesPackage{jrpie-yaref}[2023/07/28 - yet another ref]

\RequirePackage{hyperref}
\RequirePackage{amstext}

% Text mode, long form: print the stored long name followed by the
% number, as a single hyperlink; fall back to \autoref if no \yalabel
% was issued for this key.
\newcommand{\yaref@text@large}[1]{%
  \ifcsname yaref@longlabel@#1\endcsname%
    \hyperref[#1]{\csname yaref@longlabel@#1\endcsname\ \ref*{#1}}%
  \else%
    \autoref{#1}%
  \fi%
}
% Text mode, short form: print only the stored short name, hyperlinked;
% fall back to a plain (\ref{...}).
\newcommand{\yaref@text@small}[1]{%
  \ifcsname yaref@shortlabel@#1\endcsname%
    \hyperref[#1]{\csname yaref@shortlabel@#1\endcsname}%
  \else%
    (\ref{#1})%
  \fi%
}
% Math-mode wrappers: reuse the text versions inside \text (amstext).
\newcommand{\yaref@math@large}[1]{%
  \text{\yaref@text@large{#1}}%
}
\newcommand{\yaref@math@small}[1]{%
  \text{\yaref@text@small{#1}}%
}
\newcommand{\yaref@math@verysmall}[1]{%
  \yaref@math@small{#1}%
}

% \yalabel{<long name>}{<short name>}{<key>}: store both names for <key>,
% globally for the current run and via the .aux file (so that forward
% references resolve on the next run), then place an ordinary \label{<key>}.
\newcommand{\yalabel}[3]{%
  \write\@auxout{\noexpand\expandafter\noexpand\gdef\noexpand\csname yaref@longlabel@#3\noexpand\endcsname{#1}}%
  \write\@auxout{\noexpand\expandafter\noexpand\gdef\noexpand\csname yaref@shortlabel@#3\noexpand\endcsname{#2}}%
  \expandafter\gdef\csname yaref@longlabel@#3\endcsname{#1}%
  \expandafter\gdef\csname yaref@shortlabel@#3\endcsname{#2}%
  \label{#3}%
}

% \yaref{<key>}: in text, use the long form; in math, let \mathchoice
% pick the long form in display/text style and the short form in
% script/scriptscript style (e.g.~inside \overset).
\newcommand{\yaref}[1]{%
  \relax\ifmmode%
    \mathchoice
    {\yaref@math@large{#1}} % display style
    {\yaref@math@large{#1}} % text style
    {\yaref@math@small{#1}} % script style
    {\yaref@math@verysmall{#1}} % scriptscript style
  \else%
    \yaref@text@large{#1}%
  \fi%
}
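Usage recap, assembled from the changes above (not itself part of the package file):

\yalabel{Markov's Inequality}{Markov}{thm:markov} % inside the theorem environment
... \yaref{thm:markov} ...                        % text mode: long name plus number, hyperlinked
&\overset{\yaref{thm:markov}}{\le}&               % math mode, script position: short form "Markov"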
@ -8,6 +8,7 @@
\usepackage[index]{mkessler-vocab}
\usepackage{mkessler-code}
\usepackage{jrpie-math}
\usepackage{jrpie-yaref}
\usepackage[normalem]{ulem}
\usepackage{pdflscape}
\usepackage{longtable}