\lecture{23}{2023-07-06}{Recap}
\subsection{Recap}

In this lecture we recall the most important points from the course.

\subsubsection{Construction of iid random variables}
\begin{itemize}
	\item Definition of a consistent family (\autoref{def:consistentfamily})
	\item Important construction: Consider a distribution function $F$ and define
	      \[
		      \mu_n\left( (a_1, b_1] \times \ldots \times (a_n, b_n] \right) \coloneqq \prod_{i=1}^{n} \left( F(b_i) - F(a_i) \right).
	      \]
	\item Examples of consistent and inconsistent families \todo{Exercises}
	\item Kolmogorov's consistency theorem (\autoref{thm:kolmogorovconsistency})
\end{itemize}

\subsubsection{Limit theorems}
\begin{itemize}
	\item Work with iid.~random variables.
	\item Notions of convergence (\autoref{def:convergence})
	\item Implications between different notions of convergence (very important) and counterexamples (\autoref{thm:convergenceimplications})
	\item Laws of large numbers (\autoref{lln})
	      \begin{itemize}
		      \item WLLN: convergence in probability
		      \item SLLN: almost sure convergence
	      \end{itemize}
	\item \autoref{thm2} (building block for the SLLN): Let $(X_n)$ be independent with mean $0$ and $\sum_n \sigma_n^2 < \infty$, where $\sigma_n^2 = \bE[X_n^2]$. Then $\sum_n X_n$ converges a.s.
	      \begin{itemize}
		      \item Counterexamples showing that $\impliedby$ does not hold in general are important
		      \item $\impliedby$ holds for iid.~uniformly bounded random variables
		      \item Application: $\sum_{n=1}^{\infty} \frac{\pm 1}{n^{\frac{1}{2} + \epsilon}}$ (with iid.~symmetric random signs) converges a.s.~for all $\epsilon > 0$. $\sum_n \frac{\pm 1}{n^{\frac{1}{2} - \epsilon}}$ does not converge a.s.~for any $\epsilon > 0$.
	      \end{itemize}
	\item Kolmogorov's inequality (\autoref{thm:kolmogorovineq})
	\item Kolmogorov's $0$--$1$ law (\autoref{kolmogorov01}). In particular, a series of independent random variables converges with probability $0$ or $1$.
	\item Kolmogorov's three-series theorem (\autoref{thm:kolmogorovthreeseries})
	      \begin{itemize}
		      \item What are those three series?
		      \item Applications
	      \end{itemize}
\end{itemize}

\subsubsection{Fourier transform / characteristic functions / weak convergence}
\begin{itemize}
	\item Definition of the Fourier transform (\autoref{def:characteristicfunction})
	\item The Fourier transform uniquely determines the probability distribution. It is bounded, so many theorems are easily applicable.
	\item Uniqueness theorem (\autoref{charfuncuniqueness}), inversion formula (\autoref{inversionformula}), \ldots
	\item L\'evy's continuity theorem (\autoref{levycontinuity}, \autoref{genlevycontinuity})
	\item Bochner's theorem for positive definite functions (\autoref{thm:bochner})
	\item Bochner's formula for the mass at a point (\autoref{bochnersformula})
	\item Related notions \todo{TODO}
	      \begin{itemize}
		      \item Laplace transforms $\bE[e^{-\lambda X}]$ for $\lambda > 0$ (not done in the lecture, but still useful)
		      \item Moments $\bE[X^k]$ (not done in the lecture, but still useful). Under suitable growth conditions (e.g.~Carleman's condition) the moments together uniquely determine the distribution; in general they need not.
	      \end{itemize}
\end{itemize}

\paragraph{Weak convergence}
\begin{itemize}
	\item Definition of weak convergence (test against continuous, bounded functions) (\autoref{def:weakconvergence})
	\item Examples:
	      \begin{itemize}
		      \item $(\delta_{\frac{1}{n}})_n$,
		      \item $(\frac{1}{2} \delta_{-\frac{1}{n}} + \frac{1}{2} \delta_{\frac{1}{n}})_n$,
		      \item $(\cN(0, \frac{1}{n}))_n$,
		      \item $(\frac{1}{n} \delta_n + (1 - \frac{1}{n}) \delta_{\frac{1}{n}})_n$.
	      \end{itemize}
	\item Non-examples: $(\delta_n)_n$
	\item How does one prove weak convergence? How does one write this down in a clear way? See the sketch after this list.
\end{itemize}
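To address the last question, here is a minimal sketch (not from the lecture) of how such an argument can be written down for the first example, testing against bounded continuous functions as in \autoref{def:weakconvergence}: for every bounded continuous $f \colon \mathbb{R} \to \mathbb{R}$,
\[
	\int f \,\mathrm{d}\delta_{\frac{1}{n}} = f\left( \tfrac{1}{n} \right) \xrightarrow{n \to \infty} f(0) = \int f \,\mathrm{d}\delta_0
\]
by continuity of $f$ at $0$, so $(\delta_{\frac{1}{n}})_n$ converges weakly to $\delta_0$. For the non-example $(\delta_n)_n$, testing against the bounded continuous function $f(x) = \cos(\pi x)$ gives $\int f \,\mathrm{d}\delta_n = (-1)^n$, which does not converge, so $(\delta_n)_n$ has no weak limit.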
\paragraph{Convolution}
\begin{itemize}
	\item Definition of convolution. \todo{Copy from exercise sheet and write a subsection about this}
	\item $X_i \sim \mu_i$ independent $\implies X_1 + \ldots + X_n \sim \mu_1 \ast \ldots \ast \mu_n$.
\end{itemize}

\subsubsection{CLT}
\begin{itemize}
	\item Statement of the CLT
	\item Several versions:
	      \begin{itemize}
		      \item iid (\autoref{clt}),
		      \item Lindeberg (\autoref{lindebergclt}),
		      \item Lyapunov (\autoref{lyapunovclt})
	      \end{itemize}
	\item How to apply this? Exercises!
\end{itemize}

\subsubsection{Conditional expectation}
\begin{itemize}
	\item Definition and existence of the conditional expectation for $X \in L^1(\Omega, \cF, \bP)$
	\item If $H = L^2(\Omega, \cF, \bP)$, then $\bE[\cdot | \cG]$ is the (unique) orthogonal projection onto the closed subspace $L^2(\Omega, \cG, \bP)$. Why is this a closed subspace? Why is the projection orthogonal?
	\item Radon-Nikodym theorem (proof not relevant for the exam)
	\item (Non-)examples of mutually absolutely continuous measures. What does singularity mean in this context? % TODO
\end{itemize}

\subsubsection{Martingales}
\begin{itemize}
	\item Definition of martingales
	\item Doob's convergence theorem, upcrossing inequality (downcrossings for submartingales)
	\item Examples of martingales converging a.s.~but not in $L^1$
	\item Bounded in $L^2$ $\implies$ convergence in $L^2$.
	\item Martingale increments are orthogonal in $L^2$!
	\item Doob's (sub-)martingale inequalities
	\item $\bP[\sup_{k \le n} M_k \ge x]$ $\leadsto$ look at martingale inequalities! Estimates might come from Doob's inequalities if $(M_k)_k$ is a (sub-)martingale.
	\item Doob's $L^p$ convergence theorem
	      \begin{itemize}
		      \item Why is $p > 1$ important? \textbf{Role of Banach-Alaoglu}
		      \item This is an important proof.
	      \end{itemize}
	\item Uniform integrability % TODO
	\item What are stopping times?
	\item (Non-)examples of stopping times
	\item \textbf{Optional stopping theorem}: be really comfortable with this.
\end{itemize}

\subsubsection{Markov Chains}
\begin{itemize}
	\item What are Markov chains?
	\item State space, initial distribution
	\item Important examples
	\item \textbf{What is the relation between martingales and Markov chains?} $u$ \vocab{harmonic} $\iff Lu = 0$ (where $L$ denotes the generator of the chain). $u$ (sub-/super-)harmonic $\iff$ for a Markov chain $(X_n)$, $u(X_n)$ is a (sub-/super-)martingale. See the sketch after this list.
	\item Dirichlet problem (not done in the lecture)
	\item \ldots (more in Probability Theory II)
\end{itemize}
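As a closing illustration (a sketch, not from the lecture) tying together harmonic functions, martingales and optional stopping: for simple random walk $(X_n)$ on $\mathbb{Z}$ started at $0$, the identity function $u(x) = x$ satisfies $Lu(x) = \frac{1}{2} u(x+1) + \frac{1}{2} u(x-1) - u(x) = 0$, so $u$ is harmonic and $(X_n)$ is a martingale. For integers $a, b \ge 1$, $\tau \coloneqq \inf \{ n : X_n \in \{-a, b\} \}$ is a stopping time with $\tau < \infty$ a.s., and the stopped martingale is bounded, so the optional stopping theorem yields
\[
	0 = \bE[X_0] = \bE[X_\tau] = -a \, \bP[X_\tau = -a] + b \, \bP[X_\tau = b],
\]
which together with $\bP[X_\tau = -a] + \bP[X_\tau = b] = 1$ gives $\bP[X_\tau = b] = \frac{a}{a + b}$.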