Recall: $(\Omega, \cF, \bP)$ is a probability space, $X : (\Omega, \cF) \to (\R, \cB(\R))$ a random variable.
Then $\Q(\cdot) \coloneqq \bP[X \in \cdot]$ is the distribution of $X$ under $\bP$.
\section{Independence and product measures}
In order to define the notion of independence, we first construct
product measures, which allow us to consider several random variables
at the same time.
The finite case of a product is straightforward:
\begin{theorem}[Product measure (finite)]
Let $(\Omega_1, \cF_1, \bP_1)$ and $(\Omega_2, \cF_2, \bP_2)$ be probability spaces.
Let $\Omega \coloneqq \Omega_1 \times \Omega_2$
and $\cR \coloneqq \{A_1 \times A_2 \mid A_1 \in \cF_1, A_2 \in \cF_2 \}$.
Let $\cF \coloneqq \sigma(\cR)$ (the $\sigma$-algebra generated by $\cR$).
Then there exists a unique probability measure $\bP$ on $(\Omega, \cF)$
such that for every rectangle $A_1 \times A_2 \in \cR$, $\bP(A_1 \times A_2) = \bP_1(A_1) \cdot \bP_2(A_2)$.
\end{theorem}
\begin{proof}
See Theorem 5.1.1 in the lecture notes on Stochastik.
\end{proof}
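As a concrete illustration (our example, not from the lecture): take $\Omega_1 = \Omega_2 = [0,1]$ with $\cF_i = \cB([0,1])$ and $\bP_i = \lambda$ the Lebesgue measure. The product measure $\bP = \lambda \otimes \lambda$ is then the uniform distribution on the unit square:
\[
\bP\big((a_1,b_1] \times (a_2,b_2]\big) = (b_1 - a_1)(b_2 - a_2)
\quad \text{for } 0 \le a_i \le b_i \le 1,
\]
i.e.\ exactly the area of the rectangle.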
We now want to construct a product measure for infinite products.
\begin{definition}[Independence]
A collection $X_1, X_2, \ldots, X_n$ of random variables is called
\vocab{mutually independent} if
\[
\forall a_1,\ldots,a_n \in \R :
\bP[X_1 \le a_1, \ldots, X_n \le a_n]
= \prod_{i=1}^n \bP[X_i \le a_i].
\]
This is equivalent to
\[
\forall B_1, \ldots, B_n \in \cB(\R):
\bP[X_1 \in B_1, \ldots, X_n \in B_n]
= \prod_{i=1}^n \bP[X_i \in B_i].
\]
\end{definition}
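The equivalence is stated without proof; the standard argument (our addition, for completeness) goes via the uniqueness theorem for measures. The sets
\[
(-\infty, a_1] \times \ldots \times (-\infty, a_n], \qquad a_1, \ldots, a_n \in \R,
\]
form a $\pi$-system generating $\cB(\R^n)$. Hence, if the joint distribution of $(X_1, \ldots, X_n)$ and the product of the individual distributions agree on these sets (the first condition), they agree on all of $\cB(\R^n)$ (the second condition).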
\begin{example}
Suppose we roll a die twice. Let $A \coloneqq \{\text{first throw even}\}$,
$B \coloneqq \{\text{second throw even}\}$
and $C \coloneqq \{\text{sum even}\} $.
Are $\One_A, \One_B, \One_C$ mutually independent random variables?
\end{example}
It is easy to see that these random variables are pairwise independent,
but not mutually independent.
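Filling in the computation (our addition): by symmetry $\bP[A] = \bP[B] = \bP[C] = \tfrac12$, and since the sum of two even numbers is even, $A \cap C = B \cap C = A \cap B$. Thus
\[
\bP[A \cap B] = \bP[A \cap C] = \bP[B \cap C] = \tfrac14,
\]
which matches the pairwise products $\tfrac12 \cdot \tfrac12$, but
\[
\bP[A \cap B \cap C] = \bP[A \cap B] = \tfrac14
\neq \tfrac18 = \bP[A]\,\bP[B]\,\bP[C].
\]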
The definition of mutual independence can be rephrased as follows:
let $X_1, X_2, \ldots, X_n$ be random variables and define $\Q^{\otimes}(\cdot) \coloneqq \bP[(X_1,\ldots, X_n) \in \cdot]$.
Then $\Q^{\otimes}$ is a probability measure on $\R^n$.
\begin{fact}
$X_1,\ldots, X_n$ are mutually independent iff $\Q^{\otimes} = \Q_1 \otimes \ldots \otimes \Q_n$,
where $\Q_i$ denotes the distribution of $X_i$.
\end{fact}
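A sketch of one direction for $n = 2$ (the general case is analogous; this argument is standard, not spelled out in the notes): if $X_1, X_2$ are mutually independent, then for all $B_1, B_2 \in \cB(\R)$
\[
\Q^{\otimes}(B_1 \times B_2) = \bP[X_1 \in B_1, X_2 \in B_2]
= \bP[X_1 \in B_1] \cdot \bP[X_2 \in B_2]
= (\Q_1 \otimes \Q_2)(B_1 \times B_2),
\]
and since the rectangles form a $\pi$-system generating $\cB(\R^2)$, the two measures coincide everywhere.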
By constructing an infinite product, we can thus extend the notion of independence
to an infinite number of r.v.s.
\begin{goal}
Can we construct infinitely many independent random variables?
\end{goal}
\begin{definition}[Consistent family of probability measures]
Let $\bP_n, n \in \N$, be a family of probability measures on $(\R^n, \cB(\R^n))$.
The family is called \vocab{consistent} if
$\bP_{n+1}[B_1 \times B_2 \times \ldots \times B_n \times \R] = \bP_n[B_1 \times \ldots \times B_n]$
for all $n \in \N$ and $B_i \in \cB(\R)$.
\end{definition}
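For instance (our example): let $\mu \coloneqq \frac{1}{2}(\delta_0 + \delta_1)$ model a fair coin flip and set $\bP_n \coloneqq \mu^{\otimes n}$. On rectangles,
\[
\bP_{n+1}[B_1 \times \ldots \times B_n \times \R]
= \Big( \prod_{i=1}^{n} \mu(B_i) \Big) \cdot \underbrace{\mu(\R)}_{=1}
= \bP_n[B_1 \times \ldots \times B_n],
\]
so the family is consistent; it describes an infinite sequence of fair coin flips.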
\begin{theorem}[Kolmogorov extension / consistency theorem]
Informally:
``Probability measures are determined by their finite-dimensional marginals
(as long as these marginals are nice).''
Let $\bP_n, n \in \N$, be probability measures on $(\R^n, \cB(\R^n))$
which are \vocab{consistent}.
Then there exists a unique probability measure $\bP^{\otimes}$
on $(\R^\infty, \cB(\R^\infty))$ (where $\cB(\R^\infty)$ still has to be defined)
such that
\[
\forall n \in \N,\ B_1,\ldots, B_n \in \cB(\R):
\bP^\otimes\big[\{x \in \R^\infty : x_i \in B_i \ \forall\, 1 \le i \le n\}\big]
= \bP_n[B_1 \times \ldots \times B_n].
\]
\end{theorem}
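For completeness (the standard definition, added here): $\cB(\R^\infty)$ is the $\sigma$-algebra generated by the cylinder sets,
\[
\cB(\R^\infty) \coloneqq \sigma\Big( \big\{ x \in \R^\infty : x_1 \in B_1, \ldots, x_n \in B_n \big\} : n \in \N,\ B_1, \ldots, B_n \in \cB(\R) \Big).
\]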
\begin{remark}
Kolmogorov's theorem can be strengthened to the case of arbitrary
index sets. However, this requires a different notion of consistency.
\end{remark}
\begin{example}[A consistent family]
Let $F_1, F_2, \ldots$ be probability distribution functions
and let $\bP_n$ be the probability measure on $\R^n$ defined
by
\[
\bP_n[(a_1,b_1] \times \ldots \times (a_n, b_n]]
\coloneqq (F_1(b_1) - F_1(a_1)) \cdot \ldots \cdot (F_n(b_n) - F_n(a_n)).
\]
It is easy to see that each $\bP_n$ is a probability measure
and that the family is consistent.
Let $\bP^{\otimes}$ be the measure on $\R^\infty$ given by Kolmogorov's theorem,
and define $X_i(\omega) \coloneqq \omega_i$ where $\omega = (\omega_1, \omega_2, \ldots)$.
Then $X_1, X_2, \ldots$ are mutually independent with $F_i$ being
the distribution function of $X_i$.
If $F_1 = F_2 = \ldots$, then $X_1, X_2, \ldots$ are i.i.d.
\end{example}
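The consistency claimed above can be checked directly (our computation): since $F_{n+1}$ is a distribution function, $\lim_{b \to \infty} F_{n+1}(b) - \lim_{a \to -\infty} F_{n+1}(a) = 1 - 0 = 1$, hence
\[
\bP_{n+1}\big[(a_1,b_1] \times \ldots \times (a_n,b_n] \times \R\big]
= \prod_{i=1}^{n} \big(F_i(b_i) - F_i(a_i)\big)
= \bP_n\big[(a_1,b_1] \times \ldots \times (a_n,b_n]\big],
\]
and agreement on these half-open rectangles extends to all Borel sets $B_1 \times \ldots \times B_n$ by the uniqueness theorem for measures.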