\lecture{2}{}{}

\section{Independence and product measures}

In order to define the notion of independence, we first need to construct
product measures.

The finite case of a product is straightforward:

\begin{theorem}[Product measure (finite)]
Let $(\Omega_1, \cF_1, \bP_1)$ and $(\Omega_2, \cF_2, \bP_2)$ be probability spaces.
Let $\Omega \coloneqq \Omega_1 \times \Omega_2$
and $\cR \coloneqq \{A_1 \times A_2 \mid A_1 \in \cF_1, A_2 \in \cF_2\}$.
Let $\cF \coloneqq \sigma(\cR)$ (the $\sigma$-algebra generated by $\cR$).
Then there exists a unique probability measure $\bP$ on $(\Omega, \cF)$
such that for every rectangle $A_1 \times A_2 \in \cR$,
$\bP(A_1 \times A_2) = \bP_1(A_1) \cdot \bP_2(A_2)$.
\end{theorem}
\begin{proof}
See Theorem 5.1.1 in the lecture notes on Stochastik.
\end{proof}
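
For instance, taking $\Omega_1 = \Omega_2 = \{1, \ldots, 6\}$ with $\bP_1, \bP_2$
the uniform measures yields the uniform measure on $\{1, \ldots, 6\}^2$,
the standard model for two independent throws of a die.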

We now want to construct a product measure for infinitely many factors.

\begin{definition}[Independence]
Random variables $X_1, X_2, \ldots, X_n$ are called
\vocab{mutually independent} if
\[
\forall a_1,\ldots,a_n \in \R :
\bP[X_1 \le a_1, \ldots, X_n \le a_n]
= \prod_{i=1}^n \bP[X_i \le a_i].
\]
This is equivalent to
\[
\forall B_1, \ldots, B_n \in \cB(\R):
\bP[X_1 \in B_1, \ldots, X_n \in B_n]
= \prod_{i=1}^n \bP[X_i \in B_i].
\]
The nontrivial implication holds because the sets
$(-\infty, a_1] \times \ldots \times (-\infty, a_n]$
form a $\pi$-system generating $\cB(\R^n)$.
\end{definition}

\begin{example}
Suppose we throw a die twice. Let $A \coloneqq \{\text{first throw even}\}$,
$B \coloneqq \{\text{second throw even}\}$
and $C \coloneqq \{\text{sum even}\}$.

It is easy to see that the events are pairwise independent,
but not mutually independent.
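Indeed, since $A \cap B \subseteq C$, we have
\[
\bP[A \cap B \cap C] = \bP[A \cap B] = \frac{1}{4}
\neq \frac{1}{8} = \bP[A] \cdot \bP[B] \cdot \bP[C],
\]
while each of $\bP[A \cap B]$, $\bP[A \cap C]$ and $\bP[B \cap C]$ equals
$\frac{1}{4} = \frac{1}{2} \cdot \frac{1}{2}$, matching the product of the
individual probabilities.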
\end{example}

\begin{definition}
Let $(\Omega, \cF, \bP)$ be a probability space
and $X : (\Omega, \cF) \to (\R, \cB(\R))$ a random variable.
Then $\Q(\cdot) \coloneqq \bP [ X \in \cdot ]$ is called the \vocab{distribution}
of $X$ under $\bP$.
\end{definition}
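
For example, if $X$ is the outcome of a single fair die throw, then its distribution
is $\Q = \frac{1}{6} \sum_{k=1}^{6} \delta_k$, where $\delta_k$ denotes the Dirac
measure at $k$; in particular, $\Q[\{2,4,6\}] = \frac{1}{2}$.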

Let $X_1, \ldots, X_n$ be random variables and $\Q^{\otimes}(\cdot ) \coloneqq \bP[(X_1,\ldots, X_n) \in \cdot ]$
their \vocab{joint distribution}.
Then $\Q^{\otimes}$ is a probability measure on $(\R^n, \cB(\R^n))$.

The definition of mutual independence can be rephrased as follows:
\begin{fact}
$X_1,\ldots, X_n$ are mutually independent iff $\Q^{\otimes} = \Q_1 \otimes \ldots \otimes \Q_n$,
where $\Q_i$ is the distribution of $X_i$.
In this setting, $\Q_i$ is called the \vocab{marginal distribution} of $X_i$.
\end{fact}
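
Spelled out on rectangles, the condition $\Q^{\otimes} = \Q_1 \otimes \ldots \otimes \Q_n$
says precisely that for all $B_1, \ldots, B_n \in \cB(\R)$
\[
\Q^{\otimes}[B_1 \times \ldots \times B_n]
= \bP[X_1 \in B_1, \ldots, X_n \in B_n]
= \prod_{i=1}^n \bP[X_i \in B_i]
= \prod_{i=1}^n \Q_i[B_i],
\]
which is the second condition in the definition of mutual independence;
since the rectangles generate $\cB(\R^n)$, this already determines $\Q^{\otimes}$ uniquely.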

By constructing an infinite product, we can thus extend the notion of independence
to an infinite number of random variables.

\begin{goal}
Can we construct infinitely many independent random variables?
\end{goal}

\begin{definition}[Consistent family of probability measures]
\label{def:consistentfamily}
Let $\bP_n, n \in \N$, be a family of probability measures on $(\R^n, \cB(\R^n))$.
The family is called \vocab{consistent} if
$\bP_{n+1}[B_1 \times B_2 \times \ldots \times B_n \times \R] = \bP_n[B_1 \times \ldots \times B_n]$
for all $n \in \N$ and $B_1, \ldots, B_n \in \cB(\R)$.
\end{definition}

\begin{theorem}[Kolmogorov extension / consistency theorem]
\label{thm:kolmogorovconsistency}
Informally:
``Probability measures are determined by their finite-dimensional marginals
(as long as these marginals are nice).''

Let $\bP_n, n \in \N$, be a consistent family of probability measures on $(\R^n, \cB(\R^n))$.
Then there exists a unique probability measure $\bP^{\otimes}$
on $(\R^{\infty}, \cB(\R^{\infty}))$ (where $\cB(\R^{\infty})$ still has to be defined)
such that
\[
\forall n \in \N, B_1, \ldots, B_n \in \cB(\R):
\bP^{\otimes}\left[\{x \in \R^{\infty} : x_i \in B_i \ \forall 1 \le i \le n\}\right]
= \bP_n[B_1 \times \ldots \times B_n].
\]
\end{theorem}

\begin{remark}
Kolmogorov's theorem can be strengthened to the case of arbitrary
index sets. However, this requires a different notion of consistency.
\end{remark}

\begin{example}[A consistent family]
Let $(F_i)_{i \in \N}$ be a sequence of probability distribution functions
and, for each $n \in \N$, let $\bP_n$ be the probability measure on $\R^n$ defined
by
\[
\bP_n[(a_1,b_1] \times \ldots \times (a_n, b_n]]
\coloneqq (F_1(b_1) - F_1(a_1)) \cdot \ldots \cdot (F_n(b_n) - F_n(a_n)).
\]

It is easy to see that each $\bP_n$ is a probability measure.
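
Consistency of this family is also easy to verify: since
$\lim_{b \to \infty} F_{n+1}(b) = 1$ and $\lim_{a \to -\infty} F_{n+1}(a) = 0$,
\[
\bP_{n+1}[(a_1,b_1] \times \ldots \times (a_n,b_n] \times \R]
= \prod_{i=1}^{n} (F_i(b_i) - F_i(a_i)) \cdot 1
= \bP_n[(a_1,b_1] \times \ldots \times (a_n,b_n]].
\]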

Define $X_i(\omega) \coloneqq \omega_i$, where $\omega = (\omega_1, \ldots, \omega_n)$.
Then $X_1, \ldots, X_n$ are mutually independent with $F_i$ being
the distribution function of $X_i$.

If $F_1 = \ldots = F_n$, then $X_1, \ldots, X_n$ are i.i.d.
\end{example}
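
Combining this example with the Kolmogorov consistency theorem thus answers the goal above:
for any sequence of distribution functions $(F_i)_{i \in \N}$ there is a unique probability
measure $\bP^{\otimes}$ on $\R^{\infty}$ under which the coordinate maps
$X_i(\omega) \coloneqq \omega_i$ are mutually independent with distribution functions $F_i$.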