2023-07-05 17:53:41 +02:00
\lecture { 10} { 2023-05-09} { }
2023-05-10 18:56:36 +02:00
First, we will prove some of the most important facts about Fourier transforms.
We consider $ ( \R , \cB ( \R ) ) $ .
\begin { notation}
By $ M _ 1 ( \R ) $ we denote the set of all probability measures on $ \left ( \R , \cB ( \R ) \right ) $ .
\end { notation}
2023-07-12 15:25:25 +02:00
For all $ \bP \in M _ 1 ( \R ) $ we define $ \phi _ { \bP } ( t ) = \int _ { \R } e ^ { \i t x } \bP ( \dif x ) $ .
2023-05-10 18:56:36 +02:00
If $ X: ( \Omega , \cF ) \to ( \R , \cB ( \R ) ) $ is a random variable, we write
$ \phi _ X ( t ) \coloneqq \bE [ e ^ { \i t X } ] = \phi _ { \mu } ( t ) $ ,
where $ \mu = \bP X ^ { - 1 } $ .
\begin { refproof} { inversionformula}
2023-07-28 03:45:37 +02:00
We will prove that the limit in the RHS of \yaref { invf}
2023-05-10 18:56:36 +02:00
exists and is equal to the LHS.
Note that the term on the RHS is integrable, as
\[
\lim_{t \to 0} \frac{e^{-\i t b} - e^{-\i t a}}{- \i t} \phi(t) = b - a
\]
2023-05-10 18:56:36 +02:00
and note that $ \phi ( 0 ) = 1 $ and $ | \phi ( t ) | \le 1 $ .
% TODO think about this
We have
\begin { IEEEeqnarray*} { rCl}
2023-07-12 15:25:25 +02:00
& & \lim _ { T \to \infty } \frac { 1} { 2 \pi } \int _ { -T} ^ T \int _ { \R } \frac { e^ { -\i t b} - e^ { -\i t a} } { -\i t} e^ { \i t x} \dif t \bP (\dif x)\\
& \overset { \text { Fubini for $ L ^ 1 $ } } { =} & \lim _ { T \to \infty } \frac { 1} { 2 \pi } \int _ { \R } \int _ { -T} ^ T \frac { e^ { -\i t b} - e^ { -\i t a} } { -\i t} e^ { \i t x} \dif t \bP (\dif x)\\
& =& \lim_{T \to \infty} \frac{1}{2 \pi} \int_{\R} \int_{-T}^T \frac{e^{\i t (x-b)} - e^{\i t (x-a)}}{-\i t} \dif t \bP (\dif x)\\
2023-07-12 16:09:48 +02:00
& =& \lim _ { T \to \infty } \frac { 1} { 2 \pi } \int _ { \R } \underbrace { \int _ { -T} ^ T \left [ \frac{\cos(t (x-b)) - \cos(t(x-a))}{-\i t}\right] \dif t} _ { =0 \text { , as the function is odd} } \bP (\dif x) \\
& & + \lim _ { T \to \infty } \frac { 1} { 2\pi } \int _ { \R } \int _ { -T} ^ T \frac { \sin (t ( x - b)) - \sin (t(x-a))} { -t} \dif t \bP (\dif x)\\
2023-07-12 15:25:25 +02:00
& =& \lim _ { T \to \infty } \frac { 1} { \pi } \int _ \R \int _ { 0} ^ T \frac { \sin (t(x-a)) - \sin (t(x-b))} { t} \dif t \bP (\dif x)\\
2023-07-28 03:45:37 +02:00
& \overset { \substack { \yaref { fact:sincint} ,\text { dominated convergence} } } { =} &
2023-07-12 16:09:48 +02:00
\frac{1}{\pi} \int_{\R} \left[ \left( -\frac{\pi}{2} \One_{x < a} + \frac{\pi}{2} \One_{x > a} \right)
- \left( -\frac{\pi}{2} \One_{x < b} + \frac{\pi}{2} \One_{x > b} \right) \right] \bP (\dif x)\\
& =& \frac { 1} { 2} \bP (\{ a\} ) + \frac { 1} { 2} \bP (\{ b\} ) + \bP ((a,b))\\
& =& \frac{F(b) + F(b-)}{2} - \frac{F(a) + F(a-)}{2}
\end { IEEEeqnarray*}
\end { refproof}
\begin { fact}
2023-07-14 22:07:36 +02:00
\label { fact:sincint}
2023-05-10 18:56:36 +02:00
\[
2023-07-12 15:25:25 +02:00
\int _ 0^ \infty \frac { \sin x} { x} \dif x = \frac { \pi } { 2}
2023-07-06 00:36:26 +02:00
\]
2023-05-10 18:56:36 +02:00
where the LHS is an improper Riemann-integral.
Note that the LHS is not Lebesgue-integrable.
It follows that
\begin { IEEEeqnarray*} { rCl}
2023-07-13 00:06:30 +02:00
\lim _ { T \to \infty } \int _ 0^ T \frac { \sin (t(x-a))} { t} \dif t & =&
2023-05-10 18:56:36 +02:00
\begin { cases}
2023-07-13 00:06:30 +02:00
- \frac { \pi } { 2} & \text { if } x < a,\\
0 & \text { if } x = a,\\
\frac { \pi } { 2} & \text { if } x > a.
2023-05-10 18:56:36 +02:00
\end { cases}
\end { IEEEeqnarray*}
\end { fact}
\begin { theorem} % Theorem 3
\label { thm:lec10_ 3}
2023-07-12 16:39:58 +02:00
Let $ \bP \in M _ 1 ( \R ) $ such that $ \phi _ \bP \in L ^ 1 ( \lambda ) $ .
2023-05-10 18:56:36 +02:00
Then $ \bP $ has a continuous probability density given by
\[
2023-07-12 16:39:58 +02:00
f(x) = \frac { 1} { 2 \pi } \int _ { \R } e^ { -\i t x} \phi _ { \bP } (t) \dif t.
2023-07-06 00:36:26 +02:00
\]
2023-05-10 18:56:36 +02:00
\end { theorem}
\begin { example}
\begin { itemize}
2023-07-12 16:39:58 +02:00
\item Let $ \bP = \delta _ { 0 } $ .
2023-05-10 18:56:36 +02:00
Then
\[
2023-07-13 00:06:30 +02:00
\phi _ { \bP } (t) = \int e^ { \i t x} \delta _ 0(\dif x) = e^ { \i t 0 } = 1
2023-07-06 00:36:26 +02:00
\]
2023-05-10 18:56:36 +02:00
\item Let $ \bP = \frac { 1 } { 2 } \delta _ 1 + \frac { 1 } { 2 } \delta _ { - 1 } $ .
Then
\[
2023-07-12 16:39:58 +02:00
\phi _ { \bP } (t) = \frac { 1} { 2} e^ { \i t} + \frac { 1} { 2} e^ { - \i t} = \cos (t)
2023-07-06 00:36:26 +02:00
\]
2023-05-10 18:56:36 +02:00
\end { itemize}
\end { example}
\begin { refproof} { thm:lec10_ 3}
2023-07-12 15:25:25 +02:00
Let $ f ( x ) \coloneqq \frac { 1 } { 2 \pi } \int _ { \R } e ^ { - \i t x } \phi ( t ) \dif t $ .
2023-05-10 18:56:36 +02:00
\begin { claim}
If $ x _ n \to x $ , then $ f ( x _ n ) \to f ( x ) $ .
\end { claim}
\begin { subproof}
If $x_n \to x$, then
$e^{-\i t x_n} \phi(t) \xrightarrow{n \to \infty} e^{-\i t x} \phi(t)$
for all $t$.
Since
2023-05-10 18:56:36 +02:00
\[
|e^ { -\i t x} \phi (t)| \le |\phi (t)|
2023-07-06 00:36:26 +02:00
\]
2023-07-12 16:39:58 +02:00
and $ \phi \in L ^ 1 $ ,
we get $ f ( x _ n ) \to f ( x ) $
2023-05-10 18:56:36 +02:00
by the dominated convergence theorem.
\end { subproof}
We'll show that for all $ a < b $ we have
\[
2023-07-12 16:39:58 +02:00
\bP \left ( (a,b] \right ) = \int _ a^ b f(x) \dif x.\label { thm10_ 3eq1}
2023-07-06 00:36:26 +02:00
\]
2023-05-10 18:56:36 +02:00
Let $ F $ be the distribution function of $ \bP $ .
2023-07-28 03:45:37 +02:00
It is enough to prove \yaref { thm10_ 3eq1}
2023-05-10 18:56:36 +02:00
for all continuity points $ a $ and $ b $ of $ F $ .
We have
\begin { IEEEeqnarray*} { rCl}
2023-07-12 15:25:25 +02:00
RHS & \overset { \text { Fubini} } { =} & \frac { 1} { 2 \pi } \int _ { \R } \int _ { a} ^ b e^ { -\i t x} \phi (t) \dif x \dif t\\
& =& \frac { 1} { 2 \pi } \int _ \R \phi (t) \int _ a^ b e^ { -\i t x} \dif x \dif t\\
& =& \frac { 1} { 2\pi } \int _ { \R } \phi (t) \left ( \frac { e^ { -\i t b} - e^ { -\i t a} } { - \i t} \right ) \dif t\\
& \overset { \text { dominated convergence} } { =} & \lim _ { T \to \infty } \frac { 1} { 2\pi } \int _ { -T} ^ { T} \phi (t) \left ( \frac { e^ { -\i t b} - e^ { - \i t a} } { - \i t} \right ) \dif t
2023-05-10 18:56:36 +02:00
\end { IEEEeqnarray*}
2023-07-28 03:45:37 +02:00
By the \yaref { inversionformula} ,
the RHS is equal to $ F ( b ) - F ( a ) = \bP \left ( ( a,b ] \right ) $ .
2023-05-10 18:56:36 +02:00
\end { refproof}
However, Fourier analysis is not only useful for continuous probability density functions:
2023-07-28 03:45:37 +02:00
\begin { theorem} [Bochner's formula for the mass at a point]
\yalabel { Bochner's Formula for the Mass at a Point} { Bochner} { bochnersformula} % Theorem 4
2023-05-10 18:56:36 +02:00
Let $\bP \in M_1(\R)$.
Then
\[
2023-07-13 00:38:46 +02:00
\forall x \in \R .~ \bP \left ( \{ x\} \right ) = \lim _ { T \to \infty } \frac { 1} { 2 T} \int _ { -T} ^ T e^ { -\i t x } \phi (t) \dif t.
2023-07-06 00:36:26 +02:00
\]
2023-05-10 18:56:36 +02:00
\end { theorem}
\begin { refproof} { bochnersformula}
We have
\begin { IEEEeqnarray*} { rCl}
2023-07-12 15:25:25 +02:00
RHS & =& \lim _ { T \to \infty } \frac { 1} { 2 T} \int _ { -T} ^ T e^ { -\i t x} \int _ { \R } e^ { \i t y} \bP (\dif y) \\
2023-07-13 00:06:30 +02:00
& \overset { \text { Fubini} } { =} &
\lim _ { T \to \infty } \frac { 1} { 2 T} \int _ \R \int _ { -T} ^ T
e^{\i t (y - x)} \dif t \bP (\dif y)\\
& =& \lim _ { T \to \infty } \frac { 1} { 2 T} \int _ \R \int _ { -T} ^ T
\cos (t(y-x)) + \underbrace { \i \sin (t (y-x))} _ { \text { odd} }
\dif t \bP (\dif y)\\
& =& \lim _ { T \to \infty } \frac { 1} { 2T} \int _ { \R }
\int _ { -T} ^ T \cos (t(y - x)) \dif t \bP (\dif y)\\
& =& \lim _ { T \to \infty } \frac { 1} { 2T} \int _ { \R }
2T \sinc (T(y-x))
\footnote { $ \sinc ( x ) = \begin { cases }
\frac { \sin (x)} { x} & \text { if } x \neq 0,\\
1 & \text { otherwise.}
\end { cases} $ } \bP ( \dif y ) \\
& \overset { \text { DCT} } { =} & \int _ { \R } \lim _ { T \to \infty }
\sinc (T(y-x)) \bP (\dif y)\\
& =& \bP (\{ x\} ).
2023-05-10 18:56:36 +02:00
\end { IEEEeqnarray*}
\end { refproof}
\begin { theorem} % Theorem 5
\label { thm:lec_ 10thm5}
Let $\phi$ be the characteristic function of $\bP \in M_1(\R)$.
Then
\begin { enumerate} [(a)]
2023-07-20 12:58:53 +02:00
\item $ \phi ( 0 ) = 1 $ , $ | \phi ( t ) | \le 1 $ , $ \phi ( - t ) = \overline { \phi ( t ) } $
and $ \phi ( \cdot ) $ is continuous.
2023-05-10 18:56:36 +02:00
\item $ \phi $ is a \vocab { positive definite function} ,
i.e.~
\[ \forall t _ 1 , \ldots , t _ n \in \R , ( c _ 1 , \ldots ,c _ n ) \in \C ^ n ~ \sum _ { j,k = 1 } ^ n c _ j \overline { c _ k } \phi ( t _ j - t _ k ) \ge 0
\]
2023-07-13 00:06:30 +02:00
Equivalently, the matrix $(\phi(t_j - t_k))_{j,k}$ is positive semi-definite.
2023-05-10 18:56:36 +02:00
\end { enumerate}
\end { theorem}
\begin { refproof} { thm:lec_ 10thm5}
Part (a) is obvious.
For part (b) we have:
\begin { IEEEeqnarray*} { rCl}
2023-07-12 15:25:25 +02:00
\sum _ { j,k} c_ j \overline { c_ k} \phi (t_ j - t_ k) & =& \sum _ { j,k} c_ j \overline { c_ k} \int _ \R e^ { \i (t_ j - t_ k) x} \bP (\dif x)\\
& =& \int _ { \R } \sum _ { j,k} c_ j \overline { c_ k} e^ { \i t_ j x} \overline { e^ { \i t_ k x} } \bP (\dif x)\\
& =& \int _ { \R } \sum _ { j,k} c_ j e^ { \i t_ j x} \overline { c_ k e^ { \i t_ k x} } \bP (\dif x)\\
2023-05-10 18:56:36 +02:00
& =& \int_{\R} \left| \sum_{l} c_l e^{\i t_l x} \right|^2 \bP (\dif x) \ge 0
\end { IEEEeqnarray*}
\end { refproof}
2023-07-28 03:45:37 +02:00
\begin { theorem} [Bochner's theorem]
\yalabel { Bochner's Theorem for Positive Definite Functions} { Bochner's Theorem} { thm:bochner} %
The converse to \yaref { thm:lec_ 10thm5} holds, i.e.~any
$ \phi : \R \to \C $ satisfying (a) and (b) of \yaref { thm:lec_ 10thm5}
2023-07-06 00:36:26 +02:00
must be the Fourier transform of a probability measure $ \bP $
2023-05-10 18:56:36 +02:00
on $ ( \R , \cB ( \R ) ) $ .
\end { theorem}
2023-07-28 03:45:37 +02:00
Unfortunately, we won't prove \yaref { thm:bochner} in this lecture.
2023-05-10 18:56:36 +02:00
\begin { definition} [Convergence in distribution / weak convergence]
2023-07-07 17:42:38 +02:00
\label { def:weakconvergence}
2023-07-13 00:38:46 +02:00
We say that $ \bP _ n \in M _ 1 ( \R ) $ \vocab [Convergence!weak] { converges weakly} towards $ \bP \in M _ 1 ( \R ) $ (notation: $ \bP _ n \implies \bP $ ), iff
2023-05-10 18:56:36 +02:00
\[
2023-07-11 23:38:47 +02:00
\forall f \in C_ b(\R )~ \int f \dif \bP _ n \to \int f \dif \bP .
2023-05-10 18:56:36 +02:00
\]
Where
\[
C_ b(\R ) \coloneqq \{ f: \R \to \R \text { continuous and bounded} \}
\]
In analysis, this is also known as $ \text { weak } ^ \ast $ convergence.
\end { definition}
\begin { remark}
2023-07-11 23:38:47 +02:00
This notion of convergence makes $ M _ 1 ( \R ) $ a separable metric space.
We can construct a metric on $ M _ 1 ( \R ) $ that turns $ M _ 1 ( \R ) $ into a complete
2023-05-10 18:56:36 +02:00
and separable metric space:
Consider the sets
\[
\left\{ \bP \in M_1(\R) : \forall i = 1, \ldots, n ~ \left| \int f_i \dif \bP - \int f_i \dif \bP_0 \right| < \epsilon \right\}
\]
for any $\bP_0 \in M_1(\R)$, $\epsilon > 0$ and $f_1, \ldots, f_n \in C_b(\R)$.
These sets form a basis for the topology on $ M _ 1 ( \R ) $ .
More of this will follow later.
\end { remark}
\begin { example}
\begin { itemize}
\item Let $ \bP _ n = \delta _ { \frac { 1 } { n } } $ .
2023-07-11 23:38:47 +02:00
Then $\int f \dif \bP_n = f(\frac{1}{n}) \to f(0) = \int f \dif \delta_0$
2023-05-10 18:56:36 +02:00
for any continuous, bounded function $ f $ .
Hence $ \bP _ n \to \delta _ 0 $ .
\item $ \bP _ n \coloneqq \delta _ n $ does not converge weakly,
as for example
\[
2023-07-11 23:38:47 +02:00
\int \cos (\pi x) \dif \bP _ n(x)
2023-05-10 18:56:36 +02:00
\]
does not converge.
\item $ \bP _ n \coloneqq \frac { 1 } { n } \delta _ n + ( 1 - \frac { 1 } { n } ) \delta _ 0 $ .
Let $ f \in C _ b ( \R ) $ arbitrary.
Then
\[
2023-07-11 23:38:47 +02:00
\int f \dif \bP_n = \frac{1}{n} f(n) + \left( 1 - \frac{1}{n} \right) f(0) \to f(0)
2023-05-10 18:56:36 +02:00
\]
since $ f $ is bounded.
Hence $ \bP _ n \implies \delta _ 0 $ .
\item Let $\bP_n$ be the measure with Lebesgue density $x \mapsto \frac{1}{\sqrt{2 \pi n}} e^{-\frac{x^2}{2 n}}$.
2023-07-13 00:06:30 +02:00
This ``converges'' towards the $ 0 $ -measure,
which is not a probability measure.
Hence $ \bP _ n $ does not converge weakly.
2023-05-10 18:56:36 +02:00
(Exercise) % TODO
\end { itemize}
\end { example}
\begin { definition}
We say that a series of random variables $ X _ n $
\vocab [Convergence!in distribution] { converges in distribution}
2023-07-15 02:00:04 +02:00
to $ X $ (notation: $ X _ n \xrightarrow { \text { d } } X $ ), iff
2023-05-10 18:56:36 +02:00
$ \bP _ n \implies \bP $ , where $ \bP _ n $ is the distribution of $ X _ n $
and $ \bP $ is the distribution of $ X $ .
\end { definition}
It is easy to see that this is equivalent to $\bE[f(X_n)] \to \bE[f(X)]$
for all $f \in C_b(\R)$.
2023-05-10 18:56:36 +02:00
\begin { example}
Let $ X _ n \coloneqq \frac { 1 } { n } $
and $ F _ n $ the distribution function, i.e.~$ F _ n = \One _ { [ \frac { 1 } { n } , \infty ) } $ .
Then $ \bP _ n = \delta _ { \frac { 1 } { n } } \implies \delta _ 0 $
which is the distribution of $ X \equiv 0 $ .
But $ F _ n ( 0 ) \centernot \to F ( 0 ) $ .
\end { example}
2023-05-23 17:10:43 +02:00
\begin { theorem} % Theorem 1
\label { lec10_ thm1}
2023-07-15 02:00:04 +02:00
$ X _ n \xrightarrow { \text { d } } X $ iff
2023-05-10 18:56:36 +02:00
$ F _ n ( t ) \to F ( t ) $ for all continuity points $ t $ of $ F $ .
\end { theorem}
2023-07-14 22:07:36 +02:00
% \begin{proof}\footnote{This proof was not done in the lecture,
% but can be found in the official notes from lecture 13}
% ``$\implies$''
% Suppose $\mu_n \implies \mu$.
% Let $F_n$ and $F$ denote the respective density functions.
% Fix a continuity point $x_0 \in \R$ of $F$.
% We'll show
% \[
% \limsup_{n \to \infty} F_n(x_0) \le F(x_0) + \epsilon
% \]
% and
% \[
% \liminf_{ \to \infty} F_n(x_0) \ge F(x_0) - \epsilon
% \]
% for all $\epsilon > 0$.
% Fix some $\epsilon > 0$.
% Choose $\delta > 0$ such that $F(x_0 + \delta) < F(x_0) + \epsilon$
% and define
% \[
% g(x) \coloneqq \begin{cases}
% 1 &\text{if } x \le x_0,\\
% 1 - \frac{1}{\delta}(x - x_0)&
% \text{if } x \in (x_0, x_0 + \delta],\\
% 0 &\text{if } x \ge x_0 + \delta.
% \end{cases}
% \]
% Since $g$ is continuous and bounded, we have
% \[
% \int g \dif \mu_n \to \int g \dif \mu.
% \]
% It is clear that $\One_{(-\infty, x_0]} \le g$.
% Hence
% \[
% F_n(x_0) = \int \One_{(-\infty, x_0]} \dif \mu_n \le \int g \dif \mu_n.
% \]
% It follows that
% \begin{IEEEeqnarray*}{rCl}
% \limsup_{n} F_n(x_0)
% &\le& \limsup_n \int g \dif \mu_n\\
% &=& \lim_n \int g \dif \mu_n\\
% &=& \int g \dif \mu\\
% &\overset{g \le \One_{(-\infty, x + \delta]}}{=}& F(x + \delta)\\
% &=& F(x) + \epsilon.
% \end{IEEEeqnarray*}
% The assertion about $\liminf_{n \to \infty} F_n(x_0)$
% follows by a similar argument.
%
% ``$\impliedby$''
% Assume that $F_n(x) \to F(x)$ at all continuity points of $F$.
% We need to show
% \[
% \fgrall g \in C_b(\R) .~\int g \dif \mu_n \to \int g \dif \mu.
% \]
% Let $C$ denote the set of continuity points of $f$.
% We apply measure theoretic induction:
% \begin{itemize}
% \item For $g = \One_{(a,b]}$, $a< b \in C$,
% we have
% \[\int g \dif \mu_n = F_n(b) - F_n(a) \to F(b) - F(a) = \int g \dif \mu.\]
% \item For $g = \sum_{i} \alpha_i \One_{(a_i, b_i]}$,
% $a_i < b_i \in C$,
% we get $\int g \dif \mu_n \to \int g \dif \mu$
% by the same argument.
% \item % TODO continue from Lec13 page 21 (iii)
% \end{itemize}
%
% \end{proof}
2023-07-28 03:45:37 +02:00
\begin{theorem}[L\'evy's continuity theorem]
\yalabel { Levy's Continuity Theorem} { Levy} { levycontinuity}
2023-05-23 17:10:43 +02:00
% Theorem 2
2023-07-15 02:00:04 +02:00
$ X _ n \xrightarrow { \text { d } } X $ iff
2023-05-10 18:56:36 +02:00
$\phi_{X_n}(t) \to \phi_X(t)$ for all $t \in \R$.
\end { theorem}
We will assume these two theorems for now and derive the central limit theorem.
The theorems will be proved later.