some diagrams

commit 19f81a9a3c, parent 0930ed6c95
7 changed files with 90 additions and 31 deletions

@@ -72,6 +72,16 @@ The converse to this fact is also true:
 1 & x \in (1,\infty).\\
 \end{cases}
 \]
+
+\begin{figure}[H]
+\centering
+\begin{tikzpicture}
+\begin{axis}[samples=1000, xmin=-1, xmax=2, width=10cm, height=5cm]
+\addplot[] {and(x>0,x<=1) * x + (x>1)};
+\end{axis}
+\end{tikzpicture}
+\end{figure}
+
 \item \vocab{Exponential distribution}:
 \[
 F(x) = \begin{cases}

@@ -79,10 +89,19 @@ The converse to this fact is also true:
 0 & x < 0.
 \end{cases}
 \]
+\begin{figure}[H]
+\centering
+\begin{tikzpicture}
+\begin{axis}[samples=1000, smooth, width=10cm, height=5cm, xmin=-2, xmax=5]
+\addplot[] {(x > 0) * (1 - exp( - 5 * x))};
+\end{axis}
+\end{tikzpicture}
+\end{figure}
 \item \vocab{Gaussian distribution}:
 \[
 \Phi(x) \coloneqq \frac{1}{\sqrt{2\pi}} \int_{-\infty}^x e^{-\frac{y^2}{2}} dy.
 \]
+
 \item $\bP[X = 1] = \bP[X = -1] = \frac{1}{2}$ :
 \[
 F(x) = \begin{cases}

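
Editorial note (not part of the diff): the Gaussian item is the only one in this list without a companion plot. A sketch in the same style could use the logistic approximation $\Phi(x) \approx \left(1 + e^{-1.702 x}\right)^{-1}$, since pgfplots has no built-in error function; the constant $1.702$ and the axis options below are illustrative choices, not taken from the source.

\begin{figure}[H]
\centering
\begin{tikzpicture}
\begin{axis}[samples=1000, smooth, width=10cm, height=5cm, xmin=-3, xmax=3]
% logistic approximation to \Phi(x); pgfplots has no erf by default
\addplot[] {1/(1 + exp(-1.702*x))};
\end{axis}
\end{tikzpicture}
\end{figure}
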
@@ -91,5 +110,14 @@ The converse to this fact is also true:
 1 & x \in [1, \infty).
 \end{cases}
 \]
+\begin{figure}[H]
+\centering
+\begin{tikzpicture}
+\begin{axis}[samples=1000, width=10cm, height=5cm]
+\addplot[] {and(x >= -1, x < 1) * 0.5 + (x >= 1)};
+\end{axis}
+\end{tikzpicture}
+\end{figure}
+
 \end{enumerate}
 \end{example}

@@ -70,8 +70,8 @@ First, we need to prove some properties of characteristic functions.
 \begin{subproof}
 For $y \ge 0$, we have
 \begin{IEEEeqnarray*}{rCl}
-|e^{\i y} - 1| &=& |\int_0^y \cos(s) \d s + \i \int_0^y \sin(s) \d s|\\
-&=& |\int_0^y e^{\i s} \d s|\\
+|e^{\i y} - 1| &=& |\int_0^y \cos(s) \dif s + \i \int_0^y \sin(s) \dif s|\\
+&=& |\int_0^y e^{\i s} \dif s|\\
 &\overset{\text{Jensen}}{\le}& \int_0^y |e^{\i s}| ds = y.
 \end{IEEEeqnarray*}
 For $y < 0$, we have $|e^{\i y} - 1| = |e^{-\i y} - 1|$

@@ -122,21 +122,21 @@ First, we need to prove some properties of characteristic functions.
 \begin{refproof}{lec12_2}
 We have
 \begin{IEEEeqnarray*}{rCl}
-\phi_X(t) &=& \frac{1}{\sqrt{2 \pi} } \int_{-\infty}^\infty e^{\i t x} e^{-\frac{x^2}{2}} \d x\\
-&=& \frac{1}{\sqrt{2 \pi} } \int_{-\infty}^\infty (\cos(tx) + \i \sin(tx)) e^{-\frac{x^2}{2}} \d x\\
-&=& \frac{1}{\sqrt{2 \pi} } \int_{-\infty}^\infty \cos(t x) e^{-\frac{x^2}{2}} \d x,\\
+\phi_X(t) &=& \frac{1}{\sqrt{2 \pi} } \int_{-\infty}^\infty e^{\i t x} e^{-\frac{x^2}{2}} \dif x\\
+&=& \frac{1}{\sqrt{2 \pi} } \int_{-\infty}^\infty (\cos(tx) + \i \sin(tx)) e^{-\frac{x^2}{2}} \dif x\\
+&=& \frac{1}{\sqrt{2 \pi} } \int_{-\infty}^\infty \cos(t x) e^{-\frac{x^2}{2}} \dif x,\\
 \end{IEEEeqnarray*}
 since $x \mapsto \sin(tx)$ is odd and $x \mapsto e^{-\frac{x^2}{2}}$
 is even, their product is odd, which gives that the integral is $0$.

 \begin{IEEEeqnarray*}{rCl}
 \phi'_X(t) &=& \bE[\i X e^{\i t X}] \\
-&=& \frac{1}{\sqrt{2 \pi}} \int_{-\infty}^\infty \i x \left( \cos(t x) + \i \sin(tx) \right) e^{-\frac{x^2}{2}} \d x\\
-&=& \frac{1}{\sqrt{2 \pi}} \left( \i \int_{-\infty}^\infty x \cos(tx) \right) e^{-\frac{x^2}{2}} \d x\\
-&=& \frac{1}{\sqrt{2 \pi} } \left(\underbrace{\i \int_{-\infty}^\infty x \cos(tx) e^{-\frac{x^2}{2}} \d x}_{= 0} + \int_{-\infty}^\infty - \sin(t x) e^{-\frac{x^2}{2}} \d x\right)\\
-&=& \int_{-\infty}^\infty \underbrace{\sin(tx)}_{y(x)} \underbrace{ \frac{1}{\sqrt{2 \pi} }(-x) e^{\i\frac{x^2}{2}}}_{f'(x)} \d x\\
+&=& \frac{1}{\sqrt{2 \pi}} \int_{-\infty}^\infty \i x \left( \cos(t x) + \i \sin(tx) \right) e^{-\frac{x^2}{2}} \dif x\\
+&=& \frac{1}{\sqrt{2 \pi}} \left( \i \int_{-\infty}^\infty x \cos(tx) e^{-\frac{x^2}{2}} \dif x - \int_{-\infty}^\infty x \sin(tx) e^{-\frac{x^2}{2}} \dif x \right)\\
+&=& \frac{1}{\sqrt{2 \pi} } \left(\underbrace{\i \int_{-\infty}^\infty x \cos(tx) e^{-\frac{x^2}{2}} \dif x}_{= 0} + \int_{-\infty}^\infty - x \sin(t x) e^{-\frac{x^2}{2}} \dif x\right)\\
+&=& \int_{-\infty}^\infty \underbrace{\sin(tx)}_{y(x)} \underbrace{ \frac{1}{\sqrt{2 \pi} }(-x) e^{-\frac{x^2}{2}}}_{f'(x)} \dif x\\
 &=& \underbrace{[ \sin(tx) \frac{1}{\sqrt{2 \pi}} e^{-\frac{x^2}{2}}]_{x=-\infty}^\infty}_{=0}
-- \int_{-\infty}^\infty t \cos(tx) \frac{1}{\sqrt{2 \pi} } e^{-\frac{x^2}{2}} \d x\\
+- \int_{-\infty}^\infty t \cos(tx) \frac{1}{\sqrt{2 \pi} } e^{-\frac{x^2}{2}} \dif x\\
 &=& -t \phi_X(t)
 \end{IEEEeqnarray*}
 Thus, for all $t \in \R$

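
Editorial note (not part of the diff): this hunk ends just before the conclusion of the computation; presumably the argument continues along the standard lines, e.g.

\[
\frac{\dif}{\dif t}\left( \phi_X(t) e^{\frac{t^2}{2}} \right)
= \left( \phi_X'(t) + t \phi_X(t) \right) e^{\frac{t^2}{2}} = 0,
\]

so that $\phi_X(t) e^{\frac{t^2}{2}}$ is constant, and with $\phi_X(0) = 1$ one gets $\phi_X(t) = e^{-\frac{t^2}{2}}$.
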
@@ -172,9 +172,10 @@ Now, we can finally prove the CLT:
 Let $t \in \R$.
 Then
 \begin{IEEEeqnarray*}{rCl}
-\phi_{V_n}(t) = \bE[e^{\i t Y_n}] = \bE[e^{\i t \left( \frac{Y_1 + \ldots + Y_n}{\sqrt{n} } \right) }] \\
-&=& \bE[e^{\i t \frac{Y_1}{\sqrt{n}}}] \cdot \ldots \cdot \bE[e^{\i t \frac{Y_n}{\sqrt{n} }}]\\
-&=& \left( \phi(\frac{t}{\sqrt{n} } \right)^n.
+\phi_{V_n}(t) &=& \bE[e^{\i t V_n}]\\
+&=& \bE[e^{\i t \left( \frac{Y_1 + \ldots + Y_n}{\sqrt{n} } \right) }] \\
+&=& \bE\left[e^{\i t \frac{Y_1}{\sqrt{n}}}\right] \cdot \ldots \cdot \bE\left[e^{\i t \frac{Y_n}{\sqrt{n} }}\right]\\
+&=& \left( \phi\left(\frac{t}{\sqrt{n} }\right) \right)^n.
 \end{IEEEeqnarray*}
 where $\phi(t) \coloneqq \phi_{Y_1}(t)$.

@@ -182,8 +183,9 @@ Now, we can finally prove the CLT:
 \begin{IEEEeqnarray*}{rCl}
 \phi(s) &=& \phi(0) + \phi'(0) s + \frac{\phi''(0)}{2} s^2 + o(s^2), \text{ as $s \to 0$}\\
 &=& 1 + \underbrace{\i \bE[Y_1] s}_{=0}
-- \bE[Y_1^2] \frac{s^2}{2} + o(s^2)\\
-&=& 1 - \frac{s^2}{2} + o(s^2), \text{as $s \to $}
+- \bE[Y_1^2] \frac{s^2}{2} + o(s^2), \text{ as $s \to 0$}
+\\
+&=& 1 - \frac{s^2}{2} + o(s^2), \text{ as $s \to 0$}
 \end{IEEEeqnarray*}

 Setting $s \coloneqq \frac{t}{\sqrt{n}}$ we obtain

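
Editorial note (not part of the diff): the expansion above relies on the standard derivative formulas for characteristic functions; assuming, as in the usual CLT setup, that the $Y_i$ are centered with unit variance, the coefficients are

\[
\phi(0) = 1, \qquad
\phi'(0) = \i \, \bE[Y_1] = 0, \qquad
\phi''(0) = \i^2 \, \bE[Y_1^2] = -\Var(Y_1) = -1.
\]
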
@@ -194,7 +196,7 @@ Now, we can finally prove the CLT:

 \[
 \phi_{V_n}(t) = \left( \phi\left( \frac{t}{\sqrt{n} } \right) \right)^n =
-(1 - \frac{t^2}{2 n } + o\left( \frac{t^2}{n} \right)^n \xrightarrow{n \to \infty} e^{-\frac{t^2}{2}},
+\left(1 - \frac{t^2}{2 n } + o\left( \frac{t^2}{n} \right)\right)^n \xrightarrow{n \to \infty} e^{-\frac{t^2}{2}},
 \]
 where we have used the following:

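
Editorial note (not part of the diff): the fact referred to is presumably the standard limit

\[
c_n \to c \in \C \implies \left( 1 + \frac{c_n}{n} \right)^n \xrightarrow{n \to \infty} e^{c},
\]

applied here with $c_n \coloneqq -\frac{t^2}{2} + n \cdot o\left( \frac{t^2}{n} \right) \to -\frac{t^2}{2}$ for fixed $t$.
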
@@ -15,8 +15,10 @@ if $X_1, X_2,\ldots$ are i.i.d.~with $ \mu = \bE[X_1]$,
 \label{lindebergclt}
 Assume $X_1, X_2, \ldots,$ are independent (but not necessarily identically distributed) with $\mu_i = \bE[X_i] < \infty$ and $\sigma_i^2 = \Var(X_i) < \infty$.
 Let $S_n = \sqrt{\sum_{i=1}^{n} \sigma_i^2}$
-and assume that $\lim_{n \to \infty} \frac{1}{S_n^2} \bE\left[(X_i - \mu_i)^2 \One_{|X_i - \mu_i| > \epsilon \S_n}\right] = 0$ for all $\epsilon > 0$
-(\vocab{Lindeberg condition}, ``The truncated variance is negligible compared to the variance.'').
+and assume that
+\[\lim_{n \to \infty} \frac{1}{S_n^2} \sum_{i=1}^{n} \bE\left[(X_i - \mu_i)^2 \One_{|X_i - \mu_i| > \epsilon S_n}\right] = 0\]
+for all $\epsilon > 0$
+(\vocab{Lindeberg condition}\footnote{``The truncated variance is negligible compared to the variance.''}).

 Then the CLT holds, i.e.~
 \[

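
Editorial note (not part of the diff): as a sanity check that this generalizes the i.i.d.~CLT, in the i.i.d.~case with $\sigma^2 \coloneqq \Var(X_1) \in (0,\infty)$ one has $S_n^2 = n \sigma^2$ and

\[
\frac{1}{S_n^2} \sum_{i=1}^{n} \bE\left[ (X_i - \mu_i)^2 \One_{|X_i - \mu_i| > \epsilon S_n} \right]
= \frac{1}{\sigma^2} \bE\left[ (X_1 - \mu)^2 \One_{|X_1 - \mu| > \epsilon \sigma \sqrt{n}} \right]
\xrightarrow{n \to \infty} 0
\]

by dominated convergence, so the Lindeberg condition holds.
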
@@ -94,16 +96,26 @@ A generalized version of \autoref{levycontinuity} is the following:
 \end{example}

 \begin{example}
-Suppose $C$ is a random variable which is Cauchy distributed, i.e.~$C$
+Suppose $C$ is a random variable which is \vocab[Cauchy distribution]{Cauchy distributed}, i.e.~$C$
 has probability density $f_C(x) = \frac{1}{\pi} \frac{1}{1 + x^2}$.

+\begin{figure}[H]
+\centering
+\begin{tikzpicture}
+\begin{axis}[samples=100, smooth]
+\addplot[] { (1/3.14159265358979323846) * (1 / ( 1 + x * x))};
+\end{axis}
+\end{tikzpicture}
+\caption{Probability density function of $C$}
+\end{figure}
+
 We know that $\bE[|C|] = \infty$.

 We have $\phi_C(t) = \bE[e^{\i t C}] = e^{-|t|}$.
 Suppose $C_1, C_2, \ldots, C_n$ are i.i.d.~Cauchy distributed
 and let $S_n \coloneqq C_1 + \ldots + C_n$.

-Exercise: $\phi_{S_n}(t) = e^{-|t|} = \phi_{C_1}(t)$, thus $S_n \sim C$.
+Exercise: $\phi_{\frac{S_n}{n}}(t) = e^{-|t|} = \phi_{C_1}(t)$, thus $\frac{S_n}{n} \sim C$.
 \end{example}

 We will prove \autoref{levycontinuity} assuming

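
Editorial note (not part of the diff): one way to carry out the exercise, using independence and the scaling rule $\phi_{aX}(t) = \phi_X(at)$:

\[
\phi_{\frac{S_n}{n}}(t)
= \prod_{i=1}^{n} \phi_{C_i}\!\left( \frac{t}{n} \right)
= \left( e^{-\frac{|t|}{n}} \right)^{n}
= e^{-|t|}
= \phi_{C_1}(t),
\]

so $\frac{S_n}{n} \sim C$; in particular the empirical mean of i.i.d.~Cauchy random variables does not concentrate.
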
@@ -229,7 +241,7 @@ We still need to show that $\mu_n \implies \mu$.
 \begin{subproof}
 \todo{in the notes}
 \end{subproof}
-Assume $\mu_n$ does not converge to $\mu$.
+Assume that $\mu_n$ does not converge to $\mu$.
 By \autoref{lec10_thm1}, pick a continuity point $x_0$ of $F$,
 such that $F_n(x_0) \not\to F(x_0)$.
 Pick $\delta > 0$ and a subsequence $F_{n_1}(x_0), F_{n_2}(x_0), \ldots$

@@ -25,7 +25,7 @@
 The SLLN follows from the claim.
 \end{refproof}

 We need the following inequality:
 \begin{theorem}[Kolmogorov's inequality]
 If $X_1,\ldots, X_n$ are independent with $\bE[X_i] = 0$
 and $\Var(X_i) = \sigma_i^2$, then

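
Editorial note (not part of the diff): the hunk cuts the statement off; for reference, Kolmogorov's inequality in its standard form reads

\[
\bP\left[ \max_{1 \le k \le n} |X_1 + \ldots + X_k| \ge \epsilon \right]
\le \frac{1}{\epsilon^2} \sum_{i=1}^{n} \sigma_i^2
\qquad \text{for all } \epsilon > 0.
\]
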
@@ -41,7 +41,8 @@ We need the following inequality:

 We have
 \begin{IEEEeqnarray*}{rCl}
-\int_{A_i} (\underbrace{X_1 + \ldots + X_i}_C + \underbrace{X_{i+1} + \ldots + X_n}_D)^2 d \bP &=& \int_{A_i} C^2 d\bP + \underbrace{\int_{A_i} D^2 d \bP}_{\ge 0} + 2 \int_{A_i} CD d\bP\\
+&&\int_{A_i} (\underbrace{X_1 + \ldots + X_i}_C + \underbrace{X_{i+1} + \ldots + X_n}_D)^2 d \bP\\
+&=& \int_{A_i} C^2 d\bP + \underbrace{\int_{A_i} D^2 d \bP}_{\ge 0} + 2 \int_{A_i} CD d\bP\\
 &\ge & \int_{A_i} \underbrace{C^2}_{\ge \epsilon^2} d \bP + 2 \int \underbrace{\One_{A_i} (X_1 + \ldots + X_i)}_E \underbrace{(X_{i+1} + \ldots + X_n)}_D d \bP\\
 &\ge& \int_{A_i} \epsilon^2 d\bP
 \end{IEEEeqnarray*}

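
Editorial note (not part of the diff): dropping the cross term in the last step uses independence; with $A_i \in \sigma(X_1,\ldots,X_i)$, as in the standard proof of Kolmogorov's inequality,

\[
\int \underbrace{\One_{A_i} (X_1 + \ldots + X_i)}_{E} \underbrace{(X_{i+1} + \ldots + X_n)}_{D} d \bP
= \bE[E] \cdot \bE[D] = 0,
\]

since $E$ is $\sigma(X_1,\ldots,X_i)$-measurable, $D$ is independent of $\sigma(X_1,\ldots,X_i)$, and $\bE[D] = 0$.
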
@@ -85,7 +85,7 @@ for any $k \in \N$.
 This follows from the independence of the $X_i$.
 We have
 \[
-\sigma\left( X_1,\ldots,X_n \right) = \sigma\left(\underbrace{\{X_{1}^{-1}(B_1) \cap \ldots \cap X_n^{-1}(B_n)\} | B_1,\ldots,B_n \in \cB(\R)\}}_{\text{\reflectbox{$\coloneqq$}}\cA} \right).
+\sigma\left( X_1,\ldots,X_n \right) = \sigma\left(\underbrace{\{X_{1}^{-1}(B_1) \cap \ldots \cap X_n^{-1}(B_n) | B_1,\ldots,B_n \in \cB(\R)\}}_{\text{\reflectbox{$\coloneqq$}}\cA} \right).
 \]
 $\cA$ is a semi-algebra, since
 \begin{enumerate}[(i)]

@@ -64,9 +64,11 @@ Why is $\sqrt{n}$ the right order? (Handwavey argument)

 Suppose $X_1, X_2,\ldots$ are i.i.d. $\cN(0,1)$.
 The mean of the l.h.s.~is $0$ and for the variance we get
-\[
-\Var(\frac{X_1 + \ldots + X_n - n \bE(X_1)}{\sqrt{n} }) = \Var\left( \frac{X_1+ \ldots + X_n}{\sqrt{n} } \right) = \frac{1}{n} \left( \Var(X_1) + \ldots + \Var(X_n) \right) = 1
-\]
+\begin{IEEEeqnarray*}{rCl}
+\Var\left(\frac{X_1 + \ldots + X_n - n \bE(X_1)}{\sqrt{n} }\right) &=& \Var\left( \frac{X_1+ \ldots + X_n}{\sqrt{n} } \right)\\
+&=& \frac{1}{n} \left( \Var(X_1) + \ldots + \Var(X_n) \right) = 1.
+\end{IEEEeqnarray*}

 For the r.h.s.~we get a mean of $0$ and a variance of $1$.
 So, for $(\ast)$ to make sense, $\sqrt{n}$ must
 be the right scaling.

@@ -77,8 +79,17 @@ This notion of convergence will be defined in terms of characteristic functions

 \subsection{Characteristic functions and Fourier transform}

+\begin{definition}
 Consider $(\R, \cB(\R), \bP)$.
-For every $t \in \R$ define a function $\phi(t) \coloneqq \phi_\bP(t) \coloneqq \int_{\R} e^{\i t x} \bP(dx)$.
+The \vocab{characteristic function} of $\bP$ is defined as
+\begin{IEEEeqnarray*}{rCl}
+\phi_{\bP}: \R &\longrightarrow & \C \\
+t &\longmapsto & \int_{\R} e^{\i t x} \bP(\dif x).
+\end{IEEEeqnarray*}
+\end{definition}
+\begin{abuse}
+$\phi_\bP(t)$ will often be abbreviated as $\phi(t)$.
+\end{abuse}
 We have
 \[
 \phi(t) = \int_{\R} \cos(tx) \bP(dx) + \i \int_{\R} \sin(tx) \bP(dx).

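
Editorial note (not part of the diff): a quick sanity check of the definition, using the two-point distribution from the examples earlier in the notes:

\[
\text{for } \bP = \tfrac{1}{2}\left( \delta_{-1} + \delta_{1} \right): \qquad
\phi_{\bP}(t) = \int_{\R} e^{\i t x} \bP(\dif x)
= \tfrac{1}{2} e^{-\i t} + \tfrac{1}{2} e^{\i t}
= \cos(t).
\]
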
@@ -88,7 +99,6 @@ We have
 \item We have $\phi(0) = 1$.
 \item $|\phi(t)| \le \int_{\R} |e^{\i t x} | \bP(dx) = 1$.
 \end{itemize}
-We call $\phi_{\bP}$ the \vocab{characteristic function} of $\bP$.

 \begin{remark}
 Suppose $(\Omega, \cF, \bP)$ is an arbitrary probability space and

@@ -23,6 +23,10 @@
 \usepackage{float}
 %\usepackage{algorithmicx}

+\usepackage{pgfplots}
+\pgfplotsset{compat = newest}
+
+
 \newcounter{subsubsubsection}[subsubsection]
 \renewcommand\thesubsubsubsection{\thesubsubsection.\arabic{subsubsubsection}}
 \newcommand\subsubsubsection[1]

@@ -97,3 +101,5 @@
 \DeclareSimpleMathOperator{Bin}
 \DeclareSimpleMathOperator{Ber}
 \DeclareSimpleMathOperator{Exp}
+
+\newcommand*\dif{\mathop{}\!\mathrm{d}}
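
Editorial note (not part of the diff): the new macro typesets an upright differential with the usual spacing, e.g.

\[
\int_0^\infty e^{-x} \dif x = 1.
\]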