lecture 13 part 1

This commit is contained in:
Josia Pietsch 2023-05-23 17:10:43 +02:00
parent ee6f4fe851
commit ee61e963c1
Signed by: jrpie
GPG key ID: E70B571D66986A2D
3 changed files with 123 additions and 15 deletions

View file

@ -64,7 +64,7 @@ where $\mu = \bP X^{-1}$.
Let $\bP \in M_1(\R)$ such that $\phi_\R \in L^1(\lambda)$. Let $\bP \in M_1(\R)$ such that $\phi_\R \in L^1(\lambda)$.
Then $\bP$ has a continuous probability density given by Then $\bP$ has a continuous probability density given by
\[ \[
f(x) = \frac{1}{2 \pi} \int_{\R} e^{-\i t x} \phi_{\R(t) dt}. f(x) = \frac{1}{2 \pi} \int_{\R} e^{-\i t x} \phi_{\R}(t) dt.
\] \]
\end{theorem} \end{theorem}
@ -247,11 +247,13 @@ Unfortunately, we won't prove \autoref{bochnersthm} in this lecture.
which is the distribution of $X \equiv 0$. which is the distribution of $X \equiv 0$.
But $F_n(0) \centernot\to F(0)$. But $F_n(0) \centernot\to F(0)$.
\end{example} \end{example}
\begin{theorem} \begin{theorem} % Theorem 1
\label{lec10_thm1}
$X_n \xrightarrow{\text{dist}} X$ iff $X_n \xrightarrow{\text{dist}} X$ iff
$F_n(t) \to F(t)$ for all continuity points $t$ of $F$. $F_n(t) \to F(t)$ for all continuity points $t$ of $F$.
\end{theorem} \end{theorem}
\begin{theorem}[Levy's continuity theorem]\label{levycontinuity} \begin{theorem}[Levy's continuity theorem]\label{levycontinuity}
% Theorem 2
$X_n \xrightarrow{\text{dist}} X$ iff $X_n \xrightarrow{\text{dist}} X$ iff
$\phi_{X_n}(t) \to \phi(t)$ for all $t \in \R$. $\phi_{X_n}(t) \to \phi(t)$ for all $t \in \R$.
\end{theorem} \end{theorem}

View file

@ -139,19 +139,19 @@ First, we need to prove some properties of characteristic functions.
- \int_{-\infty}^\infty t \cos(tx) \frac{1}{\sqrt{2 \pi} } e^{-\frac{x^2}{2}} \d x\\ - \int_{-\infty}^\infty t \cos(tx) \frac{1}{\sqrt{2 \pi} } e^{-\frac{x^2}{2}} \d x\\
&=& -t \phi_X(t) &=& -t \phi_X(t)
\end{IEEEeqnarray*} \end{IEEEeqnarray*}
Thus, for all $t \in \R$ Thus, for all $t \in \R$
\[ \[
(\log(\phi_X(t)))' = \frac{\phi'_X(t)}{\phi_X(t)} = -t. (\log(\phi_X(t)))' = \frac{\phi'_X(t)}{\phi_X(t)} = -t.
\] \]
Hence there exists $c \in \R$, such that Hence there exists $c \in \R$, such that
\[ \[
\log(\phi_X(t)) = -\frac{t^2}{2} + c. \log(\phi_X(t)) = -\frac{t^2}{2} + c.
\] \]
Since $\phi_X(0) = 1$, we obtain $c = 0$. Since $\phi_X(0) = 1$, we obtain $c = 0$.
Thus Thus
\[ \[
\phi_X(t) = e^{-\frac{t^2}{2}}. \phi_X(t) = e^{-\frac{t^2}{2}}.
\] \]
\end{refproof} \end{refproof}
@ -163,12 +163,12 @@ Now, we can finally prove the CLT:
Let Let
\[ \[
Y_i \coloneqq \frac{X_i - \mu}{\sigma} Y_i \coloneqq \frac{X_i - \mu}{\sigma}
\] \]
i.e.~we normalize to $\bE[Y_1] = 0$ and $\Var(Y_1) = 1$. i.e.~we normalize to $\bE[Y_1] = 0$ and $\Var(Y_1) = 1$.
We need to show that We need to show that
\[ \[
V_n \coloneqq \frac{S_n - n \mu}{ \sigma \sqrt{n}} = \frac{Y_1+ \ldots + Y_n}{\sqrt{n}} \xrightarrow{(d)} \cN(0,1) V_n \coloneqq \frac{S_n - n \mu}{ \sigma \sqrt{n}} = \frac{Y_1+ \ldots + Y_n}{\sqrt{n}} \xrightarrow{(d)} \cN(0,1)
\] \]
Let $t \in \R$. Let $t \in \R$.
Then Then
\begin{IEEEeqnarray*}{rCl} \begin{IEEEeqnarray*}{rCl}
@ -181,7 +181,7 @@ Now, we can finally prove the CLT:
We have We have
\begin{IEEEeqnarray*}{rCl} \begin{IEEEeqnarray*}{rCl}
\phi(s) &=& \phi(0) + \phi'(0) s + \frac{\phi''(0)}{2} s^2 + o(s^2), \text{as $s \to 0$}\\ \phi(s) &=& \phi(0) + \phi'(0) s + \frac{\phi''(0)}{2} s^2 + o(s^2), \text{as $s \to 0$}\\
&=& 1 - \underbrace{\i \bE[Y_1] s}_{=0} &=& 1 - \underbrace{\i \bE[Y_1] s}_{=0}
- \bE[Y_1^2] \frac{s^2}{2} + o(s^2)\\ - \bE[Y_1^2] \frac{s^2}{2} + o(s^2)\\
&=& 1 - \frac{s^2}{2} + o(s^2), \text{as $s \to 0$} &=& 1 - \frac{s^2}{2} + o(s^2), \text{as $s \to 0$}
\end{IEEEeqnarray*} \end{IEEEeqnarray*}
@ -189,13 +189,13 @@ Now, we can finally prove the CLT:
Setting $s \coloneqq \frac{t}{\sqrt{n}}$ we obtain Setting $s \coloneqq \frac{t}{\sqrt{n}}$ we obtain
\[ \[
\phi\left(\frac{t}{ \sqrt{n} }\right) = 1 - \frac{t^2}{2n} + o\left( \frac{t^2}{n} \right) \text{ as $n \to \infty$} \phi\left(\frac{t}{ \sqrt{n} }\right) = 1 - \frac{t^2}{2n} + o\left( \frac{t^2}{n} \right) \text{ as $n \to \infty$}
\] \]
\[ \[
\phi_{V_n}(t) = \left( \phi\left( \frac{t}{\sqrt{n} } \right) \right)^n = \phi_{V_n}(t) = \left( \phi\left( \frac{t}{\sqrt{n} } \right) \right)^n =
\left(1 - \frac{t^2}{2 n } + o\left( \frac{t^2}{n} \right)\right)^n \xrightarrow{n \to \infty} e^{-\frac{t^2}{2}}, \left(1 - \frac{t^2}{2 n } + o\left( \frac{t^2}{n} \right)\right)^n \xrightarrow{n \to \infty} e^{-\frac{t^2}{2}},
\] \]
where we have used the following: where we have used the following:
\begin{claim} \begin{claim}
@ -207,7 +207,7 @@ Now, we can finally prove the CLT:
We have shown that We have shown that
\[ \[
\phi_n(t) \xrightarrow{n \to \infty} e^{-\frac{t^2}{2}} = \phi_N(t). \phi_n(t) \xrightarrow{n \to \infty} e^{-\frac{t^2}{2}} = \phi_N(t).
\] \]
Using \autoref{levycontinuity}, we obtain \autoref{clt}. Using \autoref{levycontinuity}, we obtain \autoref{clt}.
\end{refproof} \end{refproof}
@ -221,6 +221,3 @@ Now, we can finally prove the CLT:
where $\langle t, X\rangle \coloneqq \sum_{i = 1}^d t_i X_i$. where $\langle t, X\rangle \coloneqq \sum_{i = 1}^d t_i X_i$.
\end{remark} \end{remark}
Exercise: Find out, which properties also hold for $d > 1$. Exercise: Find out, which properties also hold for $d > 1$.

109
inputs/lecture_13.tex Normal file
View file

@ -0,0 +1,109 @@
% Lecture 13 2023-05
%The difficult part is to show \autoref{levycontinuity}.
%This is the last lecture, where we will deal with independent random variables.
We have seen, that
if $X_1, X_2,\ldots$ are i.i.d.~with $ \mu = \bE[X_1]$,
$\sigma^2 = \Var(X_1)$,
then $\frac{\sum_{i=1}^{n} (X_i - \mu)}{\sigma \sqrt{n} } \xrightarrow{(d)} \cN(0,1)$.
\begin{question}
What happens if $X_1, X_2,\ldots$ are independent, but not identically distributed? Do we still have a CLT?
\end{question}
\begin{theorem}[Lindeberg CLT]
\label{lindebergclt}
Assume $X_1, X_2, \ldots,$ are independent (but not necessarily identically distributed) with $\mu_i = \bE[X_i] < \infty$ and $\sigma_i^2 = \Var(X_i) < \infty$.
Let $S_n = \sqrt{\sum_{i=1}^{n} \sigma_i^2}$
and assume that $\lim_{n \to \infty} \frac{1}{S_n^2} \sum_{i=1}^{n} \bE\left[(X_i - \mu_i)^2 \One_{|X_i - \mu_i| > \epsilon S_n}\right] = 0$ for all $\epsilon > 0$
(\vocab{Lindeberg condition}, ``The truncated variance is negligible compared to the variance.'').
Then the CLT holds, i.e.~
\[
\frac{\sum_{i=1}^n (X_i - \mu_i)}{S_n} \xrightarrow{(d)} \cN(0,1).
\]
\end{theorem}
\begin{theorem}[Lyapunov condition]
\label{lyapunovclt}
Let $X_1, X_2,\ldots$ be independent, $\mu_i = \bE[X_i] < \infty$,
$\sigma_i^2 = \Var(X_i) < \infty$
and $S_n \coloneqq \sqrt{\sum_{i=1}^n \sigma_i^2}$.
Then, assume that, for some $\delta > 0$,
\[
\lim_{n \to \infty} \frac{1}{S_n^{2+\delta}} \sum_{i=1}^{n} \bE[|X_i - \mu_i|^{2 + \delta}] = 0
\]
(\vocab{Lyapunov condition}).
Then the CLT holds.
\end{theorem}
\begin{remark}
The Lyapunov condition implies the Lindeberg condition.
(Exercise).
\end{remark}
We will not prove \autoref{lindebergclt} or \autoref{lyapunovclt}
in this lecture. However, they are quite important.
We will now sketch the proof of \autoref{levycontinuity},
details can be found in the notes.\todo{Complete this}
A generalized version of \autoref{levycontinuity} is the following:
\begin{theorem}[A generalized version of Levy's continuity theorem, \autoref{levycontinuity}]
\label{genlevycontinuity}
Suppose we have random variables $(X_n)_n$ such that
$\bE[e^{\i t X_n}] \xrightarrow{n \to \infty} \phi(t)$ for all $t \in \R$
for some function $\phi$ on $\R$.
Then the following are equivalent:
\begin{enumerate}[(a)]
\item The distribution of $X_n$ is \vocab[Distribution!tight]{tight} (dt. ``straff''),
i.e.~$\lim_{a \to \infty} \sup_{n \in \N} \bP[|X_n| > a] = 0$.
\item $X_n \xrightarrow{(d)} X$ for some real-valued random variable $X$.
\item $\phi$ is the characteristic function of $X$.
\item $\phi$ is continuous on all of $\R$.
\item $\phi$ is continuous at $0$.
\end{enumerate}
\end{theorem}
\begin{example}
Let $Z \sim \cN(0,1)$ and $X_n \coloneqq n Z$.
We have $\phi_{X_n}(t) = \bE[e^{\i t X_n}] = e^{-\frac{1}{2} t^2 n^2} \xrightarrow{n \to \infty} \One_{\{t = 0\} }$.
$\One_{\{t = 0\}}$ is not continuous at $0$.
By \autoref{genlevycontinuity}, $X_n$ can not converge to a real-valued
random variable.
Exercise: $X_n \xrightarrow{(d)} \overline{X}$,
where $\bP[\overline{X} = \infty] = \frac{1}{2} = \bP[\overline{X} = -\infty]$.
Similar examples are $\mu_n \coloneqq \delta_n$ and
$\mu_n \coloneqq \frac{1}{2} \delta_n + \frac{1}{2} \delta_{-n}$.
\end{example}
\begin{example}
Suppose that $X_1, X_2,\ldots$ are i.i.d.~with $\bE[X_1] = 0$.
Let $\sigma^2 \coloneqq \Var(X_i)$.
Then the distribution of $\frac{S_n}{\sigma \sqrt{n}}$ is tight:
\begin{IEEEeqnarray*}{rCl}
\bE\left[ \left( \frac{S_n}{\sqrt{n} } \right)^2 \right] &=&
\frac{1}{n} \bE[ (X_1+ \ldots + X_n)^2]\\
&=& \sigma^2
\end{IEEEeqnarray*}
For $a > 0$, by Chebyshev's inequality, % TODO
we have
\[
\bP\left[ \left| \frac{S_n}{\sqrt{n}} \right| > a \right] \leq \frac{\sigma^2}{a^2} \xrightarrow{a \to \infty} 0.
\]
verifying \autoref{genlevycontinuity}.
\end{example}
\begin{example}
Suppose $C$ is a random variable which is Cauchy distributed, i.e.~$C$
has probability density $f_C(x) = \frac{1}{\pi} \frac{1}{1 + x^2}$.
We know that $\bE[|C|] = \infty$.
We have $\phi_C(t) = \bE[e^{\i t C}] = e^{-|t|}$.
Suppose $C_1, C_2, \ldots, C_n$ are i.i.d.~Cauchy distributed
and let $S_n \coloneqq C_1 + \ldots + C_n$.
Exercise: $\phi_{S_n}(t) = e^{-n |t|}$, hence $\phi_{\frac{S_n}{n}}(t) = e^{-|t|} = \phi_{C_1}(t)$, thus $\frac{S_n}{n} \sim C$.
\end{example}