\lecture{7}{}{Kolmogorov's three-series theorem}
\begin{goal}
We want to drop the assumptions of finite mean and variance
and still say something about the behaviour of $\sum_{n \ge 1} X_n$
when the $X_n$ are independent.
\end{goal}
\begin{theorem}[Kolmogorov's three-series theorem] % Theorem 3
\label{thm3}
Let $\{X_n\}_n$ be a family of independent random variables.
\begin{enumerate}[(a)]
\item Suppose for some $C \ge 0$, the following three series
of numbers converge:
\begin{itemize}
\item $\sum_{n \ge 1} \bP(|X_n| > C)$,
\item $\sum_{n \ge 1} \underbrace{\int_{|X_n| \le C} X_n d\bP}_{\text{\vocab{truncated mean}}}$,
\item $\sum_{n \ge 1} \underbrace{\int_{|X_n| \le C} X_n^2 d\bP - \left( \int_{|X_n| \le C} X_n d\bP \right)^2}_{\text{\vocab{truncated variance} }}$.
\end{itemize}
Then $\sum_{n \ge 1} X_n$ converges almost surely.
\item Suppose $\sum_{n \ge 1} X_n$ converges almost surely.
Then all three series above converge for every $C > 0$.
\end{enumerate}
\end{theorem}
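As a quick numerical sanity check, here is a minimal Python sketch
(an illustration only, assuming \texttt{numpy}):
for $X_n = U_n / n$ with $U_n$ i.i.d.\ uniform on $(-1,1)$ and $C = 1$,
the first series vanishes, the truncated means vanish by symmetry,
and the truncated variances sum to a finite value,
so part (a) predicts almost sure convergence.
\begin{verbatim}
import numpy as np

rng = np.random.default_rng(0)
N = 100_000
n = np.arange(1, N + 1)

# X_n = U_n / n with U_n ~ Uniform(-1, 1): independent, |X_n| <= 1 = C.
X = rng.uniform(-1.0, 1.0, size=N) / n

# The three series of the theorem, computed exactly for this family:
# sum P(|X_n| > 1) = 0, truncated means vanish by symmetry, and the
# truncated variances are sum Var(X_n) = sum 1/(3 n^2), which is finite.
print(np.sum(1.0 / (3.0 * n**2)))       # ~ pi^2 / 18

partial_sums = np.cumsum(X)
print(np.ptp(partial_sums[-N // 10:]))  # tiny spread: sums have stabilized
\end{verbatim}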
For the proof we'll need a slight generalization of \autoref{thm2}:
\begin{theorem} %[Theorem 4]
\label{thm4}
Let $\{X_n\}_n$ be independent and \vocab{uniformly bounded}
(i.e. $\exists M < \infty : \sup_n \sup_\omega |X_n(\omega)| \le M$).
Then $\sum_{n \ge 1} X_n$ converges almost surely
$\iff$ $\sum_{n \ge 1} \bE(X_n)$ and $\sum_{n \ge 1} \Var(X_n)$
converge.
\end{theorem}
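For example, if $X_n$ takes the values $\pm a_n$ with probability
$\frac{1}{2}$ each, for a bounded sequence $(a_n)$ of reals,
then $\bE(X_n) = 0$ and $\Var(X_n) = a_n^2$,
so \autoref{thm4} says that $\sum_{n \ge 1} X_n$ converges almost surely
if and only if $\sum_{n \ge 1} a_n^2 < \infty$.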
\begin{refproof}{thm3}
Assume that we have already proved \autoref{thm4}.
We prove part (a) first.
Put $Y_n = X_n \cdot \One_{\{|X_n| \le C\}}$.
Since the $X_n$ are independent, the $Y_n$ are independent as well.
Furthermore, the $Y_n$ are uniformly bounded.
By our assumption, the series
$\sum_{n \ge 1} \int_{|X_n| \le C} X_n d\bP = \sum_{n \ge 1} \bE[Y_n]$
and $\sum_{n \ge 1} \int_{|X_n| \le C} X_n^2 d\bP - \left( \int_{|X_n| \le C} X_n d\bP \right)^2 = \sum_{n \ge 1} \Var(Y_n)$
converge.
By \autoref{thm4} it follows that $\sum_{n \ge 1} Y_n$ converges
almost surely.
Let $A_n \coloneqq \{\omega : |X_n(\omega)| > C\}$.
Since the first series $\sum_{n \ge 1} \bP(A_n)$ converges,
Borel-Cantelli gives $\bP[\text{infinitely many $A_n$ occur}] = 0$.
Hence, for almost every $\omega$, $X_n(\omega) = Y_n(\omega)$
for all but finitely many $n$,
so $\sum_{n \ge 1} X_n$ converges almost surely as well.
For the proof of (b), suppose $\sum_{n\ge 1} X_n(\omega) < \infty$
for almost every $\omega$.
Fix an arbitrary $C > 0$.
Define
\[
Y_n(\omega) \coloneqq \begin{cases}
X_n(\omega) & \text{if } |X_n(\omega)| \le C,\\
C &\text{if } |X_n(\omega)| > C.
\end{cases}
\]
Then the $Y_n$ are independent and uniformly bounded,
and $\sum_{n \ge 1} Y_n(\omega) < \infty$ almost surely:
since $\sum_{n \ge 1} X_n(\omega)$ converges, $X_n(\omega) \to 0$,
so $Y_n(\omega) = X_n(\omega)$ for all but finitely many $n$.
By \autoref{thm4} $\sum_{n \ge 1} \bE[Y_n]$ and $\sum_{n \ge 1} \Var(Y_n)$
converge.
Define
\[
Z_n(\omega) \coloneqq \begin{cases}
X_n(\omega) &\text{if } |X_n| \le C,\\
-C &\text{if } |X_n| > C.
\end{cases}
\]
Then the $Z_n$ are independent and uniformly bounded,
and, by the same argument as for the $Y_n$,
$\sum_{n \ge 1} Z_n(\omega) < \infty$ almost surely.
By \autoref{thm4} we have
$\sum_{n \ge 1} \bE(Z_n) < \infty$
and $\sum_{n \ge 1} \Var(Z_n) < \infty$.
We have
\begin{IEEEeqnarray*}{rCl}
\bE(Y_n) &=& \int_{|X_n| \le C} X_n d \bP + C \bP(|X_n| > C),\\
\bE(Z_n) &=& \int_{|X_n| \le C} X_n d \bP - C \bP(|X_n| > C).
\end{IEEEeqnarray*}
Since $\bE(Y_n) + \bE(Z_n) = 2 \int_{|X_n| \le C} X_n d\bP$,
the second series converges,
and since $\bE(Y_n) - \bE(Z_n) = 2 C \bP(|X_n| > C)$ and $C > 0$,
the first series converges.
For the third series, write $m_n \coloneqq \int_{|X_n| \le C} X_n d\bP$
and $p_n \coloneqq \bP(|X_n| > C)$.
A direct computation gives
\[
\Var(Y_n) + \Var(Z_n) = 2 \left( \int_{|X_n| \le C} X_n^2 d\bP - m_n^2 \right) + 2 C^2 p_n (1 - p_n),
\]
so the convergence of $\sum_{n \ge 1} \Var(Y_n)$, $\sum_{n \ge 1} \Var(Z_n)$
and $\sum_{n \ge 1} p_n$ implies that the third series converges as well.
\end{refproof}
Recall \autoref{thm2}.
We will see that the converse of \autoref{thm2} holds if the $X_n$ are uniformly bounded.
More formally:
\begin{theorem} % Theorem 5
\label{thm5}
Let $\{X_n\}_n$ be a sequence of independent, uniformly bounded
random variables with mean $0$.
If $\sum_{n \ge 1} X_n < \infty$ almost surely,
then $\sum_{n \ge 1} \Var(X_n) < \infty$.
\end{theorem}
\begin{refproof}{thm4}
Assume we have proven \autoref{thm5}.
``$\impliedby$'' Assume $\{X_n\} $ are independent, uniformly bounded
and $\sum_{n \ge 1} \bE(X_n) < \infty$ as well as $\sum_{n \ge 1} \Var(X_n) < \infty$.
We need to show that $\sum_{n \ge 1} X_n < \infty$ a.s.
Let $Y_n \coloneqq X_n - \bE(X_n)$.
Then the $Y_n$ are independent, $\bE(Y_n) = 0$ and $\Var(Y_n) = \Var(X_n)$.
By \autoref{thm2}, $\sum_{n \ge 1} Y_n$ converges a.s.
Since $X_n = Y_n + \bE(X_n)$ and $\sum_{n \ge 1} \bE(X_n)$ converges,
$\sum_{n \ge 1} X_n$ converges a.s.
``$\implies$'' We assume that $\{X_n\}$ are independent, uniformly bounded
and $\sum_{n \ge 1} X_n(\omega) < \infty$ a.s.
We have to show that $\sum_{n \ge 1} \bE(X_n) < \infty$
and $\sum_{n \ge 1} \Var(X_n) < \infty$.
Consider the product space $(\Omega, \cF, \bP) \otimes (\Omega, \cF, \bP)$.
On this product space, we define
$Y_n \left( (\omega, \omega') \right) \coloneqq X_n(\omega)$
and $Z_n \left( (\omega, \omega') \right) \coloneqq X_n(\omega')$.
\begin{claim}
For every fixed $n$, $Y_n$ and $Z_n$ are independent.
\end{claim}
\begin{subproof}
This is obvious, but we'll prove it carefully here.
\begin{IEEEeqnarray*}{rCl}
&&(\bP \otimes \bP) [Y_n \in (a,b) , Z_n \in (a',b') ]\\
&=& (\bP\otimes\bP) \left( (\omega, \omega') : X_n(\omega) \in (a,b) \land X_n(\omega') \in (a',b') \right)\\
&=& (\bP\otimes\bP)(A \times A'), \text{ where }
A \coloneqq X_n^{-1}\left( (a,b)\right) \text{ and } A' \coloneqq X_n^{-1}\left( (a',b') \right)\\
&=& \bP(A)\bP(A')
\end{IEEEeqnarray*}
\end{subproof}
Now $\bE[Y_n - Z_n] = 0$, since $Y_n$ and $Z_n$ both have the law of $X_n$,
and by independence $\Var(Y_n - Z_n) = 2\Var(X_n)$.
Obviously, $(Y_n - Z_n)_{n \ge 1}$ is also uniformly bounded.
\begin{claim}
$\sum_{n \ge 1} (Y_n - Z_n) < \infty$ almost surely
on $(\Omega \times \Omega, \cF \otimes\cF, \bP \otimes\bP)$.
\end{claim}
\begin{subproof}
Let $\Omega_0 \coloneqq \{\omega: \sum_{n \ge 1} X_n(\omega) < \infty\}$.
Then $\bP(\Omega_0) = 1$,
hence $(\bP\otimes\bP)(\Omega_0 \times \Omega_0) = 1$.
Furthermore
$\sum_{n \ge 1} \left(Y_n(\omega, \omega') - Z_n(\omega, \omega') \right)= \sum_{n \ge 1} \left(X_n(\omega) - X_n(\omega')\right)$.
Thus $\sum_{n \ge 1} \left( Y_n(\omega, \omega') - Z_n(\omega, \omega') \right) < \infty$ for every $(\omega, \omega') \in \Omega_0 \times \Omega_0$, hence almost surely.
\end{subproof}
By \autoref{thm5}, $\sum_{n} \Var(X_n) = \frac{1}{2}\sum_{n \ge 1} \Var(Y_n - Z_n) < \infty$.
Define $U_n \coloneqq X_n - \bE(X_n)$.
Then $\bE(U_n) = 0$ and the $U_n$ are independent
and uniformly bounded.
We have $\sum_{n} \Var(U_n) = \sum_{n} \Var(X_n) < \infty$.
Thus $\sum_{n} U_n$ converges a.s.~by \autoref{thm2}.
Since by assumption $\sum_{n} X_n < \infty$ a.s.~and $\bE(X_n) = X_n - U_n$,
it follows that $\sum_{n} \bE(X_n)$ converges.
\end{refproof}
\begin{remark}
In the proof of \autoref{thm4}
``$\impliedby$'' is just a trivial application of \autoref{thm2}
and uniform boundedness was not used.
The idea behind ``$\implies$'' will lead to coupling. % TODO ?
\end{remark}
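To see the symmetrization step in isolation, here is a minimal numerical
sketch (again an illustration only, assuming \texttt{numpy}):
the difference of two independent copies of a variable is centered
and has twice the variance, which is exactly how $Y_n - Z_n$ is used above.
\begin{verbatim}
import numpy as np

rng = np.random.default_rng(1)
# Two independent copies of X: the coordinates of the product space.
y, z = rng.uniform(-1.0, 1.0, size=(2, 1_000_000))
d = y - z  # the symmetrized variable, i.e. Y - Z

print(d.mean())              # ~ 0, since E[Y - Z] = 0
print(d.var(), 2 * y.var())  # ~ equal: Var(Y - Z) = 2 Var(X)
\end{verbatim}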
% TODO Proof of thm5 in the notes
\begin{example}[Application of \autoref{thm5}]
The deterministic series $\sum_{n} \frac{1}{n^{\frac{1}{2} + \epsilon}}$
does not converge for $\epsilon \le \frac{1}{2}$.
However
\[
\sum_{n} X_n \frac{1}{n^{\frac{1}{2} + \epsilon}}
\]
where the $X_n$ are independent with $\bP[X_n = 1] = \bP[X_n = -1] = \frac{1}{2}$,
converges almost surely for all $\epsilon > 0$ by \autoref{thm2},
since the summands have mean $0$ and
$\sum_{n} \Var\left(X_n n^{-\frac{1}{2} - \epsilon}\right) = \sum_{n} n^{-1 - 2\epsilon} < \infty$.
And
\[
\sum_{n} X_n \frac{1}{n^{\frac{1}{2} - \epsilon}}
\]
does not converge almost surely for $0 < \epsilon \le \frac{1}{2}$:
the summands are independent, uniformly bounded and have mean $0$,
but $\sum_{n} \Var\left(X_n n^{-\frac{1}{2} + \epsilon}\right) = \sum_{n} n^{-1 + 2\epsilon} = \infty$,
so almost sure convergence would contradict \autoref{thm5}.
\end{example}
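A short simulation (an illustration only, assuming \texttt{numpy})
shows the dichotomy of this example: with random signs, the partial sums
of the $n^{-\frac{1}{2}-\epsilon}$ series settle down,
while those of the $n^{-\frac{1}{2}+\epsilon}$ series keep wandering.
\begin{verbatim}
import numpy as np

rng = np.random.default_rng(2)
N, eps = 1_000_000, 0.1
n = np.arange(1, N + 1)
signs = rng.choice([-1.0, 1.0], size=N)  # P(X_n = +1) = P(X_n = -1) = 1/2

converging = np.cumsum(signs * n ** (-0.5 - eps))
diverging = np.cumsum(signs * n ** (-0.5 + eps))

# Spread of the last 10% of the partial sums:
print(np.ptp(converging[-N // 10:]))  # small: the series has settled down
print(np.ptp(diverging[-N // 10:]))   # much larger: still wandering
\end{verbatim}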