First part of lecture 8

Josia Pietsch 2023-04-27 17:07:50 +02:00
parent 2c7de67fbb
commit dca8a1b435
Signed by: jrpie
GPG Key ID: E70B571D66986A2D
6 changed files with 509 additions and 0 deletions

inputs/prerequisites.tex Normal file

@ -0,0 +1,23 @@
\begin{theorem}[Chebyshev's inequality]
Let $X$ be a r.v.~with $\Var(X) < \infty$.
Then $\forall \epsilon > 0 : \bP \left[ \left| X - \bE[X] \right| > \epsilon\right] \le \frac{\Var(X)}{\epsilon^2}$.
\end{theorem}
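For completeness, a short proof via Markov's inequality (assumed known):
\begin{proof}
Applying Markov's inequality to the non-negative r.v.~$(X - \bE[X])^2$ yields
\[
\bP\left[ \left| X - \bE[X] \right| > \epsilon \right]
= \bP\left[ (X - \bE[X])^2 > \epsilon^2 \right]
\le \frac{\bE\left[ (X - \bE[X])^2 \right]}{\epsilon^2}
= \frac{\Var(X)}{\epsilon^2}.
\]
\end{proof}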
We will use Chebyshev's inequality, linearity of $\bE$, and the facts $\Var(cX) = c^2\Var(X)$ and $\Var(X_1 +\ldots + X_n) = \Var(X_1) + \ldots + \Var(X_n)$ for independent $X_i$.
How do we prove that something happens almost surely?
\begin{lemma}[Borel-Cantelli]
If we have a sequence of events $(A_n)_{n \ge 1}$
such that $\sum_{n \ge 1} \bP(A_n) < \infty$,
then $\bP[ A_n \text{ for infinitely many $n$}] = 0$
(more precisely: $\bP[\limsup_{n \to \infty} A_n] = 0$).
The converse also holds for independent events $A_n$.
\end{lemma}
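The independence assumption in the converse cannot be dropped: take a single event $A$ with $\bP(A) = \frac{1}{2}$ and set $A_n \coloneqq A$ for all $n$. Then $\sum_{n \ge 1} \bP(A_n) = \infty$, but $\bP[\limsup_{n \to \infty} A_n] = \bP(A) = \frac{1}{2} \neq 1$.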
Modes of convergence: $L^p$, in probability, a.s.

inputs/vl3.tex Normal file

@ -0,0 +1,117 @@
$(\Omega, \cF, \bP)$ probability space, $X : ( \Omega, \cF) \to (\R, \cB(\R))$ random variable.
Then $\Q(\cdot) \coloneqq \bP [ X \in \cdot ]$ is the distribution of $X$ under $\bP$.
\section{Independence and product measures}
In order to define the notion of independence, we first need to construct
product measures in order to be able to consider several random variables
at the same time.
The finite case of a product is straightforward:
\begin{theorem}[Product measure (finite)]
Let $(\Omega_1, \cF_1, \bP_1)$ and $(\Omega_2, \cF_2, \bP_2)$ be probability spaces.
Let $\Omega \coloneqq \Omega_1 \times \Omega_2$
and $\cR \coloneqq \{A_1 \times A_2 | A_1 \in \cF_1, A_2 \in \cF_2 \}$.
Let $\cF$ be $\sigma(\cR)$ (the $\sigma$-algebra generated by $\cR$).
Then there exists a unique probability measure $\bP$ on $(\Omega, \cF)$
such that for every rectangle $A_1 \times A_2 \in \cR$, $\bP(A_1 \times A_2) = \bP_1(A_1) \cdot \bP_2(A_2)$.
\end{theorem}
\begin{proof}
See Theorem 5.1.1 in the lecture notes on Stochastik.
\end{proof}
We now want to construct a product measure for infinite products.
\begin{definition}[Independence]
A collection $X_1, X_2, \ldots, X_n$ of random variables is called
\vocab{mutually independent} if
\[
\forall a_1,\ldots,a_n \in \R :
\bP[X_1 \le a_1, \ldots, X_n \le a_n]
= \prod_{i=1}^n \bP[X_i \le a_i].
\]
This is equivalent to
\[
\forall B_1, \ldots, B_n \in \cB(\R):
\bP[X_1 \in B_1, \ldots, X_n \in B_n]
= \prod_{i=1}^n \bP[X_i \in B_i]
\]
\end{definition}
\begin{example}
Suppose we throw a die twice. Let $A \coloneqq \{\text{first throw even}\}$,
$B \coloneqq \{\text{second throw even}\}$
and $C \coloneqq \{\text{sum even}\} $.
Are $\One_A, \One_B, \One_C$ mutually independent random variables?
\end{example}
It is easy to see that the random variables are pairwise independent,
but not mutually independent; the computation follows below.
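Explicitly, with all $36$ outcomes equally likely:
\[
\bP(A) = \bP(B) = \bP(C) = \frac{1}{2}, \qquad
\bP(A \cap B) = \bP(A \cap C) = \bP(B \cap C) = \frac{1}{4},
\]
so any two of the events are independent; but $A \cap B \subseteq C$, hence
\[
\bP(A \cap B \cap C) = \bP(A \cap B) = \frac{1}{4} \neq \frac{1}{8} = \bP(A) \bP(B) \bP(C).
\]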
The definition of mutual independence can be rephrased as follows:
let $X_1, X_2, \ldots, X_n$ be r.v.s and define $\Q^{\otimes}(\cdot) \coloneqq \bP[(X_1,\ldots, X_n) \in \cdot\,]$.
Then $\Q^{\otimes}$ is a probability measure on $\R^n$.
\begin{fact}
$X_1,\ldots, X_n$ are mutually independent iff $\Q^{\otimes} = \Q_1 \otimes \ldots \otimes \Q_n$.
\end{fact}
By constructing an infinite product, we can thus extend the notion of independence
to an infinite number of r.v.s.
\begin{goal}
Can we construct infinitely many independent random variables?
\end{goal}
\begin{definition}[Consistent family of probability measures]
Let $\bP_n, n \in \N$ be a family of probability measures on $(\R^n, \cB(\R^n))$.
The family is called \vocab{consistent} if
$\bP_{n+1}[B_1 \times B_2 \times \ldots \times B_n \times \R] = \bP_n[B_1 \times \ldots \times B_n]$
for all $n \in \N, B_i \in \cB(\R)$.
\end{definition}
\begin{theorem}[Kolmogorov extension / consistency theorem]
Informally:
``Probability measures are determined by finite-dimensional marginals
(as long as these marginals are nice).''
Let $\bP_n, n \in \N$ be probability measures on $(\R^n, \cB(\R^n))$
which are \vocab{consistent}.
Then there exists a unique probability measure $\bP^{\otimes}$
on $(\R^\infty, \cB(\R^\infty))$ (where $\cB(\R^{\infty})$ has to be defined),
such that
\[
\forall n \in \N, B_1,\ldots, B_n \in \cB(\R):
\bP^\otimes [\{x \in \R^\infty : x_i \in B_i \,\forall 1 \le i \le n\}]
= \bP_n[B_1 \times \ldots \times B_n].
\]
\end{theorem}
\begin{remark}
Kolmogorov's theorem can be strengthened to the case of arbitrary
index sets. However this requires a different notion of consistency.
\end{remark}
\begin{example}
An example of a consistent family:
Let $F_1, \ldots, F_n$ be probability distribution functions
and let $\bP_n$ be the probability measure on $\R^n$ defined
by
\[
\bP_n[(a_1,b_1] \times \ldots \times (a_n, b_n]]
\coloneqq (F_1(b_1) - F_1(a_1)) \cdot \ldots \cdot (F_n(b_n) - F_n(a_n)).
\]
It is easy to see that each $\bP_n$ is a probability measure.
Define $X_i(\omega) = \omega_i$ where $\omega = (\omega_1, \ldots, \omega_n)$.
Then $X_1, \ldots, X_n$ are mutually independent with $F_i$ being
the distribution function of $X_i$.
If $F_1 = \ldots = F_n$, then $X_1,\ldots, X_n$ are i.i.d.
\end{example}
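Checking consistency is immediate: since $F_{n+1}$ is a distribution function, $F_{n+1}(b) - F_{n+1}(a) \to 1$ as $a \to -\infty$, $b \to \infty$, so
\[
\bP_{n+1}[(a_1,b_1] \times \ldots \times (a_n,b_n] \times \R]
= \bP_n[(a_1,b_1] \times \ldots \times (a_n,b_n]].
\]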

inputs/vl4.tex Normal file

@ -0,0 +1,119 @@
\begin{notation}
Let $\cB_n$ denote $\cB(\R^n)$.
\end{notation}
\begin{goal}
Suppose we have a probability measure $\mu_n$ on $(\R^n, \cB(\R^n))$
for each $n$.
We want to show that there exists a unique probability measure $\bP^{\otimes}$
on $(\R^\infty, \cB_\infty)$ (where $\cB_{\infty}$ still needs to be defined),
such that $\bP^{\otimes}\left( \prod_{n \in \N} B_n \right) = \prod_{n \in \N} \mu_n(B_n)$
for all $\{B_n\}_{n \in \N}$, $B_n \in \cB_1$.
\end{goal}
% $\bP_n = \mu_1 \otimes \ldots \otimes \mu_n$.
\begin{remark}
$\prod_{n \in \N} \mu_n(B_n)$ converges, since $0 \le \mu_n(B_n) \le 1$
for all $n$: the partial products are non-increasing and bounded below by $0$.
\end{remark}
First we need to define $\cB_{\infty}$.
This $\sigma$-algebra must contain all sets $\prod_{n \in \N} B_n$
with $B_n \in \cB_1$, so we simply define
$\cB_\infty \coloneqq \sigma \left( \{\prod_{n \in \N} B_n | B_n \in \cB(\R)\} \right)$.
\begin{question}
What does $\cB_\infty$ contain?
Can we identify sets in $\cB_\infty$ for which we can define the product measure
easily?
\end{question}
Let $\cF_n \coloneqq \{ C \times \R^{\infty} | C \in \cB_n\}$.
It is easy to see that $\cF_n \subseteq \cF_{n+1}$
and using that $\cB_n$ is a $\sigma$-algebra, we can show that $\cF_n$
is also a $\sigma$-algebra.
Now, for any $C \subseteq \R^n$ let $C^\ast \coloneqq C \times \R^{\infty}$.
Thus $\cF_n = \{C^\ast : C \in \cB_n\}$.
Define $\lambda_n : \cF_n \to [0,1]$ by $\lambda_n(C^\ast) \coloneqq (\mu_1 \otimes \ldots \otimes \mu_n)(C)$.
It is easy to see that $\lambda_{n+1} \defon{\cF_n} = \lambda_n$ (\vocab{consistency}); a short verification follows.
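Indeed, for $C \in \cB_n$ we have $C^\ast = (C \times \R)^\ast$ with $C \times \R \in \cB_{n+1}$, so
\[
\lambda_{n+1}(C^\ast) = (\mu_1 \otimes \ldots \otimes \mu_{n+1})(C \times \R)
= (\mu_1 \otimes \ldots \otimes \mu_n)(C) \cdot \underbrace{\mu_{n+1}(\R)}_{=1}
= \lambda_n(C^\ast).
\]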
Recall the following theorem from measure theory:
\begin{theorem}[Caratheodory's extension theorem] % 2.3.3 in the notes
\label{caratheodory}
Suppose $\cA$ is an algebra (i.e.~closed under complements and finite unions)
on $\Omega \neq \emptyset$.
Suppose $\bP$ is countably additive on $\cA$ (i.e.~if $(A_n)_{n}$
are pairwise disjoint and $\bigcup_{n \in \N} A_n \in \cA $,
then $\bP\left( \bigcup_{n \in \N} A_n \right) = \sum_{n \in \N} \bP(A_n)$).
Then $\bP$ extends uniquely to a probability measure on $(\Omega, \cF)$,
where $\cF = \sigma(\cA)$.
\end{theorem}
Define $\cF = \bigcup_{n \in \N} \cF_n$. Check that $\cF$ is an algebra.
We'll show that if we define $\lambda: \cF \to [0,1]$ with
$\lambda(A) = \lambda_n(A)$ for any $n$ where this is well defined,
then $\lambda$ is countably additive on $\cF$.
Using \autoref{caratheodory}, $\lambda$ will extend uniquely to a probability measure on $\sigma(\cF)$.
We want to prove:
\begin{enumerate}[(1)]
\item $\sigma(\cF) = \cB_\infty$,
\item $\lambda$ as defined above is countably additive on $\cF$.
\end{enumerate}
\begin{proof}[Proof of (1)]
Consider an infinite dimensional box $\prod_{n \in \N} B_n$.
We have
\[
\left( \prod_{n=1}^N B_n \right)^\ast \in \cF_N \subseteq \cF
\]
thus
\[
\prod_{n \in \N} B_n = \bigcap_{N \in \N} \left( \prod_{n=1}^N B_n \right)^\ast \in \sigma(\cF).
\]
Since $\sigma(\cF)$ is a $\sigma$-algebra, $\cB_\infty \subseteq \sigma(\cF)$. This proves ``$\supseteq$''.
For the other direction we'll show $\cF_n \subseteq \cB_\infty$ for all $n$.
Let $\cC \coloneqq \{ Q \in \cB_n | Q^\ast \in \cB_\infty\}$.
For $B_1,\ldots,B_n \in \cB_1$, we have $B_1 \times \ldots \times B_n \in \cB_n$
and $(B_1 \times \ldots \times B_n)^\ast \in \cB_\infty$,
hence $B_1 \times \ldots \times B_n \in \cC$.
And $\cC$ is a $\sigma$-algebra, because:
\begin{itemize}
\item $\cB_n$ is a $\sigma$-algebra
\item $\cB_\infty$ is a $\sigma$-algebra,
\item $\emptyset^\ast = \emptyset$, $(\R^n \setminus Q)^\ast = \R^{\infty} \setminus Q^\ast$ and $\bigcup_{i \in I} Q_i^\ast = (\bigcup_{i \in I} Q_i)^\ast$.
\end{itemize}
Thus $\cC \subseteq \cB_n$ is a $\sigma$-algebra and contains all rectangles, hence $\cC = \cB_n$.
Hence $\cF_n \subseteq \cB_\infty$ for all $n$,
thus $\cF \subseteq \cB_\infty$. Since $\cB_\infty$ is a $\sigma$-algebra,
$\sigma(\cF) \subseteq \cB_\infty$.
\end{proof}
We are going to use the following
\begin{fact}
\label{fact:finaddtocountadd}
Suppose $\cA$ is an algebra on $\Omega \neq \emptyset$,
and suppose $\bP: \cA \to [0,1]$ is a finitely additive
probability measure.
Suppose whenever $\{B_n\}_n$ is a sequence of sets from $\cA $
decreasing to $\emptyset$ it is the case that
$\bP(B_n) \to 0$. Then $\bP$ must be countably additive.
\end{fact}
\begin{proof}
Exercise
\end{proof}
\begin{proof}[Proof of (2)]
Let's prove that $\lambda$ is finitely additive.
$\lambda(\R^\infty) = \lambda_1(\R^\infty) = 1$.
$\lambda(\emptyset) = \lambda_1(\emptyset) = 0$.
Suppose $A_1, A_2 \in \cF$ are disjoint.
Then pick some $n$ such that $A_1, A_2 \in \cF_n$.
Take $C_1, C_2 \in \cB_n$ such that $C_1^\ast = A_1$
and $C_2^\ast = A_2$.
Then $C_1$ and $C_2$ are disjoint and $A_1 \cup A_2 = (C_1 \cup C_2)^\ast$.
Then $\lambda(A_1 \cup A_2) = \lambda_n((C_1 \cup C_2)^\ast) = (\mu_1 \otimes \ldots \otimes \mu_n)(C_1 \cup C_2) = (\mu_1 \otimes \ldots \otimes \mu_n)(C_1) + (\mu_1 \otimes \ldots \otimes \mu_n)(C_2) = \lambda(A_1) + \lambda(A_2)$
by the finite additivity of the finite product measure.
In order to use \autoref{fact:finaddtocountadd},
we need to show that if $B_n \in \cF$ with $B_n \downarrow \emptyset$, then $\lambda(B_n) \to 0$.
%TODO
\end{proof}

inputs/vl6.tex Normal file

@ -0,0 +1,62 @@
We want to show laws of large numbers, i.e.~statements of the form $\frac{X_1 + \ldots + X_n}{n} \to m$:
the LHS is random and represents an empirical average,
while the RHS is a constant, which we can compute explicitly from the distribution of the $X_i$.
We fix a probability space $(\Omega, \cF, \bP)$ once and for all.
\begin{theorem}
\label{lln}
Let $X_1, X_2,\ldots$ be i.i.d.~random variables with values in $(\R, \cB(\R))$,
with $m = \bE[X_i] < \infty$
and $\sigma^{2} = \Var(X_i) = \bE[ (X_i - \bE(X_i))^2] = \bE[X_i^2] - \bE[X_i]^2 < \infty$.
Then
\begin{enumerate}[(a)]
\item $\frac{X_1 + \ldots + X_n}{n} \xrightarrow{n \to \infty} m$
in probability (\vocab{weak law of large numbers}, WLLN),
\item $\frac{X_1 + \ldots + X_n}{n} \xrightarrow{n \to \infty} m$
almost surely (\vocab{strong law of large numbers}, SLLN).
\end{enumerate}
\end{theorem}
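For instance, if the $X_i$ are i.i.d.~Bernoulli$(p)$ coin flips, then $m = p$, and the theorem says that the empirical frequency of heads, $\frac{X_1 + \ldots + X_n}{n}$, converges to $p$ (in probability, resp.~almost surely); this is the frequency interpretation of probability.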
\begin{refproof}{lln}
\begin{enumerate}[(a)]
\item Given $\epsilon > 0$, we need to show that
\[
\bP\left[ \left| \frac{X_1 + \ldots + X_n}{n} - m\right| > \epsilon\right] \to 0 \]
as $n \to \infty$.
Let $S_n \coloneqq X_1 + \ldots + X_n$.
Then $\bE[S_n] = \bE[X_1] + \ldots + \bE[X_n] = nm$.
We have
\begin{IEEEeqnarray*}{rCl}
\bP\left[ \left| \frac{X_1 + \ldots + X_n}{n} - m\right| > \epsilon\right] &=& \bP\left[\left|\frac{S_n}{n}-m\right| > \epsilon\right]\\
&\overset{\text{Chebyshev}}{\le }& \frac{\Var\left( \frac{S_n}{n} \right) }{\epsilon^2} = \frac{1}{n} \frac{\Var(X_1)}{\epsilon^2} \xrightarrow{n \to \infty} 0
\end{IEEEeqnarray*}
since
\[\Var\left(\frac{S_n}{n}\right) = \frac{1}{n^2} \Var(S_n) = \frac{1}{n^2}\, n \Var(X_1).\]
\end{enumerate}
\end{refproof}
For the proof of (b) we need the following general result:
\begin{theorem}
\label{thm2}
Let $X_1, X_2, \ldots$ be independent (but not necessarily identically distributed) random variables with $\bE[X_i] = 0$ for all $i$
and $\sum_{i=1}^\infty \Var(X_i) < \infty$.
Then $\sum_{n \ge 1} X_n$ converges almost surely.
\end{theorem}
% The proof is given below, after Kolmogorov's inequality.
\begin{question}
Does the converse hold? I.e.~if $\sum_{n \ge 1} X_n$ converges a.s.,
does it follow that $\sum_{n \ge 1} \Var(X_n) < \infty$?
\end{question}
This does not hold. Consider for example $X_n$ distributed according to $\frac{1}{n^2} \delta_n + \frac{1}{n^2} \delta_{-n} + (1-\frac{2}{n^2}) \delta_0$:
then $\sum_{n \ge 1} \bP[X_n \neq 0] = \sum_{n \ge 1} \frac{2}{n^2} < \infty$, so by Borel-Cantelli almost surely only finitely many $X_n$ are nonzero and $\sum_{n \ge 1} X_n$ converges a.s., while $\Var(X_n) = 2$ for every $n$, so $\sum_{n \ge 1} \Var(X_n) = \infty$.
\begin{refproof}{lln}
\begin{enumerate}
\item[(b)]
\end{enumerate}
\end{refproof}

inputs/vl7.tex Normal file

@ -0,0 +1,97 @@
\begin{refproof}{lln}
We want to deduce the SLLN (\autoref{lln}) from \autoref{thm2}.
W.l.o.g.~let us assume that $\bE[X_i] = 0$ (otherwise define $X'_i \coloneqq X_i - \bE[X_i]$).
We will show that $\frac{S_n}{n} \xrightarrow{a.s.} 0$.
Define $Y_i \coloneqq \frac{X_i}{i}$.
Then the $Y_i$ are independent and we have $\bE[Y_i] = 0$
and $\Var(Y_i) = \frac{\sigma^2}{i^2}$.
Thus $\sum_{i=1}^\infty \Var(Y_i) < \infty$.
From \autoref{thm2} we obtain that $\sum_{i=1}^\infty Y_i$ converges a.s.
\begin{claim}
Let $(a_n)$ be a sequence in $\R$ such that $\sum_{n=1}^{\infty} \frac{a_n}{n}$ converges. Then $\frac{a_1 + \ldots + a_n}{n} \to 0$.
\end{claim}
\begin{subproof}
Let $S_m \coloneqq \sum_{n=1}^m \frac{a_n}{n}$.
By assumption, there exists $S \in \R$
such that $S_m \to S$ as $m \to \infty$.
Note that $j \cdot (S_{j} - S_{j-1}) = a_j$.
Define $S_0 \coloneqq 0$.
Then $a_1 + \ldots + a_n = (S_1 - S_0) + 2(S_2 - S_1) + 3(S_3 - S_2) +
\ldots + n (S_n - S_{n-1})$.
Thus $a_1 + \ldots + a_n = n S_n - (S_1 + S_2 + \ldots + S_{n-1})$, hence
\[
\frac{a_1 + \ldots + a_n}{n} = S_n - \frac{n-1}{n} \cdot \frac{S_1 + \ldots + S_{n-1}}{n-1} \xrightarrow{n \to \infty} S - S = 0,
\]
since Ces\`aro averages of a convergent sequence converge to the same limit.
\end{subproof}
Applying the claim with $a_n = X_n(\omega)$, for every $\omega$ in the a.s.~set where $\sum_{i \ge 1} \frac{X_i}{i}$ converges, yields the SLLN.
\end{refproof}
We need the following inequality:
\begin{theorem}[Kolmogorov's inequality]
If $X_1,\ldots, X_n$ are independent with $\bE[X_i] = 0$
and $\Var(X_i) = \sigma_i^2$, then for every $\epsilon > 0$
\[
\bP\left[\max_{1 \le i \le n} \left| \sum_{j=1}^{i} X_j \right| > \epsilon \right] \le \frac{1}{\epsilon ^2} \sum_{i=1}^n \sigma_i^2.
\]
\end{theorem}
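Note that for $n = 1$ this is exactly Chebyshev's inequality; the point is that the maximum over all partial sums comes at no extra cost.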
\begin{proof}
Let $A_1 \coloneqq \{\omega : |X_1(\omega)| > \epsilon\}, \ldots,
A_i \coloneqq \{\omega: |X_1(\omega)| \le \epsilon, |X_1(\omega) + X_2(\omega)| \le \epsilon, \ldots, |X_1(\omega) + \ldots + X_{i-1}(\omega)| \le \epsilon,
|X_1(\omega) + \ldots + X_i(\omega)| > \epsilon\}$,
i.e.~$A_i$ is the event that the partial sums first exceed $\epsilon$ in absolute value at step $i$.
The $A_i$ are pairwise disjoint, and we are interested in $\bigcup_{1 \le i \le n} A_i = \left\{ \max_{1 \le i \le n} \left| \sum_{j=1}^{i} X_j \right| > \epsilon \right\}$.
We have
\begin{IEEEeqnarray*}{rCl}
\int_{A_i} (\underbrace{X_1 + \ldots + X_i}_C + \underbrace{X_{i+1} + \ldots + X_n}_D)^2 d \bP &=& \int_{A_i} C^2 d\bP + \underbrace{\int_{A_i} D^2 d \bP}_{\ge 0} + 2 \int_{A_i} CD d\bP\\
&\ge & \int_{A_i} \underbrace{C^2}_{\ge \epsilon^2} d \bP + 2 \int \underbrace{\One_{A_i} (X_1 + \ldots + X_i)}_E \underbrace{(X_{i+1} + \ldots + X_n)}_D d \bP\\
&\ge& \int_{A_i} \epsilon^2 d\bP
\end{IEEEeqnarray*}
(By the independence of $X_1,\ldots, X_n$, hence of $E$ and $D$, and since $\bE(X_{i+1}) = \ldots = \bE(X_n) = 0$, we have $\int D E \,d\bP = \bE[D]\,\bE[E] = 0$.)
Summing over $i$ and using that the $A_i$ are pairwise disjoint gives
\[
\sum_{i=1}^n \sigma_i^2 = \int (X_1 + \ldots + X_n)^2 \,d\bP
\ge \sum_{i=1}^n \int_{A_i} (X_1 + \ldots + X_n)^2 \,d\bP
\ge \epsilon^2 \sum_{i=1}^n \bP(A_i)
= \epsilon^2 \,\bP\left( \bigcup_{1 \le i \le n} A_i \right).
\]
\end{proof}
\begin{refproof}{thm2}
Let $S_n \coloneqq \sum_{i=1}^{n} X_i$.
By Kolmogorov's inequality applied to $X_{m+1}, \ldots, X_{m+K}$,
\[
\bP\left[ \max_{1 \le k \le K} |S_{m+k} - S_m| > \epsilon \right]
\le \frac{1}{\epsilon^2} \sum_{i=m+1}^{m+K} \Var(X_i).
\]
Letting $K \to \infty$ gives
$\bP\left[ \sup_{k \ge 1} |S_{m+k} - S_m| > \epsilon \right] \le \frac{1}{\epsilon^2} \sum_{i > m} \Var(X_i)$,
which tends to $0$ as $m \to \infty$, since $\sum_{i \ge 1} \Var(X_i) < \infty$.
Hence $(S_n)_n$ is almost surely a Cauchy sequence, so $\sum_{n \ge 1} X_n$ converges almost surely.
\end{refproof}
\paragraph{Application of SLLN}
\begin{theorem}[Renewal theorem]
Let $X_1,X_2,\ldots$ be i.i.d.~random variables with $X_i \ge 0$ and $\bE[X_i] = m > 0$.
Let $S_n \coloneqq \sum_{i=1}^n X_i$.
For all $t > 0$ let \[
N_t \coloneqq \sup \{n : S_n \le t\}.
\]
Then $\frac{N_t}{t} \xrightarrow{a.s.} \frac{1}{m}$ as $t \to \infty$.
\end{theorem}
The $X_i$ can be thought of as waiting times.
$S_i$ models how long you have to wait for $i$ events to occur.
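For instance, if the $X_i$ are i.i.d.~$\operatorname{Exp}(\lambda)$ waiting times, then $m = \frac{1}{\lambda}$, $N_t$ counts the arrivals of a Poisson process of rate $\lambda$ up to time $t$, and the theorem gives $\frac{N_t}{t} \xrightarrow{a.s.} \lambda$: the long-run arrival rate is $\lambda$.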
\begin{proof}
By SLLN, $\frac{S_n}{n} \xrightarrow{a.s.} m$ as $n \to \infty$.
Note that $N_t \uparrow \infty$ a.s.~as $t \to \infty$ $(\ast\ast)$, since
$\{N_t \ge n\} = \{X_1 + \ldots+ X_n \le t\}$ and $S_n < \infty$ a.s.~for every fixed $n$.
\begin{claim}
$\bP[\frac{S_n}{n} \xrightarrow{n \to \infty} m , N_t \xrightarrow{t \to \infty} \infty] = 1$.
\end{claim}
\begin{subproof}
Let $A \coloneqq \{\omega: \frac{S_n(\omega)}{n} \xrightarrow{n \to \infty} m\}$ and $B \coloneqq \{\omega : N_t(\omega) \xrightarrow{t \to \infty} \infty\}$.
By the SLLN we have $\bP(A^C) = 0$, and by $(\ast\ast)$, $\bP(B^C) = 0$; hence $\bP(A \cap B) = 1$.
\end{subproof}
Consequently, $\bP\left[ \frac{S_{N_t}}{N_t} \xrightarrow{t \to \infty} m, \frac{S_{N_t + 1}}{N_t + 1} \xrightarrow{t \to \infty} m \right] = 1$.
By definition, we have $S_{N_t} \le t \le S_{N_t + 1}$.
Then $\frac{S_{N_t}}{N_t} \le \frac{t}{N_t} \le \frac{S_{N_t + 1}}{N_t} = \frac{S_{N_t + 1}}{N_t + 1} \cdot \frac{N_t + 1}{N_t}$.
Hence $\frac{t}{N_t} \xrightarrow{a.s.} m$, i.e.~$\frac{N_t}{t} \xrightarrow{a.s.} \frac{1}{m}$.
\end{proof}

inputs/vl8.tex Normal file

@ -0,0 +1,91 @@
\begin{goal}
We want to drop our assumptions on finite mean or variance
and say something about the behaviour of $\sum_{n \ge 1} X_n$
when the $X_n$ are independent.
\end{goal}
\begin{theorem}[Kolmogorov's three-series theorem] % Theorem 3
\label{thm3}
Let $(X_n)_{n \ge 1}$ be a family of independent random variables.
\begin{enumerate}[(a)]
\item Suppose for some $C \ge 0$, the following three series
of numbers converge:
\begin{itemize}
\item $\sum_{n \ge 1} \bP(|X_n| > C)$,
\item $\sum_{n \ge 1} \underbrace{\int_{|X_n| \le C} X_n d\bP}_{\text{\vocab{truncated mean}}}$,
\item $\sum_{n \ge 1} \underbrace{\int_{|X_n| \le C} X_n^2 d\bP - \left( \int_{|X_n| \le C} X_n d\bP \right)^2}_{\text{\vocab{truncated variance} }}$.
\end{itemize}
Then $\sum_{n \ge 1} X_n$ converges almost surely.
\item Suppose $\sum_{n \ge 1} X_n$ converges almost surely.
Then all three series above converge for every $C > 0$.
\end{enumerate}
\end{theorem}
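A classical application: let $X_n = \frac{\epsilon_n}{n}$, where the $\epsilon_n$ are i.i.d.~uniform on $\{\pm 1\}$. With $C = 1$ we have $\bP(|X_n| > 1) = 0$, the truncated means are $0$, and the truncated variances are $\frac{1}{n^2}$, so all three series converge; hence by (a) the random harmonic series $\sum_{n \ge 1} \frac{\epsilon_n}{n}$ converges almost surely.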
For the proof we'll need a slight generalization of \autoref{thm2}:
\begin{theorem}[Theorem 4] % Theorem 4
\label{thm4}
Let $\{X_n\}_n$ be independent and \vocab{uniformly bounded}
(i.e. $\exists M < \infty : \sup_n \sup_\omega |X_n(\omega)| \le M$).
Then $\sum_{n \ge 1} X_n$ converges almost surely
$\iff$ $\sum_{n \ge 1} \bE(X_n)$ and $\sum_{n \ge 1} \Var(X_n)$
converge.
\end{theorem}
\begin{refproof}{thm3}
Assume that we have already proved \autoref{thm4}.
We prove part (a) first.
Put $Y_n = X_n \cdot \One_{\{|X_n| \le C\}}$.
Since the $X_n$ are independent, the $Y_n$ are independent as well.
Furthermore, the $Y_n$ are uniformly bounded.
By our assumption, the series
$\sum_{n \ge 1} \int_{|X_n| \le C} X_n d\bP = \sum_{n \ge 1} \bE[Y_n]$
and $\sum_{n \ge 1} \int_{|X_n| \le C} X_n^2 d\bP - \left( \int_{|X_n| \le C} X_n d\bP \right)^2 = \sum_{n \ge 1} \Var(Y_n)$
converge.
By \autoref{thm4} it follows that $\sum_{n \ge 1} Y_n$ converges
almost surely.
Let $A_n \coloneqq \{\omega : |X_n(\omega)| > C\}$.
Since the first series $\sum_{n \ge 1} \bP(A_n) < \infty$,
by Borel-Cantelli, $\bP[\text{infinitely many $A_n$ occur}] = 0$.
Hence for almost every $\omega$ we have $X_n(\omega) = Y_n(\omega)$ for all but finitely many $n$, so $\sum_{n \ge 1} X_n$ converges almost surely as well.
For the proof of (b), suppose $\sum_{n\ge 1} X_n(\omega)$ converges
for almost every $\omega$.
Fix an arbitrary $C > 0$.
Define
\[
Y_n(\omega) \coloneqq \begin{cases}
X_n(\omega) & \text{if } |X_n(\omega)| \le C,\\
C &\text{if } |X_n(\omega)| > C.
\end{cases}
\]
Then the $Y_n$ are independent and uniformly bounded, and $\sum_{n \ge 1} Y_n(\omega)$ converges
almost surely: since $\sum_{n \ge 1} X_n$ converges a.s., $X_n \to 0$ a.s., so $Y_n = X_n$ for all but finitely many $n$.
By \autoref{thm4} $\sum_{n \ge 1} \bE[Y_n]$ and $\sum_{n \ge 1} \Var(Y_n)$
converge.
Define
\[
Z_n(\omega) \coloneqq \begin{cases}
X_n(\omega) &\text{if } |X_n| \le C,\\
-C &\text{if } |X_n| > C.
\end{cases}
\]
Then the $Z_n$ are independent and uniformly bounded, and $\sum_{n \ge 1} Z_n(\omega)$ converges
almost surely, by the same argument as for the $Y_n$.
By \autoref{thm4}, $\sum_{n \ge 1} \bE(Z_n)$
and $\sum_{n \ge 1} \Var(Z_n)$
converge.
We have
\begin{IEEEeqnarray*}{rCl}
\bE(Y_n) &=& \int_{|X_n| \le C} X_n \,d \bP + C \,\bP(|X_n| > C),\\
\bE(Z_n) &=& \int_{|X_n| \le C} X_n \,d \bP - C \,\bP(|X_n| > C).
\end{IEEEeqnarray*}
Since $\bE(Y_n) + \bE(Z_n) = 2 \int_{|X_n| \le C} X_n d\bP$
and both $\sum_{n \ge 1} \bE(Y_n)$ and $\sum_{n \ge 1} \bE(Z_n)$ converge, the second series converges;
and since $\bE(Y_n) - \bE(Z_n) = 2 C \,\bP(|X_n| > C)$, the first series converges as well.
For the third series, we compare
$\sum_{n \ge 1} \Var(Y_n)$ and
$\sum_{n \ge 1} \Var(Z_n)$ with the truncated variances to conclude that this series converges
as well; the computation is spelled out below.
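In detail (writing, for brevity, $p_n \coloneqq \bP(|X_n| > C)$ and $T_n \coloneqq \int_{|X_n| \le C} X_n \,d\bP$; this notation is used only here):
\[
\Var(Y_n) = \int_{|X_n| \le C} X_n^2 \,d\bP + C^2 p_n - (T_n + C p_n)^2
= v_n + C^2 p_n (1 - p_n) - 2 C T_n p_n,
\]
where $v_n$ denotes the truncated variance of $X_n$.
Since $\sum_{n \ge 1} p_n < \infty$ by the first series and $|T_n| \le C$, the correction terms are absolutely summable, so $\sum_{n \ge 1} v_n$ converges iff $\sum_{n \ge 1} \Var(Y_n)$ does.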
\end{refproof}