From 872cb3b0bb22cc0f2f5da331faa31c8b8e34f432 Mon Sep 17 00:00:00 2001
From: Josia Pietsch
Date: Fri, 7 Jul 2023 17:42:38 +0200
Subject: [PATCH] some small changes

---
 Makefile                        |   2 +-
 inputs/a_1_counterexamples.tex  |  14 ++-
 inputs/a_2_additional_stuff.tex |  21 ++++
 inputs/lecture_02.tex           |   8 +-
 inputs/lecture_03.tex           |   1 +
 inputs/lecture_05.tex           |  17 +---
 inputs/lecture_07.tex           |   3 +-
 inputs/lecture_09.tex           |   1 +
 inputs/lecture_10.tex           |   1 +
 inputs/lecture_13.tex           |   1 -
 inputs/lecture_22.tex           |   1 +
 inputs/lecture_23.tex           | 170 ++++++++++++++++++++++++++++++++
 inputs/prerequisites.tex        |   7 +-
 13 files changed, 224 insertions(+), 23 deletions(-)
 create mode 100644 inputs/a_2_additional_stuff.tex
 create mode 100644 inputs/lecture_23.tex

diff --git a/Makefile b/Makefile
index ddfbd71..f42a265 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,5 @@
 pdf: init
-	latexmk
+	latexmk < /dev/null
 
 clean:
 	latexmk -c
diff --git a/inputs/a_1_counterexamples.tex b/inputs/a_1_counterexamples.tex
index b451952..ebe6368 100644
--- a/inputs/a_1_counterexamples.tex
+++ b/inputs/a_1_counterexamples.tex
@@ -1,5 +1,17 @@
-\section{Counterexamples}
+\section{(Counter)examples}
+
+Consistent families and inconsistent families
+
+Notions of convergence (Exercises 4.3, 10.2)
+
+Martingales converging a.s.~but not in $L^1$.
+
+Stopping times
+
+
+
+
diff --git a/inputs/a_2_additional_stuff.tex b/inputs/a_2_additional_stuff.tex
new file mode 100644
index 0000000..95a734d
--- /dev/null
+++ b/inputs/a_2_additional_stuff.tex
@@ -0,0 +1,21 @@
+Important stuff not done in the lecture.
+
+Moments:
+
+$\bE[X^k]$
+
+\begin{lemma}
+  Let $X, Y : \Omega \to [a,b]$ be random variables.
+  If $\bE[X^k] = \bE[Y^k]$ for every $k \in \N_0$,
+  then $X$ and $Y$ have the same distribution.
+\end{lemma}
+\begin{proof}
+  By linearity, we have $\bE[p(X)] = \bE[p(Y)]$ for
+  every polynomial $p$.
+  Approximate $e^{\i t X}$ uniformly on $[a,b]$
+  by polynomials (Weierstrass).
+  Hence the characteristic functions of $X$ and $Y$ agree,
+  and the claim follows from the uniqueness theorem for Fourier transforms.
+\end{proof}
+
+
+Laplace transforms
diff --git a/inputs/lecture_02.tex b/inputs/lecture_02.tex
index 996ea5f..5720809 100644
--- a/inputs/lecture_02.tex
+++ b/inputs/lecture_02.tex
@@ -1,4 +1,4 @@
-\lecture{2}{}{}
+\lecture{2}{2023-04-11}{Independence, Kolmogorov's consistency theorem, consistent families}
 \section{Independence and Product Measures}
 
 In order to define the notion of independence, we first need to construct
@@ -80,9 +80,9 @@ to an infinite number of random variables.
 
 \begin{theorem}[Kolmogorov extension / consistency theorem]
 \label{thm:kolmogorovconsistency}
-  Informally:
+  \footnote{Informally:
   ``Probability measures are determined by finite-dimensional marginals
-  (as long as these marginals are nice)''
+  (as long as these marginals are nice)''}
 
   Let $\bP_n, n \in \N$ be probability measures on $(\R^n, \cB(\R^n))$
   which are \vocab{consistent},
@@ -117,5 +117,3 @@ to an infinite number of random variables.
   the distribution function of $X_i$.
   In the case of $F_1 = \ldots = F_n$, then $X_1,\ldots, X_n$ are i.i.d.
 \end{example}
-
-
diff --git a/inputs/lecture_03.tex b/inputs/lecture_03.tex
index 4f0a0c4..ffee178 100644
--- a/inputs/lecture_03.tex
+++ b/inputs/lecture_03.tex
@@ -154,4 +154,5 @@ we are going to use the following:
     = \lambda_n(C_1) + \lambda_n(C_2)
   \]
   by the definition of the finite product measure.
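+  % presumably to avoid a misplaced QED symbol after the display above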
+  \phantom\qedhere
 \end{refproof}
diff --git a/inputs/lecture_05.tex b/inputs/lecture_05.tex
index a466324..76fc1ef 100644
--- a/inputs/lecture_05.tex
+++ b/inputs/lecture_05.tex
@@ -1,4 +1,4 @@
-\lecture{5}{2023-04-21}{}
+\lecture{5}{2023-04-21}{Laws of large numbers}
 
 \subsection{The Laws of Large Numbers}
 
@@ -44,23 +44,14 @@ For the proof of (b) we need the following general result:
 \begin{theorem}
 \label{thm2}
   Let $X_1, X_2, \ldots$ be independent (but not necessarily identically
   distributed) random variables with $\bE[X_i] = 0$ for all $i$
-  and $\sum_{i=1}^n \Var(X_i) < \infty$.
+  and
+  \[\sum_{i=1}^{\infty} \Var(X_i) < \infty.\]
   Then $\sum_{n \ge 1} X_n$ converges almost surely.
 \end{theorem}
+We'll prove this later.\todo{Move proof}
 
-\begin{proof}
-
-
-\end{proof}
 \begin{question}
   Does the converse hold?
   I.e.~does $\sum_{n \ge 1} X_n < \infty$ a.s.~
   then $\sum_{n \ge 1} \Var(X_n) < \infty$.
 \end{question}
 This does not hold. Consider for example
 $X_n = \frac{1}{n^2} \delta_n + \frac{1}{n^2} \delta_{-n} + (1-\frac{2}{n^2}) \delta_0$.
-
-\begin{refproof}{lln}
-  \begin{enumerate}
-    \item[(b)]
-  \end{enumerate}
-\end{refproof}
-
diff --git a/inputs/lecture_07.tex b/inputs/lecture_07.tex
index 4bcb241..86555a5 100644
--- a/inputs/lecture_07.tex
+++ b/inputs/lecture_07.tex
@@ -5,6 +5,7 @@ when the $X_n$ are independent.
 \end{goal}
 
 \begin{theorem}[Kolmogorov's three-series theorem] % Theorem 3
+  \label{thm:kolmogorovthreeseries}
   \label{thm3}
   Let $X_n$ be a family of independent random variables.
   \begin{enumerate}[(a)]
@@ -43,7 +44,7 @@ For the proof we'll need a slight generalization of \autoref{thm2}:
   almost surely.
   Let $A_n \coloneqq \{\omega : |X_n(\omega)| > C\}$.
   Since the first series $\sum_{n \ge 1} \bP(A_n) < \infty$,
-  by Borel-Cantelli, $\bP[\text{infinitely many $A_n$ occcur}] = 0$.
+  by Borel-Cantelli, $\bP[\text{infinitely many $A_n$ occur}] = 0$.
 
 
 For the proof of (b), suppose $\sum_{n\ge 1} X_n(\omega) < \infty$
diff --git a/inputs/lecture_09.tex b/inputs/lecture_09.tex
index 15f6e60..a7b947c 100644
--- a/inputs/lecture_09.tex
+++ b/inputs/lecture_09.tex
@@ -80,6 +80,7 @@ This notion of convergence will be defined in terms of characteristic functions
 \subsection{Characteristic Functions and Fourier Transform}
 
 \begin{definition}
+  \label{def:characteristicfunction}
   Consider $(\R, \cB(\R), \bP)$.
   The \vocab{characteristic function} of $\bP$ is defined as
   \begin{IEEEeqnarray*}{rCl}
diff --git a/inputs/lecture_10.tex b/inputs/lecture_10.tex
index adddc04..c5fa42e 100644
--- a/inputs/lecture_10.tex
+++ b/inputs/lecture_10.tex
@@ -184,6 +184,7 @@ Unfortunately, we won't prove \autoref{bochnersthm} in this lecture.
 
 
 \begin{definition}[Convergence in distribution / weak convergence]
+  \label{def:weakconvergence}
   We say that $\bP_n \subseteq M_1(\R)$ \vocab[Convergence!weak]{converges weakly}
   towards $\bP \in M_1(\R)$ (notation: $\bP_n \implies \bP$), iff
   \[
     \forall f \in C_b(\R)~ \int f d\bP_n \to \int f d\bP.
diff --git a/inputs/lecture_13.tex b/inputs/lecture_13.tex
index d31db12..e147b69 100644
--- a/inputs/lecture_13.tex
+++ b/inputs/lecture_13.tex
@@ -264,7 +264,6 @@ which converges.
   $G_1, G_2, \ldots$ is a subsequence of $F_1, F_2,\ldots$.
   However $G_1, G_2, \ldots$ is not converging to $F$,
   as this would fail at $x_0$.
   This is a contradiction.
-
 \end{refproof}
diff --git a/inputs/lecture_22.tex b/inputs/lecture_22.tex
index bf7c7c6..314dbba 100644
--- a/inputs/lecture_22.tex
+++ b/inputs/lecture_22.tex
@@ -1,3 +1,4 @@
+\pagebreak
 \lecture{22}{2023-07-04}{Introduction Markov Chains II}
 \section{Markov Chains}
 \todo{Merge this with the end of lecture 21}
diff --git a/inputs/lecture_23.tex b/inputs/lecture_23.tex
new file mode 100644
index 0000000..1ecc0ac
--- /dev/null
+++ b/inputs/lecture_23.tex
@@ -0,0 +1,170 @@
+\lecture{23}{2023-07-06}{}
+\section{Recap}
+In this lecture we will recall the most important points of the course.
+
+\subsection{Construction of i.i.d.~random variables}
+
+\begin{itemize}
+  \item Definition of a consistent family (\autoref{def:consistentfamily})
+  \item Important construction:
+
+    Consider a distribution function $F$ and define
+    \[
+      \prod_{i=1}^n (F(b_i) - F(a_i)) \text{\reflectbox{$\coloneqq$}}
+      \mu_n \left( (a_1,b_1] \times \ldots \times (a_n, b_n] \right).
+    \]
+
+  \item Examples of consistent and inconsistent families
+    \todo{Exercises}
+  \item Kolmogorov's consistency theorem
+    (\autoref{thm:kolmogorovconsistency})
+\end{itemize}
+
+\subsection{Limit theorems}
+\begin{itemize}
+  \item Work with i.i.d.~random variables.
+  \item Notions of convergence (\autoref{def:convergence})
+  \item Implications between the different notions of convergence (very important)
+    and counterexamples
+    (\autoref{thm:convergenceimplications})
+  \item Laws of large numbers (\autoref{lln}):
+    \begin{itemize}
+      \item WLLN: convergence in probability
+      \item SLLN: almost sure convergence
+    \end{itemize}
+  \item \autoref{thm2} (building block for the SLLN):
+    Let $(X_n)$ be independent with mean $0$ and $\sum \sigma_n^2 < \infty$;
+    then $\sum X_n$ converges a.s.
+    \begin{itemize}
+      \item Counterexamples showing that $\impliedby$ does not hold in general are important
+      \item $\impliedby$ holds for i.i.d.~uniformly bounded random variables
+      \item Application:
+
+        $\sum_{n=1}^{\infty} \frac{\pm 1}{n^{\frac{1}{2} + \epsilon}}$ converges a.s.~for all $\epsilon > 0$.
+
+        $\sum \frac{\pm 1}{n^{\frac{1}{2} - \epsilon}}$ does not converge a.s.~for any $\epsilon > 0$.
+    \end{itemize}
+  \item Kolmogorov's inequality (\autoref{thm:kolmogorovineq})
+  \item Kolmogorov's $0$--$1$ law (\autoref{kolmogorov01})
+
+    In particular, a series of independent random variables converges with probability $0$ or $1$.
+
+  \item Kolmogorov's three-series theorem (\autoref{thm:kolmogorovthreeseries})
+    \begin{itemize}
+      \item What are those $3$ series?
+      \item Applications
+    \end{itemize}
+\end{itemize}
+
+\subsubsection{Fourier transform / characteristic functions / weak convergence}
+
+\begin{itemize}
+  \item Definition of the Fourier transform
+    (\autoref{def:characteristicfunction})
+  \item The Fourier transform uniquely determines the probability distribution.
+    It is bounded, so many theorems are easily applicable.
+  \item Uniqueness theorem (\autoref{charfuncuniqueness}),
+    inversion formula (\autoref{inversionformula}), ...
+  \item Levy's continuity theorem (\autoref{levycontinuity}),
+    (\autoref{genlevycontinuity})
+  \item Bochner's theorem for positive definite functions % TODO REF
+  \item Bochner's formula for the mass at a point (\autoref{bochnersformula})
+  \item Related notions
+    \todo{TODO}
+    \begin{itemize}
+      \item Laplace transforms $\bE[e^{-\lambda X}]$ for some $\lambda > 0$
+        (not done in the lecture, but still useful).
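+        For $X \ge 0$, the Laplace transform $\lambda \mapsto \bE[e^{-\lambda X}]$
+        is finite and determines the distribution of $X$ uniquely,
+        in analogy to the uniqueness theorem for characteristic functions.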
+      \item Moments $\bE[X^k]$ (not done in the lecture, but still useful).
+        For bounded random variables, the moments
+        uniquely determine the distribution.
+    \end{itemize}
+\end{itemize}
+
+\paragraph{Weak convergence}
+\begin{itemize}
+  \item Definition of weak convergence % (test against continuous, bounded functions).
+    (\autoref{def:weakconvergence})
+  \item Examples:
+    \begin{itemize}
+      \item $(\delta_{\frac{1}{n}})_n$,
+      \item $(\frac{1}{2} \delta_{-\frac{1}{n}} + \frac{1}{2} \delta_{\frac{1}{n}})_n$,
+      \item $(\cN(0, \frac{1}{n}))_n$,
+      \item $(\frac{1}{n} \delta_n + (1 - \frac{1}{n}) \delta_{\frac{1}{n}})_n$.
+    \end{itemize}
+
+  \item Non-examples: $(\delta_n)_n$
+  \item How does one prove weak convergence? How does one write this down in a clear way?
+    % TODO
+\end{itemize}
+
+\paragraph{Convolution}
+\begin{itemize}
+  \item Definition of convolution.
+    \todo{Copy from exercise sheet and write a section about this}
+  \item $X_i \sim \mu_i$ independent $\implies X_1 + \ldots + X_n \sim \mu_1 \ast \ldots \ast \mu_n$.
+\end{itemize}
+
+
+
+\subsubsection{CLT}
+\begin{itemize}
+  \item Statement of the CLT
+  \item Several versions:
+    \begin{itemize}
+      \item i.i.d.~(\autoref{clt}),
+      \item Lindeberg (\autoref{lindebergclt}),
+      \item Lyapunov (\autoref{lyapunovclt})
+    \end{itemize}
+  \item How to apply this? Exercises!
+\end{itemize}
+
+\subsection{Conditional expectation}
+\begin{itemize}
+  \item Definition and existence of the conditional expectation for $X \in L^1(\Omega, \cF, \bP)$
+  \item If $H = L^2(\Omega, \cF, \bP)$, then $\bE[ \cdot | \cG]$
+    is the (unique) orthogonal projection onto the closed subspace $L^2(\Omega, \cG, \bP)$.
+    Why is this a closed subspace? Why is the projection orthogonal?
+  \item Radon-Nikodym theorem (proof not relevant for the exam)
+  \item (Non-)examples of mutually absolutely continuous measures.
+    What does singularity mean in this context? % TODO
+\end{itemize}
+
+\subsection{Martingales}
+
+\begin{itemize}
+  \item Definition of martingales
+  \item Doob's convergence theorem, upcrossing inequality
+    (downcrossings for submartingales)
+  \item Examples of martingales converging a.s.~but not in $L^1$
+  \item Bounded in $L^2$ $\implies$ convergence in $L^2$.
+  \item Martingale increments are orthogonal in $L^2$!
+  \item Doob's (sub-)martingale inequalities
+  \item $\bP[\sup_{k \le n} M_k \ge x]$ $\leadsto$ look at the martingale inequalities!
+    Estimates might come from Doob's inequalities if $(M_k)_k$ is a (sub-)martingale.
+  \item Doob's $L^p$ convergence theorem.
+    \begin{itemize}
+      \item Why is $p > 1$ important? \textbf{Role of Banach-Alaoglu}
+      \item This is an important proof.
+    \end{itemize}
+  \item Uniform integrability % TODO
+  \item What are stopping times?
+  \item (Non-)examples of stopping times
+  \item \textbf{Optional stopping theorem} - be really comfortable with this.
+\end{itemize}
+
+
+\subsection{Markov Chains}
+
+\begin{itemize}
+  \item What are Markov chains?
+  \item State space, initial distribution
+  \item Important examples
+  \item \textbf{What is the relation between martingales and Markov chains?}
+    $u$ is \vocab{harmonic} $\iff Lu = 0$;
+    $u$ is (sub-/super-)harmonic $\iff$ $u(X_n)$ is a (sub-/super-)martingale
+    for a Markov chain $(X_n)$.
+  \item Dirichlet problem
+    (not done in the lecture)
+  \item ... (more in Probability Theory II)
+\end{itemize}
diff --git a/inputs/prerequisites.tex b/inputs/prerequisites.tex
index f16a38d..4777387 100644
--- a/inputs/prerequisites.tex
+++ b/inputs/prerequisites.tex
@@ -3,6 +3,7 @@ from the lecture on stochastic.
 \subsection{Notions of Convergence}
 \begin{definition}
+  \label{def:convergence}
   Fix a probability space $(\Omega,\cF,\bP)$.
   Let $X, X_1, X_2,\ldots$ be random variables.
   \begin{itemize}
@@ -30,9 +31,10 @@ from the lecture on stochastic.
   \]
   \end{itemize}
 \end{definition}
-% TODO Connect to ANaIII
+% TODO Connect to AnaIII
 
 \begin{theorem}
+  \label{thm:convergenceimplications}
   \vspace{10pt}
   Let $X$ be a random variable and $X_n, n \in \N$ a sequence of random variables.
   Then
@@ -80,6 +82,9 @@ from the lecture on stochastic.
     &=& \epsilon \cdot c > 0 \lightning
   \end{IEEEeqnarray*}
   \todo{Improve this with Markov}
+  \todo{Counterexamples}
+  \todo{weak convergence}
+  \todo{$L^p$ convergence}
 \end{subproof}
 \begin{claim}
   $X_n \xrightarrow{\bP} X \notimplies X_n \xrightarrow{L^1} X$
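+\begin{subproof}
+  % sketch of a standard counterexample (cf. the \todo{Counterexamples} above);
+  % \One denotes the indicator function; adjust to the macro used in these notes
+  Consider $([0,1], \cB([0,1]), \lambda)$ and let
+  $X_n \coloneqq n \cdot \One_{(0, \frac{1}{n})}$ and $X \coloneqq 0$.
+  For $0 < \epsilon < 1$ we have $\bP[|X_n - X| > \epsilon] = \frac{1}{n} \to 0$,
+  so $X_n \xrightarrow{\bP} X$,
+  but $\bE[|X_n - X|] = n \cdot \lambda\left( \left( 0, \frac{1}{n} \right) \right) = 1$ for all $n$.
+\end{subproof}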