% Lecture 1 - 2023-04-04
First, let us recall some basic definitions:
\begin{definition}
A \vocab{probability space} is a triplet $(\Omega, \cF, \bP)$,
such that
\begin{itemize}
\item $\Omega \neq \emptyset$,
\item $\cF$ is a $\sigma$-algebra over $\Omega$, i.e.~$\cF \subseteq \cP(\Omega)$ and
\begin{itemize}
\item $\emptyset, \Omega \in \cF$,
\item $A \in \cF \implies A^c \in \cF$,
\item $A_1, A_2,\ldots \in \cF \implies \bigcup_{i \in \N} A_i \in \cF$.
\end{itemize}
The elements of $\cF$ are called \vocab[Event]{events}.
\item $\bP$ is a \vocab{probability measure}, i.e.~$\bP$ is a function $\bP: \cF \to [0,1]$
such that
\begin{itemize}
\item $\bP(\emptyset) = 0$, $\bP(\Omega) = 1$,
\item $\bP\left( \bigsqcup_{n \in \N} A_n \right) = \sum_{n \in \N} \bP(A_n)$
for mutually disjoint $A_n \in \cF$.
\end{itemize}
\end{itemize}
\end{definition}
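To fix ideas, here is the simplest nontrivial example:
\begin{example}
A fair coin toss is modelled by $\Omega = \{H, T\}$,
$\cF = \cP(\Omega) = \{\emptyset, \{H\}, \{T\}, \Omega\}$
and $\bP(\{H\}) = \bP(\{T\}) = \frac{1}{2}$.
All of the above axioms are immediate to verify.
\end{example}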
\begin{definition}
A \vocab{random variable} $X : (\Omega, \cF) \to (\R, \cB(\R))$
is a measurable function, i.e.~for all $B \in \cB(\R)$ we have $X^{-1}(B) \in \cF$.
(Equivalently, $X^{-1}\left( (a,b] \right) \in \cF$ for all $a, b \in \R$ with $a < b$.)
\end{definition}
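The measurability condition is easiest to check for indicator functions:
\begin{example}
For $A \subseteq \Omega$ define $X(\omega) \coloneqq 1$ if $\omega \in A$
and $X(\omega) \coloneqq 0$ otherwise.
Then $X^{-1}\left( (a,b] \right)$ is always one of
$\emptyset$, $A$, $A^c$, $\Omega$,
so $X$ is a random variable iff $A \in \cF$.
\end{example}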
\begin{definition}
$F: \R \to \R_+$ is a \vocab{distribution function} iff
\begin{itemize}
\item $F$ is monotone non-decreasing,
\item $F$ is right-continuous,
\item $\lim_{x \to -\infty} F(x) = 0$ and $\lim_{x \to \infty} F(x) = 1$.
\end{itemize}
\end{definition}
\begin{fact}
Let $\bP$ be a probability measure on $(\R, \cB(\R))$.
Then $F(x) \coloneqq \bP\left( (-\infty, x] \right)$
is a distribution function.
(See Lemma 2.4.2 in the Stochastik lecture notes.)
\end{fact}
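The only non-obvious point is right-continuity:
for $x_n \downarrow x$ the events $(-\infty, x_n]$ decrease to $(-\infty, x]$,
so continuity of $\bP$ from above gives
\[
F(x_n) = \bP\left( (-\infty, x_n] \right) \xrightarrow{n \to \infty} \bP\left( (-\infty, x] \right) = F(x).
\]
The remaining properties follow similarly from monotonicity
and continuity of $\bP$.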
The converse to this fact is also true:
\begin{theorem}[Kolmogorov's existence theorem / basic existence theorem of probability theory]
\label{kolmogorovxistence}
Let $\cF(\R)$ be the set of all distribution functions on $\R$
and let $\cM(\R)$ be the set of all probability measures on $\R$.
Then there is a one-to-one correspondence between $\cF(\R)$ and $\cM(\R)$
given by
\begin{IEEEeqnarray*}{rCl}
\cM(\R) &\longrightarrow & \cF(\R)\\
\bP &\longmapsto & \begin{pmatrix*}[l]
\R &\longrightarrow & \R_+ \\
x &\longmapsto & \bP((-\infty, x]).
\end{pmatrix*}
\end{IEEEeqnarray*}
\end{theorem}
\begin{proof}
See theorem 2.4.3 in Stochastik.
\end{proof}
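One standard construction of the inverse map is via Lebesgue--Stieltjes measures:
given $F \in \cF(\R)$, setting $\bP\left( (a,b] \right) \coloneqq F(b) - F(a)$
defines a pre-measure on the half-open intervals
(right-continuity of $F$ is used here),
which extends uniquely to a probability measure on $\cB(\R)$
by Carath\'eodory's extension theorem.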
\begin{example}[Some important probability distribution functions]\hfill
\begin{enumerate}[(1)]
\item \vocab{Uniform distribution} on $[0,1]$:
\[
F(x) = \begin{cases}
0 & x \in (-\infty, 0],\\
x & x \in (0,1],\\
1 & x \in (1,\infty).\\
\end{cases}
\]
\item \vocab{Exponential distribution} with parameter $\lambda > 0$:
\[
F(x) = \begin{cases}
1 - e^{-\lambda x} & x \ge 0,\\
0 & x < 0.
\end{cases}
\]
\item \vocab{Gaussian distribution} (standard normal):
\[
\Phi(x) \coloneqq \frac{1}{\sqrt{2\pi}} \int_{-\infty}^x e^{-\frac{y^2}{2}} \,dy.
\]
\]
\item $\bP[X = 1] = \bP[X = -1] = \frac{1}{2}$:
\[
F(x) = \begin{cases}
0 & x \in (-\infty, -1),\\
\frac{1}{2} & x \in [-1,1),\\
1 & x \in [1, \infty).
\end{cases}
\]
\end{enumerate}
\end{example}
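Note that the distribution function in (4) is right-continuous but not
left-continuous at $\pm 1$; in general the jump height equals the point mass,
i.e.~$\bP[X = x] = F(x) - \lim_{y \uparrow x} F(y)$.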