diff --git a/blueprint/src/chapter/clt.tex b/blueprint/src/chapter/clt.tex index 74a04f8..d560583 100644 --- a/blueprint/src/chapter/clt.tex +++ b/blueprint/src/chapter/clt.tex @@ -1,31 +1,6 @@ \chapter{The central limit theorem} -\begin{lemma}[Concavity]\label{concave} - \lean{Real.strictConcaveOn_negMulLog}\leanok - $h$ is strictly concave on $[0,1]$. -\end{lemma} - -\begin{proof} \leanok Check that $h'$ is strictly monotone decreasing. -\end{proof} - -\begin{lemma}[Jensen]\label{jensen} - \lean{Real.sum_negMulLog_le} \leanok - If $S$ is a finite set, and $\sum_{s \in S} w_s = 1$ for some non-negative $w_s$, and $p_s \in [0,1]$ for all $s \in S$, then - $$ \sum_{s \in S} w_s h(p_s) \leq h(\sum_{s \in S} w_s p_s).$$ -\end{lemma} - -\begin{proof} \uses{concave}\leanok Apply Jensen and Lemma \ref{concave}. -\end{proof} - -\begin{lemma}[Converse Jensen]\label{converse-jensen} - \lean{Real.sum_negMulLog_eq}\leanok -If equality holds in the above lemma, then $p_s = \sum_{s \in S} w_s h(p_s)$ whenever $w_s \neq 0$. -\end{lemma} - -\begin{proof} \uses{concave}\leanok Need some converse form of Jensen, not sure if it is already in Mathlib. May also wish to state it as an if and only if. -\end{proof} - \begin{theorem}[Central limit theorem]\label{clt} Let $\xi_1, \xi_2, \ldots$ be i.i.d. random variables with mean 0 and variance 1, and let $\zeta$ be a random variable with law $\mathcal N(0,1)$. 
Then \begin{align*} diff --git a/blueprint/src/chapter/mathlib_defs.tex b/blueprint/src/chapter/mathlib_defs.tex index a88ad54..102c6af 100644 --- a/blueprint/src/chapter/mathlib_defs.tex +++ b/blueprint/src/chapter/mathlib_defs.tex @@ -2,9 +2,20 @@ \chapter{Useful definitions from Mathlib} \begin{definition}[Convergence in probability]\label{def:cvg_probability} \lean{MeasureTheory.TendstoInMeasure} \leanok -We say that $(\xi_i)_{i \in \mathbb{N}}$ tends to $\xi$ in probability (or in measure) if for all $\varepsilon > 0$ +We say that $(X_i)_{i \in \mathbb{N}}$ tends to $X$ in probability (or in measure) in a space with distance function $d$ if for all $\varepsilon > 0$ \begin{align*} -\mathbb{P}(d(\xi_n, \xi) \ge \varepsilon) \to_{n \to +\infty} 0 +\mathbb{P}(d(X_n, X) \ge \varepsilon) \to_{n \to +\infty} 0 \end{align*} +\end{definition} +\begin{definition}[Weak convergence of probability measures]\label{def:weak_cvg_measure} +\lean{MeasureTheory.ProbabilityMeasure.tendsto_iff_forall_integral_tendsto} \leanok +In a topological space $S$ in which the open sets are measurable, we say that a sequence of probability measures $\mu_n$ converges weakly to a probability measure $\mu$ and write $\mu_n \xrightarrow{w} \mu$ if $\mu_n[f] \to \mu[f]$ for every bounded continuous function $f : S \to \mathbb{R}$. +\end{definition} + +We write $\mathcal L(X)$ for the law of a random variable $X : \Omega \to S$. This is a measure on $S$, defined as the pushforward of the measure on $\Omega$ by $X$. + +\begin{definition}[Convergence in distribution]\label{def:cvg_distribution} +\uses{def:weak_cvg_measure} +We say that $X_n$ converges in distribution to $X$ and write $X_n \xrightarrow{d} X$ if $\mathcal L(X_n) \xrightarrow{w} \mathcal L(X)$. \end{definition} \ No newline at end of file