diff --git a/inputs/lecture_15.tex b/inputs/lecture_15.tex
index a3b53b8..4ba12b1 100644
--- a/inputs/lecture_15.tex
+++ b/inputs/lecture_15.tex
@@ -219,22 +219,28 @@ Assume $Y = \One_B$, then $Y$ simple, then take the limit (using that $Y$ is bou
 \begin{definition}
     Let $\cG$ and $\cH$ be $\sigma$-algebras.
     We call $\cG$ and $\cH$ \vocab[$\sigma$-algebra!independent]{independent},
-    if % TODO
+    if $\bP[G \cap H] = \bP[G] \cdot \bP[H]$ for all $G \in \cG$ and $H \in \cH$.
 \end{definition}
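+
+\begin{example}
+    If $A, B \in \cF$ are independent events,
+    then $\sigma(A) = \{\emptyset, A, A^c, \Omega\}$
+    and $\sigma(B) = \{\emptyset, B, B^c, \Omega\}$ are independent.
+\end{example}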
 
 \begin{theorem}[Role of independence]
     \label{ceprop12}
-    \label{roleofindependence}
-    If $\cH$ is a sub-$\sigma$-algebra of $\cF$ and  $\cH$ is independent
-    of $\sigma(\sigma(X), \cG)$, then
-     \[
-         \bE[X | \sigma(\cG, \cH)] \overset{\text{a.s.}}{=} \bE[X | \cG].
-    \]
+    \label{ceroleofindependence}
+    Let $X$ be a random variable,
+    and let $\cG, \cH$ be $\sigma$-algebras.
+
+    If $\cH$ is independent of $\sigma\left( \sigma(X), \cG \right)$,
+    then
+    \[
+    \bE[X | \sigma(\cG, \cH)] \overset{\text{a.s.}}{=} \bE[X | \cG].
+    \] 
+
+    In particular, if $X$ is independent of $\cG$,
+    then
+    \[
+    \bE[X | \cG] \overset{\text{a.s.}}{=} \bE[X].
+    \] 
 \end{theorem}
-\begin{example}
-    If $X$ is independent of $\cG$,
-    then $\bE[X | \cG] \overset{\text{a.s.}}{=} \bE[X]$.
-\end{example}
+
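+\begin{example}
+    Let $X, Y, Z$ be independent, integrable random variables
+    and take $\cG \coloneqq \sigma(Y)$, $\cH \coloneqq \sigma(Z)$.
+    Since $Z$ is independent of $(X, Y)$,
+    $\cH$ is independent of $\sigma\left( \sigma(X + Y), \cG \right)$,
+    hence
+    \[
+        \bE[X + Y | \sigma(\cG, \cH)]
+        \overset{\text{a.s.}}{=} \bE[X + Y | \cG]
+        \overset{\text{a.s.}}{=} \bE[X] + Y,
+    \]
+    i.e.~additionally knowing $Z$ does not change the conditional expectation.
+\end{example}
+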
 \begin{example}[Martingale property of the simple random walk]
     Suppose $X_1,X_2,\ldots$ are i.i.d.~with $\bP[X_i = 1] = \bP[X_i = -1] = \frac{1}{2}$.
     Let $S_n \coloneqq  \sum_{i=1}^n X_i$ be the \vocab{simple random walk}.
diff --git a/inputs/lecture_16.tex b/inputs/lecture_16.tex
index e78e929..91182b0 100644
--- a/inputs/lecture_16.tex
+++ b/inputs/lecture_16.tex
@@ -1,29 +1,8 @@
 \lecture{16}{2023-06-13}{}
 
-\subsection{Conditional expectation}
+% \subsection{Conditional expectation}
 
-\begin{theorem}
-    \label{ceprop11}
-    \label{ceroleofindependence}
-    Let $X$ be a random variable,
-    and let $\cG, \cH$ be $\sigma$-algebras.
-
-    If $\cH$ is independent of $\sigma\left( \sigma(X), \cG \right)$,
-    then
-    \[
-    \bE[X | \sigma(\cG, \cH)] \overset{\text{a.s.}}{=} \bE[X | \cG].
-    \] 
-
-    In particular, if $X$ is independent of $\cG$,
-    then
-    \[
-    \bE[X | \cG] \overset{\text{a.s.}}{=} \bE[X].
-    \] 
-\end{theorem}
-
-\todo{Definition of independence wrt a $\sigma$-algebra}
-
-\begin{proof}
+\begin{refproof}{ceroleofindependence}
     Let $\cH$ be independent of $\sigma(\sigma(X), \cG)$.
     Then for all $H \in \cH$, we have that $\One_H$
     and any random variable measurable with respect to either $\sigma(X)$ 
@@ -50,7 +29,7 @@
     The claim of the theorem follows by the uniqueness of conditional expectation.
 
     To deduce the second statement, choose $\cG = \{\emptyset, \Omega\}$.
-\end{proof}
+\end{refproof}
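+
+\begin{example}
+    The assumption that $\cH$ is independent of $\sigma\left( \sigma(X), \cG \right)$
+    cannot be weakened to $\cH$ being independent of $\sigma(X)$ and of $\cG$ separately:
+    Let $X, Y$ be i.i.d.~with $\bP[X = 1] = \bP[X = -1] = \frac{1}{2}$
+    and take $\cG \coloneqq \sigma(Y)$, $\cH \coloneqq \sigma(X \cdot Y)$.
+    Then $\cH$ is independent of $\sigma(X)$ and of $\cG$,
+    but $X = (X \cdot Y) \cdot Y$ is $\sigma(\cG, \cH)$-measurable,
+    so
+    \[
+        \bE[X | \sigma(\cG, \cH)] \overset{\text{a.s.}}{=} X
+        \neq 0 \overset{\text{a.s.}}{=} \bE[X | \cG].
+    \]
+\end{example}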
 
 
 \subsection{The Radon Nikodym theorem}
diff --git a/inputs/lecture_17.tex b/inputs/lecture_17.tex
index 164472e..ae63e06 100644
--- a/inputs/lecture_17.tex
+++ b/inputs/lecture_17.tex
@@ -1,7 +1,9 @@
 \lecture{17}{2023-06-15}{}
 
 \begin{definition}[Stochastic process]
-    % TODO
+    A \vocab{stochastic process} is a collection of random
+    variables $(X_t)_{t \in T}$ for some index set $T$.
+    In this lecture we will consider the case $T = \N$.
 \end{definition}
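+
+\begin{example}
+    A sequence $(X_n)_{n \in \N}$ of i.i.d.~random variables
+    is a stochastic process,
+    as is the simple random walk $(S_n)_{n \in \N}$, $S_n = \sum_{i=1}^{n} X_i$.
+\end{example}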
 
 \begin{goal}
diff --git a/inputs/lecture_19.tex b/inputs/lecture_19.tex
index 6766371..505d65e 100644
--- a/inputs/lecture_19.tex
+++ b/inputs/lecture_19.tex
@@ -61,7 +61,7 @@ However, some subsets can be easily described, e.g.
 
 \begin{fact}\label{lec19f2}
     If $(X_n)_n$ is uniformly integrable,
-    then $(X_n)_n$ is bounded in $L^1$.k:w
+    then $(X_n)_n$ is bounded in $L^1$.
 \end{fact}
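+\begin{proof}[Sketch]
+    By uniform integrability there is some $K$ with
+    $\sup_n \bE[|X_n| \One_{\{|X_n| > K\}}] \le 1$,
+    hence
+    \[
+        \bE[|X_n|]
+        = \bE[|X_n| \One_{\{|X_n| \le K\}}] + \bE[|X_n| \One_{\{|X_n| > K\}}]
+        \le K + 1
+    \]
+    for all $n$.
+\end{proof}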
 
 \begin{fact}\label{lec19f3}
@@ -223,7 +223,7 @@ Let $(\Omega, \cF, \bP)$ as always and let $(\cF_n)_n$ always be a filtration.
     to $X$ in $L^p$.
 \end{theorem}
 \begin{proof}
-    
+    \todo{TODO}
 \end{proof}
 
 \begin{theorem}
@@ -231,7 +231,6 @@ Let $(\Omega, \cF, \bP)$ as always and let $(\cF_n)_n$ always be a filtration.
     Let $(X_n)_n$ be a martingale bounded in $L^p$.
     Then there exists a random variable $X \in  L^p$, such that
     $X_n = \bE[X | \cF_n]$ for all $n$.
-    
 \end{theorem}