Fixup some more stuff
Zentrik committed May 14, 2024
1 parent 68199d5 commit 8adda98
Showing 5 changed files with 16 additions and 9 deletions.
2 changes: 1 addition & 1 deletion CodingAndCryptography/01_noiseless_coding.tex
@@ -184,7 +184,7 @@ \subsection{Gibbs' inequality}
As the $p_i$ form a probability distribution, $\sum_{i \in I} p_i = 1$ and $\sum_{i \in I} q_i \leq 1$, so the right hand side is at most 0.
Therefore,
\begin{align*}
-\sum_{i=1}^n p_i \ln p_i = -\sum_{i \in I} p_i \ln p_i \leq -\sum_{i \in I} p_i \ln q_i \leq -\sum_{i=1}^n p_i \ln q_i
-\sum_{i=1}^n p_i \ln p_i = -\sum_{i \in I} p_i \ln p_i \leq -\sum_{i \in I} p_i \ln q_i = -\sum_{i=1}^n p_i \ln q_i
\end{align*}
If equality holds, we must have $\sum_{i \in I} q_i = 1$ and $\frac{q_i}{p_i} = 1$ for all $i \in I$, giving that $p_i = q_i$ for all $i$.
\end{proof}
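
For a concrete check of the inequality, take for instance $p = \qty(\frac{1}{2}, \frac{1}{2})$ and $q = \qty(\frac{1}{4}, \frac{3}{4})$.
\begin{align*}
-\sum_{i=1}^2 p_i \ln p_i = \ln 2 \approx 0.693, \qquad -\sum_{i=1}^2 p_i \ln q_i = \frac{1}{2}\ln 4 + \frac{1}{2}\ln\frac{4}{3} \approx 0.837
\end{align*}
The inequality is strict, as expected since $p \neq q$.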
23 changes: 15 additions & 8 deletions CodingAndCryptography/03_information_theory.tex
Expand Up @@ -164,20 +164,27 @@ \subsection{Shannon's first coding theorem}
\end{theorem}

\begin{proof}
% Let $\varepsilon > \frac{1}{n}$, for each $\epsilon_n$ $\exists n_0(\epsilon_n)$ s.t. $\exists$ typical sets $T_n \; \forall n \geq n_0(\epsilon_0)$.
% Let $T_k \subseteq \mathcal A^n$ be $\epsilon_n$ typical sets where $n_0(\epsilon_n) \leq k \leq n_0(\epsilon_{n+1})$.
% Define $n_k = \min \qty{n : T_k}$
% Then, $\forall \; n \geq n_0(\varepsilon)$, $\forall \; (x_1, \dots, x_n) \in T_n$ we have $p(x_1, \dots, x_n) \geq 2^{-n(H + \varepsilon)}$.
% Therefore, $1 \geq \prob{T_n} \geq \abs{T_n} 2^{-n(H + \varepsilon)}$, giving $\frac{1}{n} \log \abs{T_n} \leq H + \varepsilon$.
% Taking $A_n = T_n$ in the defn of reliable encoding shows that the source is reliably encodable at rate $H + \varepsilon$.

Let $\varepsilon > 0$, and let $T_n \subseteq \mathcal A^n$ be typical sets.
Then, $\forall \; n \geq n_0(\varepsilon)$, $\forall \; (x_1, \dots, x_n) \in T_n$ we have $p(x_1, \dots, x_n) \geq 2^{-n(H + \varepsilon)}$.
Therefore, $1 \geq \prob{T_n} \geq \abs{T_n} 2^{-n(H + \varepsilon)}$, giving $\frac{1}{n} \log \abs{T_n} \leq H + \varepsilon$.
Taking $A_n = T_n$ in the defn of reliable encoding shows that the source is reliably encodable at rate $H + \varepsilon$.

Conversely, if $H = 0$ the proof concludes, so we may assume $H > 0$.
If $H = 0$ the proof concludes, so we may assume $H > 0$.
Let $0 < \varepsilon < \frac{H}{2}$, and suppose that the source is reliably encodable at rate $H - 2\varepsilon$ with sets $A_n \subseteq \mathcal A^n$.
Let $T_n \subseteq \mathcal A^n$ be typical sets.
Then, $\forall \; (x_1, \dots, x_n) \in T_n$, $p(x_1, \dots, x_n) \leq 2^{-n(H - \varepsilon)}$, so $\prob{A_n \cap T_n} \leq 2^{-n(H - \varepsilon)} \abs{A_n}$, giving
\begin{align*}
\frac{1}{n} \log \prob{A_n \cap T_n} \leq -(H - \varepsilon) + \frac{1}{n} \log \abs{A_n} \to -(H - \varepsilon) + (H - 2 \varepsilon) = -\varepsilon
\end{align*}
Then, $\log \prob{A_n \cap T_n} \to -\infty$, so $\prob{A_n \cap T_n} \to 0$.
But $\prob{T_n} \leq \prob{A_n \cap T_n} + \prob{\mathcal A^n \setminus A_n} \to 0 + 0$, contradicting typicality.
Then, $\log \prob{A_n \cap T_n} \to -\infty$ as $n \to \infty$, so $\prob{A_n \cap T_n} \to 0$.
But $\prob{T_n} \to 1$ and $\prob{A_n} \to 1$ as $n \to \infty$ \Lightning\footnote{$\prob{T_n} \leq \prob{A_n \cap T_n} + \prob{\mathcal A^n \setminus A_n} \to 0 + 0$ as $n \to \infty$, contradicting typicality.}.
Hence we cannot reliably encode at rate $H - 2\varepsilon$, and so the information rate is at least $H$.
\end{proof}
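
To see the statement in a concrete case, consider for instance a Bernoulli$\qty(\frac{1}{4})$ source, so that $H = -\frac{1}{4}\log\frac{1}{4} - \frac{3}{4}\log\frac{3}{4} \approx 0.811$.
The first half of the proof shows $\abs{T_n} \leq 2^{n(0.811 + \varepsilon)}$ for large $n$, so roughly $0.811$ bits per symbol suffice, while the converse shows that no rate below $0.811$ is reliably achievable.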

@@ -228,11 +235,11 @@ \subsection{Capacity}

\begin{proof}
Let $\delta$ be s.t. $2p < \delta < \frac{1}{2}$.
We claim that we can reliably transmit at rate $R = 1 - H(\delta) > 0$.
Let $C_n$ be a code of length $n$, and suppose it has minimum distance $\floor*{n\delta}$ of maximal size.
We claim that we can reliably transmit at rate $R = 1 - H(\delta) > 0$, where $H(\delta) = -\delta \log \delta - (1 - \delta) \log(1 - \delta)$ is the binary entropy.
Let $C_n$ be a code of length $n$, and suppose it has minimum distance $\floor*{n\delta}$ and is of maximal size.
Then, by the GSV bound,
\begin{align*}
\abs{C_n} = A(n, \floor*{n\delta}) \geq 2^{-n(1-H(\delta))} = 2^{nR}
\abs{C_n} = A(n, \floor*{n\delta}) \geq \frac{2^n}{V(n, \floor*{n\delta} - 1)} \geq \frac{2^n}{2^{nH(\delta)}} = 2^{n(1 - H(\delta))} = 2^{nR}
\end{align*}
where we have used the volume bound $V(n, r) \leq 2^{nH(r/n)}$ for $r \leq \frac{n}{2}$.
Replacing $C_n$ with a subcode if necessary, we can assume $\abs{C_n} = \floor*{2^{nR}}$, with minimum distance at least $\floor*{n\delta}$.
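For instance, with $p = 0.1$ one can take $\delta = \frac{1}{4}$, giving $H(\delta) \approx 0.811$ and $R \approx 0.189$; for $n = 100$ the bound then guarantees a code of length $100$, minimum distance $25$ and at least $2^{18}$ codewords.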
Using minimum distance decoding,
@@ -285,7 +292,7 @@ \subsection{Conditional entropy}
\end{definition}

\begin{lemma}
$H(X,Y) = H(X \mid Y) - H(Y)$.
$H(X,Y) = H(X \mid Y) + H(Y)$.
\end{lemma}

\begin{proof}
@@ -312,7 +319,7 @@ \subsection{Conditional entropy}
\end{corollary}

\begin{proof}
Combine this result with the fact that $H(X,Y) \leq H(X) + H(Y)$ where equality holds iff $H(X), H(Y)$ are independent.
Combine the previous result with the fact that $H(X,Y) \leq H(X) + H(Y)$, where equality holds iff $X$ and $Y$ are independent.
\end{proof}
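
As a quick worked example, let $X$ be a fair coin toss.
If $Y$ is an independent fair coin toss then $H(X \mid Y) = 1$ and $H(X, Y) = H(X \mid Y) + H(Y) = 2 = H(X) + H(Y)$.
If instead $Y = X$ then $H(X \mid Y) = 0$ and $H(X, Y) = 0 + 1 = 1 < H(X) + H(Y) = 2$.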

Now, replace r.v.s $X$ and $Y$ with random vectors $X^{(r)} = (X_1, \dots, X_r)$ and $Y^{(s)} = (Y_1, \dots, Y_s)$.
Binary file modified CodingAndCryptography/cc.pdf
Binary file modified QuantumInfoAndComputing/qic.pdf
Binary file added QuantumInfoAndComputing/qic.synctex(busy)
