From 000922f9bd2e265e62ead9f745ffb65deaa92782 Mon Sep 17 00:00:00 2001
From: Trance-0 <60459821+Trance-0@users.noreply.github.com>
Date: Sat, 22 Nov 2025 15:18:52 -0600
Subject: [PATCH] bugfix
---
content/CSE5313/CSE5313_L15.md | 2 +
content/CSE5313/CSE5313_L18.md | 12 +--
content/Math4201/Math4201_L35.md | 125 +++++++++++++++++++++++++++++++
content/Math4201/_meta.js | 1 +
4 files changed, 134 insertions(+), 6 deletions(-)
create mode 100644 content/Math4201/Math4201_L35.md
diff --git a/content/CSE5313/CSE5313_L15.md b/content/CSE5313/CSE5313_L15.md
index cf050e2..d3dfb58 100644
--- a/content/CSE5313/CSE5313_L15.md
+++ b/content/CSE5313/CSE5313_L15.md
@@ -140,6 +140,7 @@ $$
\begin{aligned}
H(Y|X=x)&=\sum_{y\in \mathcal{Y}} Pr(Y=y|X=x) \log_2 \frac{1}{Pr(Y=y|X=x)} \\
&=-\sum_{y\in \mathcal{Y}} Pr(Y=y|X=x) \log_2 Pr(Y=y|X=x) \\
+\end{aligned}
$$
The conditional entropy $H(Y|X)$ is defined as:
@@ -150,6 +151,7 @@ H(Y|X)&=\mathbb{E}_{x\sim X}[H(Y|X=x)] \\
&=\sum_{x\in \mathcal{X}} Pr(X=x)H(Y|X=x) \\
&=-\sum_{x\in \mathcal{X}, y\in \mathcal{Y}} Pr(X=x, Y=y) \log_2 Pr(Y=y|X=x) \\
&=-\sum_{x\in \mathcal{X}} Pr(X=x)\sum_{y\in \mathcal{Y}} Pr(Y=y|X=x) \log_2 Pr(Y=y|X=x) \\
+\end{aligned}
$$
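+
+As a quick illustration, assume a binary symmetric channel with crossover probability $p$, i.e. $\mathcal{X}=\mathcal{Y}=\{0,1\}$ and $Pr(Y\neq x|X=x)=p$ for each input $x$; then
+
+$$
+\begin{aligned}
+H(Y|X=x)&=-p\log_2 p-(1-p)\log_2(1-p) \quad\text{for each } x\in\{0,1\},\\
+H(Y|X)&=\sum_{x\in\mathcal{X}}Pr(X=x)\,H(Y|X=x)=-p\log_2 p-(1-p)\log_2(1-p),
+\end{aligned}
+$$
+
+regardless of the input distribution $Pr(X=x)$.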
Notes:
diff --git a/content/CSE5313/CSE5313_L18.md b/content/CSE5313/CSE5313_L18.md
index f76508a..a7a87b0 100644
--- a/content/CSE5313/CSE5313_L18.md
+++ b/content/CSE5313/CSE5313_L18.md
@@ -196,7 +196,7 @@ $\operatorname{Pr}(s_\mathcal{Z}|m_1, \cdots, m_{t-z}) = \operatorname{Pr}(U_1,
Conclude similarly by the law of total probability.
-$\operatorname{Pr}(s_\mathcal{Z}|m_1, \cdots, m_{t-z}) = \operatorname{Pr}(s_\mathcal{Z}) \implies I(S_\mathcal{Z}; M_1, \cdots, M_{t-z}) = 0.
+$\operatorname{Pr}(s_\mathcal{Z}|m_1, \cdots, m_{t-z}) = \operatorname{Pr}(s_\mathcal{Z}) \implies I(S_\mathcal{Z}; M_1, \cdots, M_{t-z}) = 0$.
### Conditional mutual information
@@ -246,14 +246,14 @@ A: Fix any $\mathcal{T} = \{i_1, \cdots, i_t\} \subseteq [n]$ of size $t$, and l
$$
\begin{aligned}
H(M) &= I(M; S_\mathcal{T}) + H(M|S_\mathcal{T}) \text{(by def. of mutual information)}\\
-&= I(M; S_\mathcal{T}) \text{(since S_\mathcal{T} suffice to decode M)}\\
-&= I(M; S_{i_t}, S_\mathcal{Z}) \text{(since S_\mathcal{T} = S_\mathcal{Z} ∪ S_{i_t})}\\
+&= I(M; S_\mathcal{T}) \text{(since }S_\mathcal{T}\text{ suffices to decode }M\text{)}\\
+&= I(M; S_{i_t}, S_\mathcal{Z}) \text{(since }S_\mathcal{T} = S_\mathcal{Z} \cup S_{i_t}\text{)}\\
&= I(M; S_{i_t}|S_\mathcal{Z}) + I(M; S_\mathcal{Z}) \text{(chain rule)}\\
-&= I(M; S_{i_t}|S_\mathcal{Z}) \text{(since \mathcal{Z} ≤ z, it reveals nothing about M)}\\
+&= I(M; S_{i_t}|S_\mathcal{Z}) \text{(since }|\mathcal{Z}|\leq z\text{, }S_\mathcal{Z}\text{ reveals nothing about }M\text{)}\\
&= I(S_{i_t}; M|S_\mathcal{Z}) \text{(symmetry of mutual information)}\\
&= H(S_{i_t}|S_\mathcal{Z}) - H(S_{i_t}|M,S_\mathcal{Z}) \text{(def. of conditional mutual information)}\\
-\leq H(S_{i_t}|S_\mathcal{Z}) \text{(entropy is non-negative)}\\
-\leq H(S_{i_t}|S_\mathcal{Z}) \text{(conditioning reduces entropy). \\
+&\leq H(S_{i_t}|S_\mathcal{Z}) \text{(entropy is non-negative)}\\
+&\leq H(S_{i_t}) \text{(conditioning reduces entropy)} \\
\end{aligned}
$$
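+
+Reading off the two ends of this chain:
+
+$$
+H(M)\leq H(S_{i_t}|S_\mathcal{Z})\leq H(S_{i_t}),
+$$
+
+so every individual share $S_{i_t}$ must have entropy at least $H(M)$.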
diff --git a/content/Math4201/Math4201_L35.md b/content/Math4201/Math4201_L35.md
new file mode 100644
index 0000000..8cf210f
--- /dev/null
+++ b/content/Math4201/Math4201_L35.md
@@ -0,0 +1,125 @@
+# Math4201 Topology I (Lecture 35)
+
+## Countability and separation axioms
+
+### Kolmogorov classification
+
+Consider the topological space $X$.
+
+$X$ is $T_0$ means that for every pair of points $x,y\in X$ with $x\neq y$, there is an open set $U$ containing one of $x$ and $y$ but not the other.
+
+$X$ is $T_1$ means that for every pair of points $x,y\in X$ with $x\neq y$, there are open sets $U$ and $V$ such that $x\in U$, $y\in V$, $x\notin V$, and $y\notin U$. (Equivalently, singleton sets are closed.)
+
+$X$ is $T_2$ means that for every pair of points $x,y\in X$ with $x\neq y$, there exist disjoint open sets $U$ and $V$ such that $x\in U$ and $y\in V$. (Hausdorff)
+
+$X$ is $T_3$ means that $X$ is regular: for any $x\in X$ and any closed set $A\subseteq X$ such that $x\notin A$, there are **disjoint open sets** $U,V$ such that $x\in U$ and $A\subseteq V$.
+
+$X$ is $T_4$ means that $X$ is normal: for any disjoint closed sets $A,B\subseteq X$, there are **disjoint open sets** $U,V$ such that $A\subseteq U$ and $B\subseteq V$.
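+
+For illustration, two standard examples separating the first three axioms:
+
+$$
+\begin{aligned}
+&X=\{a,b\},\ \tau=\{\emptyset,\{a\},X\} \text{ (the Sierpinski space)}: && T_0 \text{ but not } T_1,\\
+&X=\mathbb{Z} \text{ with the cofinite topology}: && T_1 \text{ but not } T_2.
+\end{aligned}
+$$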
+
+
+Example
+
+Let $\mathbb{R}_{\ell}$ be $\mathbb{R}$ with the lower limit topology.
+
+$\mathbb{R}_{\ell}$ is normal: let $A,B\subseteq \mathbb{R}_{\ell}$ be disjoint closed sets. For each $x\in A$, since $B$ is closed and $x\notin B$, there exists $\epsilon_x>0$ such that $[x,x+\epsilon_x)$ does not intersect $B$.
+
+Similarly, for each $y\in B$, there exists $\delta_y>0$ such that $[y,y+\delta_y)$ does not intersect $A$.
+
+Let $U=\bigcup_{x\in A}[x,x+\epsilon_x)$; it is open and contains $A$.
+
+Let $V=\bigcup_{y\in B}[y,y+\delta_y)$; it is open and contains $B$.
+
+We show that $U$ and $V$ are disjoint.
+
+If $U\cap V\neq \emptyset$, then there exist $x\in A$ and $y\in B$ such that $[x,x+\epsilon_x)\cap [y,y+\delta_y)\neq \emptyset$.
+
+Since $A\cap B=\emptyset$, $x\neq y$; without loss of generality $x<y$. Any point of the intersection is $\geq y$ and $<x+\epsilon_x$, so $y<x+\epsilon_x$ and hence $y\in [x,x+\epsilon_x)$. This contradicts the choice of $\epsilon_x$, since $[x,x+\epsilon_x)$ does not intersect $B$.
+
+
+
+#### Theorem Every metric space is normal
+
+The proof is similar to the example above.
+
+
+Proof
+
+Let $A,B\subseteq X$ be disjoint closed sets.
+
+Since $B$ is closed, for any $x\in A$ (so $x\notin B$), there exists $\epsilon_x>0$ such that $B_{\epsilon_x}(x)\cap B=\emptyset$.
+
+Since $A$ is closed, for any $y\in B$ (so $y\notin A$), there exists $\delta_y>0$ such that $B_{\delta_y}(y)\cap A=\emptyset$.
+
+Let $U=\bigcup_{x\in A}B_{\epsilon_x/2}(x)$ and $V=\bigcup_{y\in B}B_{\delta_y/2}(y)$.
+
+We show that $U$ and $V$ are disjoint.
+
+If $U\cap V\neq \emptyset$, then there exist $x\in A$ and $y\in B$ such that $B_{\epsilon_x/2}(x)\cap B_{\delta_y/2}(y)\neq \emptyset$.
+
+Consider $z\in B_{\epsilon_x/2}(x)\cap B_{\delta_y/2}(y)$. Then $d(x,z)<\epsilon_x/2$ and $d(y,z)<\delta_y/2$. Therefore $d(x,y)\leq d(x,z)+d(z,y)<\epsilon_x/2+\delta_y/2$.
+
+If $\delta_y\leq\epsilon_x$, then $d(x,y)<\epsilon_x/2+\epsilon_x/2=\epsilon_x$. Therefore $y\in B_{\epsilon_x}(x)$. This is a contradiction since $B_{\epsilon_x}(x)\cap B=\emptyset$.
+
+If $\epsilon_x\leq\delta_y$, then $d(x,y)<\delta_y/2+\delta_y/2=\delta_y$. Therefore $x\in B_{\delta_y}(y)$. This is a contradiction since $B_{\delta_y}(y)\cap A=\emptyset$.
+
+Therefore, $U$ and $V$ are disjoint.
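+
+As a concrete illustration of this construction (take $X=\mathbb{R}$ with the usual metric, $A=(-\infty,0]$, $B=[1,\infty)$), one possible choice of radii gives:
+
+$$
+\begin{aligned}
+\epsilon_x &= 1-x \quad (x\in A), & \delta_y &= y \quad (y\in B),\\
+U &= \bigcup_{x\leq 0} B_{(1-x)/2}(x) = \left(-\infty,\tfrac{1}{2}\right), & V &= \bigcup_{y\geq 1} B_{y/2}(y) = \left(\tfrac{1}{2},\infty\right),
+\end{aligned}
+$$
+
+which are indeed disjoint open sets containing $A$ and $B$ respectively.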
+
+
+
+#### Lemma of regular topological space
+
+$X$ is a regular topological space if and only if for any $x\in X$ and any open neighborhood $U$ of $x$, there is an open neighborhood $V$ of $x$ such that $\overline{V}\subseteq U$.
+
+#### Lemma of normal topological space
+
+$X$ is a normal topological space if and only if for any closed $A\subseteq X$ and any open neighborhood $U$ of $A$, there is an open neighborhood $V$ of $A$ such that $\overline{V}\subseteq U$.
+
+
+Proof
+
+$\implies$
+
+Let $A$ and $U$ be given as in the statement.
+
+Then $A$ and $X-U$ are disjoint closed sets.
+
+Since $X$ is normal, there are disjoint open sets $V,W\subseteq X$ such that $A\subseteq V$ and $X-U\subseteq W$.
+
+Since $V\subseteq X-W$ and $X-W$ is closed, $\overline{V}\subseteq X-W\subseteq U$.
+
+So $V$ is an open neighborhood of $A$ with $\overline{V}\subseteq U$.
+
+The proof of the reverse direction is similar.
+
+Let $A,B$ be disjoint and closed.
+
+Then $A\subseteq U\coloneqq X-B$, and $X-B$ is open in $X$.
+
+Apply the assumption to find an open $V\subseteq X$ with $A\subseteq V$ and $\overline{V}\subseteq U=X-B$. Then $V$ and $X-\overline{V}$ are disjoint open sets with $A\subseteq V$ and $B\subseteq X-\overline{V}$, so $X$ is normal.
+
+
+
+#### Proposition of regular and Hausdorff on subspaces
+
+1. If $X$ is a regular topological space and $Y\subseteq X$ is a subspace, then $Y$ with the induced topology is regular. (The same holds for Hausdorff.)
+2. If $\{X_\alpha\}$ is a collection of regular topological spaces, then their product with the product topology is regular. (The same holds for Hausdorff.)
+
+> [!CAUTION]
+>
+> The above does not hold for normal.
+
+Recall that $\mathbb{R}_{\ell}$ with the lower limit topology is normal, but $\mathbb{R}_{\ell}\times \mathbb{R}_{\ell}$ with the product topology is not normal. (See Problem Set 11.)
+
+This shows that $\mathbb{R}_{\ell}$ is not metrizable: otherwise $\mathbb{R}_{\ell}\times \mathbb{R}_{\ell}$ would be metrizable, which would imply that $\mathbb{R}_{\ell}\times \mathbb{R}_{\ell}$ is normal, a contradiction.
+
+#### Theorem of metrizability
+
+If $X$ is normal and second countable, then $X$ is metrizable.
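+
+For example, combining this with the standard fact that compact Hausdorff spaces are normal (a fact assumed here, not stated in this lecture):
+
+$$
+X \text{ compact Hausdorff, second countable} \implies X \text{ normal, second countable} \implies X \text{ metrizable,}
+$$
+
+so every second countable compact Hausdorff space is metrizable.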
+
+> [!NOTE]
+>
+> - Every metrizable topological space is normal.
+> - Every metrizable space is first countable.
+> - But there are metrizable spaces that are not second countable (for example, an uncountable discrete space).
+>
+> Note that if $X$ is normal and first countable, it is not necessarily metrizable (for example, $\mathbb{R}_{\ell}$).
\ No newline at end of file
diff --git a/content/Math4201/_meta.js b/content/Math4201/_meta.js
index d874e5e..764d860 100644
--- a/content/Math4201/_meta.js
+++ b/content/Math4201/_meta.js
@@ -38,4 +38,5 @@ export default {
Math4201_L32: "Topology I (Lecture 32)",
Math4201_L33: "Topology I (Lecture 33)",
Math4201_L34: "Topology I (Lecture 34)",
+ Math4201_L35: "Topology I (Lecture 35)",
}