diff --git "a/wiki/proofwiki/shard_14.txt" "b/wiki/proofwiki/shard_14.txt"
new file mode 100644
--- /dev/null
+++ "b/wiki/proofwiki/shard_14.txt"
@@ -0,0 +1,11286 @@
+\section{Rule of Transposition/Variant 1/Formulation 2/Forward Implication}
+Tags: Rule of Transposition
+
+\begin{theorem}
+: $\vdash \left({p \implies \neg q}\right) \implies \left({q \implies \neg p}\right)$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 2/Formulation 2/Forward Implication/Proof}
+Tags: Rule of Transposition
+
+\begin{theorem}
+: $\vdash \left({\neg p \implies q}\right) \implies \left({\neg q \implies p}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \left({\neg p \implies q}\right) \implies \left({\neg q \implies p}\right)}}
+{{Assumption|1|\neg p \implies q}}
+{{Assumption|2|\neg q}}
+{{ModusTollens|3|1, 2|\neg \neg p|1|2}}
+{{DoubleNegElimination|4|1, 2|p|3}}
+{{Implication|5|1|\neg q \implies p|2|4}}
+{{Implication|6||\left({\neg p \implies q}\right) \implies \left({\neg q \implies p}\right)|1|5}}
+{{EndTableau}}
+{{Qed}}
+{{LEM|Double Negation Elimination|4}}
+\end{proof}<|endoftext|>
+\section{Boundary of Polygon is Jordan Curve}
+Tags: Topology
+
+\begin{theorem}
+Let $P$ be a [[Definition:Polygon|polygon]] embedded in $\R^2$.
+Then there exists a [[Definition:Jordan Curve|Jordan curve]] $\gamma: \closedint 0 1 \to \R^2$ such that the [[Definition:Image of Mapping|image]] of $\gamma$ is equal to the [[Definition:Boundary (Geometry)|boundary]] $\partial P$ of $P$.
+\end{theorem}
+
+\begin{proof}
+The [[Definition:Polygon|polygon]] $P$ has $n$ [[Definition:Side of Polygon|sides]], where $n \in \N$.
+Denote the [[Definition:Vertex of Polygon|vertices]] of $P$ as $A_1, \ldots, A_n$ and its sides as $S_1, \ldots, S_n$, such that each [[Definition:Vertex of Polygon|vertex]] $A_i$ has [[Definition:Adjacent Side to Vertex|adjacent sides]] $S_{i - 1}$ and $S_i$.
+We use the conventions that $S_0 = S_n$, and $A_{n + 1} = A_1$.
+As each side $S_i$ is a [[Definition:Convex Set (Vector Space)/Line Segment|line segment]] joining $A_i$ and $A_{i + 1}$, when we define the [[Definition:Path (Topology)|path]] $\gamma_i: \closedint 0 1 \to \R^2$ by:
+:$\map {\gamma_i} t = \paren {1 - t} A_i + t A_{i + 1}$
+the [[Definition:Image of Mapping|image]] of $\gamma_i$ is equal to the [[Definition:Side of Polygon|side]] $S_i$.
+Define $\gamma: \closedint 0 1 \to \R^2$ as the [[Definition:Concatenation (Topology)|concatenation]] $\paren {\cdots \paren {\paren {\gamma_1 * \gamma_2} * \gamma_3} * \ldots * \gamma_{n - 1} } * \gamma_n$.
+Then each [[Definition:Point|point]] in the [[Definition:Image of Mapping|image]] of $\gamma$ corresponds to a point in a [[Definition:Side of Polygon|side]] of $P$.
+As $\map \gamma 0 = A_1 = \map \gamma 1$ by our definition of $A_{n + 1}$, it follows that $\gamma$ is a [[Definition:Closed Path (Topology)|closed path]].
+It follows from the [[Definition:Polygon|definition of polygon]] that the [[Definition:Side of Polygon|sides]] of $P$ do not [[Definition:Intersection (Geometry)|intersect]], except at the [[Definition:Vertex of Polygon|vertices]].
+For $i \ne 1$, each [[Definition:Vertex of Polygon|vertex]] $A_i$ is the [[Definition:Initial Point of Path|initial point]] of $\gamma_i$ and the [[Definition:Final Point of Path|final point]] of $\gamma_{i - 1}$, and is equal to exactly one [[Definition:Point|point]] $\map \gamma {2^{-n - 1 + i} }$ in the [[Definition:Image of Mapping|image]] of $\gamma$.
+Then we have that $\gamma$ [[Definition:Restriction of Mapping|restricted]] to $\hointr 0 1$ is [[Definition:Injection|injective]].
+Hence $\gamma$ is a [[Definition:Jordan Curve|Jordan curve]].
+{{qed}}
+[[Category:Topology]]
+7lfmlxoo0byynfdg2uvcwlcosct733d
+\end{proof}<|endoftext|>
+\section{Rule of Explosion/Variant 1}
+Tags: Rule of Explosion
+
+\begin{theorem}
+:$\vdash p \implies \paren {\neg p \implies q}$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash p \implies \paren {\neg p \implies q} }}
+{{Assumption|1|p}}
+{{Assumption|2|\neg p}}
+{{NonContradiction|3|1, 2|1|2}}
+{{Explosion|4|1, 2|q|3}}
+{{Implication|5|1|\neg p \implies q|2|4}}
+{{Implication|6||p \implies \paren {\neg p \implies q}|1|5}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Negation of Conditional implies Antecedent}
+Tags: Implication, Negation
+
+\begin{theorem}
+:$\vdash \neg \paren {p \implies q} \implies p$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \neg \paren {p \implies q} \implies p}}
+{{Assumption|1|\neg \paren {p \implies q} }}
+{{SequentIntro|2|1|p \land \neg q|1|[[Conjunction with Negative Equivalent to Negation of Implication]]}}
+{{Simplification|3|1|p|2|1}}
+{{Implication|4||\neg \paren {p \implies q} \implies p|1|3}}
+{{EndTableau}}
+{{Qed}}
+\end{proof}<|endoftext|>
+\section{Negation of Conditional implies Negation of Consequent}
+Tags: Implication, Negation
+
+\begin{theorem}
+:$\vdash \neg \left({p \implies q}\right) \implies \neg q$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \neg \left({p \implies q}\right) \implies \neg q}}
+{{Assumption|1|\neg \left({p \implies q}\right)}}
+{{SequentIntro|2|1|p \land \neg q|1|[[Conjunction with Negative Equivalent to Negation of Implication]]}}
+{{Simplification|3|1|\neg q|2|2}}
+{{Implication|4||\neg \left({p \implies q}\right) \implies \neg q|1|3}}
+{{EndTableau}}
+{{Qed}}
+\end{proof}<|endoftext|>
+\section{Matrix Multiplication Interpretation of Relation Composition}
+Tags: Relation Theory
+
+\begin{theorem}
+Let $A$, $B$ and $C$ be [[Definition:Finite Set|finite]] [[Definition:Non-Empty Set|non-empty]] [[Definition:Set|sets]] that are [[Definition:Initial Segment|initial segments]] of $\N_{\ne 0}$.
+Let $\mathcal R \subseteq B \times A$ and $\mathcal S \subseteq C \times B$ be [[Definition:Relation|relations]].
+Let $\mathbf R$ and $\mathbf S$ be [[Definition:Matrix|matrices]] which we define as follows:
+:$\left[{r}\right]_{i j} = \begin{cases}
+T & : (i, j) \in \mathcal R \\
+F & : (i, j) \notin \mathcal R\\
+\end{cases}$
+:$\left[{s}\right]_{i j} = \begin{cases}
+T & : (i, j) \in \mathcal S \\
+F & : (i, j) \notin \mathcal S\\
+\end{cases}$
+Then we can interpret the [[Definition:Matrix Product (Conventional)|matrix product]] $\mathbf R \mathbf S$ as the [[Definition:Composition of Relations|composition]] $\mathcal S \circ \mathcal R$.
+To do so we temporarily consider $\left({\left\{ {T, F}\right\}, \land, \lor}\right)$ to be our "[[Definition:Ring (Abstract Algebra)|ring]]" on which we are basing [[Definition:Matrix Product (Conventional)|matrix multiplication]].
+Then:
+:$\left[{r s}\right]_{i j} = T \iff (i, j) \in \mathcal S \circ \mathcal R$
+\end{theorem}
+
+\begin{proof}
+=== Sufficient Condition ===
+Suppose for some $i, j$:
+:$\left[{r s}\right]_{i j} = T$
+Then by the definition of $\lor$ there must exist some $k$ for which:
+:$\left[{r}\right]_{i k} \land \left[{s}\right]_{k j} = T$
+which by our definition implies:
+:$\left({i, k}\right) \in \mathcal R$
+:$\left({k, j}\right) \in \mathcal S$
+Then by the definition of a [[Definition:Composite Relation|composite relation]]:
+:$\left({i, j}\right) \in \mathcal S \circ \mathcal R$
+=== Necessary Condition ===
+Suppose for some $i, j$:
+:$\left({i, j}\right) \in \mathcal S \circ \mathcal R$
+Then there exists a $k$ for which:
+:$\left({i, k}\right) \in \mathcal R$
+:$\left({k, j}\right) \in \mathcal S$
+Hence:
+:$\left[{r}\right]_{i k} = T$
+:$\left[{s}\right]_{k j} = T$
+and there exists some $k$ for which:
+:$\left[{r}\right]_{i k} \land \left[{s}\right]_{k j} = T$
+Hence by the definition of $\lor$:
+:$\left[{r s}\right]_{i j} = T$
+{{qed}}
+[[Category:Relation Theory]]
+7uk4uhou0t7d40rr3ek2kiswutnihi1
+\end{proof}<|endoftext|>
+\section{Praeclarum Theorema/Formulation 1/Proof 1}
+Tags: Praeclarum Theorema
+
+\begin{theorem}
+: $\left({p \implies q}\right) \land \left({r \implies s}\right) \vdash \left({p \land r}\right) \implies \left({q \land s}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\left({p \implies q}\right) \land \left({r \implies s}\right) \vdash \left({p \land r}\right) \implies \left({q \land s}\right)}}
+{{Premise|1|\left({p \implies q}\right) \land \left({r \implies s}\right)}}
+{{Simplification|2|1|p \implies q|1|1}}
+{{Simplification|3|1|r \implies s|1|2}}
+{{Assumption|4|p \land r}}
+{{Simplification|5|4|p|4|1}}
+{{Simplification|6|4|r|4|2}}
+{{ModusPonens|7|1, 4|q|2|5}}
+{{ModusPonens|8|1, 4|s|3|6}}
+{{Conjunction|9|1, 4|q \land s|7|8|}}
+{{Implication|10|1|\left({p \land r}\right) \implies \left({q \land s}\right)|4|9}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Compactness from Basis}
+Tags: Compact Spaces
+
+\begin{theorem}
+Let $\struct {X, \tau}$ be a [[Definition:Topological Space|topological space]].
+Let $B$ be a [[Definition:Basis (Topology)|basis]] for $\tau$.
+Then the following propositions are equivalent:
+{{begin-axiom}}
+{{axiom | n = 1
+ | t = $\struct {X, \tau}$ is compact.
+}}
+{{axiom | n = 2
+ | t = Every open cover of $X$ by elements of $B$ has a finite subcover.
+}}
+{{end-axiom}}
+\end{theorem}
+
+\begin{proof}
+=== $(1)$ implies $(2)$ ===
+This follows immediately from the definition of compactness.
+{{qed|lemma}}
+=== $(2)$ implies $(1)$ ===
+Suppose that $(2)$ holds.
+Let $\AA$ be an open cover of $X$.
+Let $f: \AA \to \powerset B$ map each element of $\AA$ to the set of all elements of $B$ it contains.
+Since each element of $\AA$ is open, $A = \bigcup f(A)$ for each $A \in \AA$.
+Let $\AA' = \bigcup \map f \AA$.
+Then $\AA'$ is an open cover of $X$ consisting solely of elements of $B$.
+By the premise, $\AA'$ has a finite subset $\FF'$ that covers $X$.
+Let $g: \FF' \to \AA$ map each element of $\FF'$ to an element of $\AA$ containing it.
+Note that since $\FF'$ is finite, this does not require the [[Axiom:Axiom of Choice|Axiom of Choice]].
+Let $\FF = \map g {\FF'}$.
+Then $\FF$ is a finite subcover of $\AA$.
+Since every open cover of $X$ has a finite subcover, $X$ is compact.
+{{qed}}
+[[Category:Compact Spaces]]
+a377rwn6vjz6p4kspxbkb0a6naooxx1
+\end{proof}<|endoftext|>
+\section{Set of Subsets is Cover iff Set of Complements is Free}
+Tags: Set Theory
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $\mathcal C$ be a [[Definition:Set of Sets|set of sets]].
+Then $\mathcal C$ is a [[Definition:Cover of Set|cover for $S$]] {{iff}} $\set {\relcomp S X: X \in \mathcal C}$ is [[Definition:Free Set of Sets|free]].
+\end{theorem}
+
+\begin{proof}
+Let $S$ be a [[Definition:Set|set]].
+=== Necessary Condition ===
+Let $\mathcal C$ be a [[Definition:Cover of Set|cover for $S$]]:
+:$S \subseteq \bigcup \mathcal C$
+Suppose:
+:$\set {\relcomp S X: X \in \mathcal C}$
+is not [[Definition:Free Set of Sets|free]].
+Then there exists an $x \in S$ such that:
+:$\forall X \in \mathcal C: x \in \relcomp S X$
+But by the definition of [[Definition:Relative Complement|(relative) complement]]:
+:$\forall X \in \mathcal C: x \notin X$
+That is:
+:$x \notin \bigcup \mathcal C$
+which contradicts:
+:$S \subseteq \bigcup \mathcal C$
+{{qed|lemma}}
+=== Sufficient Condition ===
+Let:
+:$\set {\relcomp S X: X \in \mathcal C}$
+be [[Definition:Free Set of Sets|free]].
+Suppose:
+:$\mathcal C$
+is not a [[Definition:Cover of Set|cover for $S$]].
+Then there exists some $x \in S$ such that:
+:$\forall X \in \mathcal C: x \notin X$
+Hence:
+:$\forall X \in \mathcal C: x \in \relcomp S X$
+which contradicts the supposition that:
+:$\set {\relcomp S X: X \in \mathcal C}$
+is [[Definition:Free Set of Sets|free]].
+{{qed}}
+[[Category:Set Theory]]
+j3kwwh8bhy821r0podg8mwieey7rh2c
+\end{proof}<|endoftext|>
+\section{Implication Equivalent to Negation of Conjunction with Negative/Formulation 2/Reverse Implication}
+Tags: Implication Equivalent to Negation of Conjunction with Negative
+
+\begin{theorem}
+:$\vdash \left({\neg \left({p \land \neg q}\right)}\right) \implies \left({p \implies q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \left({\neg \left({p \land \neg q}\right)}\right) \implies \left({p \implies q}\right)}}
+{{Assumption|1|\neg \left({p \land \neg q}\right)}}
+{{SequentIntro|2|1|p \implies q|1|[[Implication Equivalent to Negation of Conjunction with Negative/Formulation 1/Reverse Implication|Implication Equivalent to Negation of Conjunction with Negative: Formulation 1]]}}
+{{Implication|3||\left({\neg \left({p \land \neg q}\right)}\right) \implies \left({p \implies q}\right)|1|2}}
+{{EndTableau}}
+{{qed}}
+{{LEM|Implication Equivalent to Negation of Conjunction with Negative/Formulation 1/Reverse Implication}}
+[[Category:Implication Equivalent to Negation of Conjunction with Negative]]
+estsbm4ogmgfblagpg6qq8budnw25nl
+\end{proof}<|endoftext|>
+\section{Biconditional Elimination}
+Tags: Biconditional, Biconditional Elimination, Implication
+
+\begin{theorem}
+The rule of '''biconditional elimination''' is a [[Definition:Valid Argument|valid]] deduction [[Definition:Sequent|sequent]] in [[Definition:Propositional Logic|propositional logic]].
+\end{theorem}<|endoftext|>
+\section{Biconditional Introduction/Sequent Form}
+Tags: Biconditional Introduction
+
+\begin{theorem}
+:$p \implies q, q \implies p \vdash p \iff q$
+\end{theorem}<|endoftext|>
+\section{Biconditional Elimination/Sequent Form}
+Tags: Biconditional Elimination
+
+\begin{theorem}
+:$(1): \quad p \iff q \vdash p \implies q$
+:$(2): \quad p \iff q \vdash q \implies p$
+\end{theorem}<|endoftext|>
+\section{Biconditional Elimination/Sequent Form/Proof 1/Form 1}
+Tags: Biconditional Elimination
+
+\begin{theorem}
+:$p \iff q \vdash p \implies q$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|p \iff q \vdash p \implies q}}
+{{Premise|1|p \iff q}}
+{{BiconditionalElimination|2|1|p \implies q|1|1}}
+{{EndTableau}}
+{{Qed}}
+\end{proof}<|endoftext|>
+\section{Biconditional Elimination/Sequent Form/Proof 1/Form 2}
+Tags: Biconditional Elimination
+
+\begin{theorem}
+:$p \iff q \vdash q \implies p$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|p \iff q \vdash q \implies p}}
+{{Premise|1|p \iff q}}
+{{BiconditionalElimination|2|1|q \implies p|1|2}}
+{{EndTableau}}
+{{Qed}}
+\end{proof}<|endoftext|>
+\section{Biconditional Elimination/Sequent Form/Proof 1}
+Tags: Biconditional Elimination
+
+\begin{theorem}
+:$(1): \quad p \iff q \vdash p \implies q$
+:$(2): \quad p \iff q \vdash q \implies p$
+\end{theorem}
+
+\begin{proof}
+=== [[Biconditional Elimination/Sequent Form/Proof 1/Form 1|Form 1]] ===
+{{:Biconditional Elimination/Sequent Form/Proof 1/Form 1}}
+=== [[Biconditional Elimination/Sequent Form/Proof 1/Form 2|Form 2]] ===
+{{:Biconditional Elimination/Sequent Form/Proof 1/Form 2}}
+\end{proof}<|endoftext|>
+\section{Biconditional Elimination/Sequent Form/Proof 2}
+Tags: Truth Table Proofs, Biconditional Elimination
+
+\begin{theorem}
+:$(1): \quad p \iff q \vdash p \implies q$
+:$(2): \quad p \iff q \vdash q \implies p$
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]].
+$\begin{array}{|ccc||ccc|ccc|} \hline
+p & \iff & q & p & \implies & q & q & \implies & p \\
+\hline
+F & T & F & F & T & F & F & T & F \\
+F & F & T & F & T & T & T & F & F \\
+T & F & F & T & F & F & F & T & T \\
+T & T & T & T & T & T & T & T & T \\
+\hline
+\end{array}$
+As can be seen, when $p \iff q$ is [[Definition:True|true]] so are both $p \implies q$ and $q \implies p$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Biconditional is Commutative/Formulation 1/Proof 1}
+Tags: Biconditional is Commutative
+
+\begin{theorem}
+: $p \iff q \dashv \vdash q \iff p$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|p \iff q \vdash q \iff p}}
+{{Premise|1|p \iff q}}
+{{BiconditionalElimination|2|1|p \implies q|1|1}}
+{{BiconditionalElimination|3|1|q \implies p|1|2}}
+{{BiconditionalIntro|4|1|q \iff p|3|2}}
+{{EndTableau}}
+{{qed|lemma}}
+{{BeginTableau|q \iff p \vdash p \iff q}}
+{{Premise|1|q \iff p}}
+{{BiconditionalElimination|2|1|q \implies p|1|1}}
+{{BiconditionalElimination|3|1|p \implies q|1|2}}
+{{BiconditionalIntro|4|1|p \iff q|3|2}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Rule of Material Equivalence/Formulation 1}
+Tags: Rule of Material Equivalence
+
+\begin{theorem}
+:$p \iff q \dashv \vdash \paren {p \implies q} \land \paren {q \implies p}$
+\end{theorem}<|endoftext|>
+\section{Equivalences are Interderivable/Proof 1}
+Tags: Equivalences are Interderivable, Truth Table Proofs
+
+\begin{theorem}
+If two [[Definition:Propositional Formula|propositional formulas]] are [[Definition:Interderivable|interderivable]], they are [[Definition:Biconditional|equivalent]]:
+: $\left ({p \dashv \vdash q}\right) \dashv \vdash \left ({p \iff q}\right)$
+\end{theorem}
+
+\begin{proof}
+The result follows directly from the [[Definition:Truth Table|truth table]] for the [[Definition:Biconditional|biconditional]]:
+$\begin{array}{|cc||ccc|} \hline
+p & q & p & \iff & q \\
+\hline
+F & F & F & T & F \\
+F & T & F & F & T \\
+T & F & T & F & F \\
+T & T & T & T & T \\
+\hline
+\end{array}$
+By inspection, it is seen that $\mathcal M \left({p \iff q}\right) = T$ precisely when $\mathcal M \left({p}\right) = \mathcal M \left({q}\right)$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Equivalences are Interderivable/Forward Implication}
+Tags: Equivalences are Interderivable
+
+\begin{theorem}
+: $\left ({p \dashv \vdash q}\right) \vdash \left ({p \iff q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\left({p \dashv \vdash q}\right) \vdash p \iff q}}
+{{Premise|1|p \dashv \vdash q}}
+{{SequentIntro|2|1|\left ({p \vdash q}\right) \land \left ({q \vdash p}\right)|1
+ |Definition of [[Definition:Interderivable|Interderivable]]}}
+{{Assumption|3|p}}
+{{Simplification|4|1, 3|p \vdash q|2|1}}
+{{Implication|5|1|p \implies q|3|4}}
+{{Assumption|6|q}}
+{{Simplification|7|1, 6|q \vdash p|2|2}}
+{{Implication|8|1|q \implies p|6|7}}
+{{BiconditionalIntro|9|1|p \iff q|5|8}}
+{{EndTableau}}
+{{Qed}}
+[[Category:Equivalences are Interderivable]]
+s1qqzy938u5n0099qdwkh50zrgg4tkc
+\end{proof}<|endoftext|>
+\section{Equivalences are Interderivable/Reverse Implication}
+Tags: Equivalences are Interderivable
+
+\begin{theorem}
+: $\left ({p \iff q}\right) \vdash \left ({p \dashv \vdash q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|p \iff q \vdash \left({p \vdash q}\right)}}
+{{Premise|1|p \iff q}}
+{{BiconditionalElimination|2|1|p \implies q|1|1}}
+{{Assumption|3|p}}
+{{ModusPonens|4|1, 3|q|2|3}}
+{{EndTableau}}
+{{qed|lemma}}
+{{BeginTableau|p \iff q \vdash \left({q \vdash p}\right)}}
+{{Premise|1|p \iff q}}
+{{BiconditionalElimination|2|1|q \implies p|1|2}}
+{{Assumption|3|q}}
+{{ModusPonens|4|1, 3|p|2|3}}
+{{EndTableau}}
+{{qed}}
+[[Category:Equivalences are Interderivable]]
+mdne6teoypnkvrjoq2gr6rw5bg536yu
+\end{proof}<|endoftext|>
+\section{Compact Subspace of Linearly Ordered Space/Reverse Implication}
+Tags: Compact Subspace of Linearly Ordered Space
+
+\begin{theorem}
+Let $\struct {X, \preceq, \tau}$ be a [[Definition:Linearly Ordered Space|linearly ordered space]].
+Let $Y \subseteq X$ be a [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] of $X$.
+Let the following hold:
+:$(1): \quad$ For every [[Definition:Non-Empty Set|non-empty]] $S \subseteq Y$, $S$ has a [[Definition:Supremum of Set|supremum]] and an [[Definition:Infimum of Set|infimum]] in $X$.
+:$(2): \quad$ For every [[Definition:Non-Empty Set|non-empty]] $S \subseteq Y$: $\sup S, \inf S \in Y$.
+Then $Y$ is a [[Definition:Compact Subspace|compact subspace]] of $\struct {X, \tau}$.
+\end{theorem}<|endoftext|>
+\section{Modus Ponendo Tollens/Sequent Form}
+Tags: Modus Ponendo Tollens
+
+\begin{theorem}
+==== [[Modus Ponendo Tollens/Sequent Form/Case 1|Case 1]] ====
+{{:Modus Ponendo Tollens/Sequent Form/Case 1}}
+==== [[Modus Ponendo Tollens/Sequent Form/Case 2|Case 2]] ====
+{{:Modus Ponendo Tollens/Sequent Form/Case 2}}
+\end{theorem}<|endoftext|>
+\section{Modus Ponendo Tollens/Sequent Form/Case 1}
+Tags: Modus Ponendo Tollens
+
+\begin{theorem}
+:$\neg \paren {p \land q}, p \vdash \neg q$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\neg \paren {p \land q}, p \vdash \neg q}}
+{{Premise|1|\neg \paren {p \land q} }}
+{{Premise|2|p}}
+{{Assumption|3|q}}
+{{Conjunction|4|2, 3|p \land q|2|3}}
+{{NonContradiction|5|1, 2, 3|4|1}}
+{{Contradiction|6|1, 2|\neg q|3|5}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Modus Ponendo Tollens/Sequent Form/Case 2}
+Tags: Modus Ponendo Tollens
+
+\begin{theorem}
+:$\neg \left({p \land q}\right), q \vdash \neg p$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\neg \left({p \land q}\right), q \vdash \neg p}}
+{{Premise|1|\neg \left({p \land q}\right)}}
+{{Premise|2|q}}
+{{Assumption|3|p}}
+{{Conjunction|4|2, 3|p \land q|3|2}}
+{{NonContradiction|5|1, 2, 3|4|1}}
+{{Contradiction|6|1, 2|\neg p|3|5}}
+{{EndTableau}}
+{{qed}}
+[[Category:Modus Ponendo Tollens]]
+mjeey35ngrumpe26ifn1ui05rzz2i9e
+\end{proof}<|endoftext|>
+\section{Union of Subsets is Subset/Subset of Power Set}
+Tags: Set Union, Subsets
+
+\begin{theorem}
+Let $S$ and $T$ be [[Definition:Set|sets]].
+Let $\powerset S$ be the [[Definition:Power Set|power set]] of $S$.
+Let $\mathbb S$ be a [[Definition:Subset|subset]] of $\powerset S$.
+Then:
+:$\displaystyle \paren {\forall X \in \mathbb S: X \subseteq T} \implies \bigcup \mathbb S \subseteq T$
+\end{theorem}
+
+\begin{proof}
+Let $\mathbb S \subseteq \powerset S$.
+Suppose that $\forall X \in \mathbb S: X \subseteq T$.
+Consider any $\displaystyle x \in \bigcup \mathbb S$.
+By definition of [[Definition:Set Union|set union]], it follows that:
+:$\exists X \in \mathbb S: x \in X$
+But as $X \subseteq T$ it follows that $x \in T$.
+Thus it follows that:
+:$\displaystyle \bigcup \mathbb S \subseteq T$
+So:
+:$\displaystyle \paren {\forall X \in \mathbb S: X \subseteq T} \implies \bigcup \mathbb S \subseteq T$
+{{qed}}
+[[Category:Set Union]]
+[[Category:Subsets]]
+b4h728rptgi1xeujekujfg47szo7nag
+\end{proof}<|endoftext|>
+\section{Conjunction with Negative Equivalent to Negation of Implication/Formulation 2/Forward Implication}
+Tags: Conjunction with Negative Equivalent to Negation of Implication
+
+\begin{theorem}
+:$\vdash \paren {p \land \neg q} \implies \paren {\neg \paren {p \implies q} }$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \paren {p \land \neg q} \implies \paren {\neg \paren {p \implies q} } }}
+{{Assumption|1|p \land \neg q}}
+{{SequentIntro|2|1|\neg \paren {p \implies q}|1|[[Conjunction with Negative Equivalent to Negation of Implication/Formulation 1/Forward Implication|Conjunction with Negative Equivalent to Negation of Implication: Formulation 1]]}}
+{{Implication|3||\paren {p \land \neg q} \implies \paren {\neg \paren {p \implies q} }|1|2}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Conjunction with Negative Equivalent to Negation of Implication/Formulation 2/Reverse Implication}
+Tags: Conjunction with Negative Equivalent to Negation of Implication
+
+\begin{theorem}
+:$\vdash \left({\neg \left({p \implies q}\right)}\right) \implies \left({p \land \neg q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \left({\neg \left({p \implies q}\right)}\right) \implies \left({p \land \neg q}\right)}}
+{{Assumption|1|\neg \left({p \implies q}\right)}}
+{{SequentIntro|2|1|p \land \neg q|1|[[Conjunction with Negative Equivalent to Negation of Implication/Formulation 1/Reverse Implication|Conjunction with Negative Equivalent to Negation of Implication: Formulation 1]]}}
+{{Implication|3||\left({\neg \left({p \implies q}\right)}\right) \implies \left({p \land \neg q}\right)|1|2}}
+{{EndTableau}}
+{{qed}}
+{{LEM|Conjunction with Negative Equivalent to Negation of Implication/Formulation 1/Reverse Implication}}
+[[Category:Conjunction with Negative Equivalent to Negation of Implication]]
+sbryqpjb0wps953pi6kun8dd03dtd07
+\end{proof}<|endoftext|>
+\section{Convex Set is Star Convex Set}
+Tags: Vector Spaces
+
+\begin{theorem}
+Let $V$ be a [[Definition:Vector Space|vector space]] over $\R$ or $\C$.
+Let $A \subseteq V$ be a [[Definition:Empty Set|non-empty]] [[Definition:Convex Set (Vector Space)|convex set]].
+Then $A$ is a [[Definition:Star Convex Set|star convex set]], and every point in $A$ is a [[Definition:Star Convex Set|star center]].
+\end{theorem}
+
+\begin{proof}
+Let $a \in A$.
+Note that there is at least one point in $A$, as $A$ is [[Definition:Empty Set|non-empty]].
+If $x \in A$, then there is a [[Definition:Convex Set (Vector Space)/Line Segment|line segment]] joining $a$ and $x$.
+By [[Definition:Star Convex Set|definition of star convex set]], it follows that $A$ is star convex, and $a$ is a star center.
+{{qed}}
+[[Category:Vector Spaces]]
+2fqyb2qiefdy7qshallf7xpvdwu1sxh
+\end{proof}<|endoftext|>
+\section{Negation implies Negation of Conjunction/Case 1}
+Tags: Negation implies Negation of Conjunction
+
+\begin{theorem}
+:$\neg p \implies \neg \paren {p \land q}$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\neg p \implies \neg \paren {p \land q} }}
+{{Assumption|1|\neg p}}
+{{Assumption|2|p \land q}}
+{{Simplification|3|2|p|2|1}}
+{{NonContradiction|4|1, 2|3|1}}
+{{Contradiction|5|1|\neg \paren {p \land q}|2|4}}
+{{Implication|6||\neg p \implies \neg \paren {p \land q}|1|5}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Negation implies Negation of Conjunction/Case 2}
+Tags: Negation implies Negation of Conjunction
+
+\begin{theorem}
+:$\neg q \implies \neg \left({p \land q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\neg q \implies \neg \left({p \land q}\right)}}
+{{Assumption|1|\neg q}}
+{{Assumption|2|p \land q}}
+{{Simplification|3|2|q|2|2}}
+{{NonContradiction|4|1, 2|3|1}}
+{{Contradiction|5|1|\neg \left({p \land q}\right)|2|4}}
+{{Implication|6||\neg q \implies \neg \left({p \land q}\right)|1|5}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Neighborhood Sub-Basis Criterion for Filter Convergence}
+Tags: Filter Theory, Topological Bases
+
+\begin{theorem}
+Let $\left({X, \tau}\right)$ be a [[Definition:Topological Space|topological space]].
+Let $\mathcal F$ be a [[Definition:Filter on Set|filter]] on $X$.
+Let $p \in X$.
+Then $\mathcal F$ converges to $p$ [[Definition:Iff|iff]] $\mathcal F$ contains as a subset a [[Definition:Neighborhood Sub-Basis|neighborhood sub-basis]] at $p$.
+\end{theorem}
+
+\begin{proof}
+=== Forward implication ===
+If $\mathcal F$ converges to $p$, then it contains ''every'' neighborhood of $p$, and the set of neighborhoods of $p$ is trivially a [[Definition:Neighborhood Sub-Basis|neighborhood sub-basis]] at $p$.
+{{qed|lemma}}
+=== Reverse Implication ===
+Let $S_p$ be a [[Definition:Neighborhood Sub-Basis|neighborhood sub-basis]] at $p$.
+Let $S_p \subseteq \mathcal F$.
+Let $N$ be a neighborhood of $p$.
+Then by the definition of neighborhood sub-basis, there is a finite $T_N \subseteq S_p$ such that:
+:$\bigcap T_N \subseteq N$.
+Since a filter is closed under finite intersections, $\bigcap T_N \in \mathcal F$.
+Then $\bigcap T_N \in \mathcal F$ and $\bigcap T_N \subseteq N$, so by the definition of a filter, $N \in \mathcal F$.
+Since $\mathcal F$ contains every neighborhood of $p$, it converges to $p$.
+{{qed}}
+[[Category:Filter Theory]]
+[[Category:Topological Bases]]
+50pbzj0ow5lfvxhhnf75aln5p0i6353
+\end{proof}<|endoftext|>
+\section{Rule of Material Implication/Formulation 2/Forward Implication}
+Tags: Rule of Material Implication
+
+\begin{theorem}
+: $\vdash \left({p \implies q}\right) \implies \left({\neg p \lor q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\left({p \implies q}\right) \implies \left({\neg p \lor q}\right)}}
+{{Assumption|1|p \implies q}}
+{{SequentIntro|2|1|\neg p \lor q|1|[[Rule of Material Implication/Formulation 1/Forward Implication|Rule of Material Implication: Formulation 1]]}}
+{{Implication|3||\left({p \implies q}\right) \implies \left({\neg p \lor q}\right)|1|2}}
+{{EndTableau}}
+{{qed}}
+{{LEM|Rule of Material Implication/Formulation 1/Forward Implication|3}}
+\end{proof}<|endoftext|>
+\section{Rule of Material Implication/Formulation 2/Reverse Implication}
+Tags: Rule of Material Implication
+
+\begin{theorem}
+: $\vdash \left({\neg p \lor q}\right) \implies \left({p \implies q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\left({\neg p \lor q}\right) \implies \left({p \implies q}\right)}}
+{{Assumption|1|\neg p \lor q}}
+{{SequentIntro|2|1|p \implies q|1|[[Rule of Material Implication/Formulation 1/Reverse Implication|Rule of Material Implication: Formulation 1]]}}
+{{Implication|3||\left({\neg p \lor q}\right) \implies \left({p \implies q}\right)|1|2}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Implication is Left Distributive over Conjunction/Formulation 1/Proof 2}
+Tags: Implication is Left Distributive over Conjunction, Truth Table Proofs
+
+\begin{theorem}
+:$p \implies \left({q \land r}\right) \dashv \vdash \left({p \implies q}\right) \land \left({p \implies r}\right)$
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]].
+As can be seen by inspection, the [[Definition:Truth Value|truth values]] under the [[Definition:Main Connective (Propositional Logic)|main connectives]] match for all [[Definition:Boolean Interpretation|boolean interpretations]].
+$\begin{array}{|ccccc||ccccccc|} \hline
+p & \implies & (q & \land & r) & (p & \implies & q) & \land & (p & \implies & r) \\
+\hline
+F & T & F & F & F & F & T & F & T & F & T & F \\
+F & T & F & F & T & F & T & F & T & F & T & T \\
+F & T & T & F & F & F & T & T & T & F & T & F \\
+F & T & T & T & T & F & T & T & T & F & T & T \\
+T & F & F & F & F & T & F & F & F & T & F & F \\
+T & F & F & F & T & T & F & F & F & T & T & T \\
+T & F & T & F & F & T & T & T & F & T & F & F \\
+T & T & T & T & T & T & T & T & T & T & T & T \\
+\hline
+\end{array}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Compact Subspace of Linearly Ordered Space}
+Tags: Linearly Ordered Spaces, Compact Spaces, Compact Subspace of Linearly Ordered Space
+
+\begin{theorem}
+Let $\struct {X, \preceq, \tau}$ be a [[Definition:Linearly Ordered Space|linearly ordered space]].
+Let $Y \subseteq X$ be a [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] of $X$.
+Then $Y$ is a [[Definition:Compact Subspace|compact subspace]] of $\struct {X, \tau}$ {{iff}} both of the following hold:
+:$(1): \quad$ For every [[Definition:Non-Empty Set|non-empty]] $S \subseteq Y$, $S$ has a [[Definition:Supremum of Set|supremum]] and an [[Definition:Infimum of Set|infimum]] in $X$.
+:$(2): \quad$ For every [[Definition:Non-Empty Set|non-empty]] $S \subseteq Y$: $\sup S, \inf S \in Y$.
+\end{theorem}
+
+\begin{proof}
+=== Forward Implication ===
+Let $S$ be a [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] of $Y$.
+By [[Compact Subspace of Linearly Ordered Space/Lemma|Compact Subspace of Linearly Ordered Space: Lemma]], $S$ has a [[Definition:Supremum of Set|supremum]] $k$ in $Y$.
+{{explain|supremum WRT $\preceq \restriction_Y$}}
+{{AimForCont}} that $S$ has an [[Definition:Upper Bound of Set|upper bound]] $b$ in $X$ such that $b \prec k$.
+Let:
+:$\AA = \set {s^\preceq: s \in S} \cup \set {b^\succeq}$
+where:
+:$s^\preceq$ denotes the [[Definition:Lower Closure of Element|lower closure]] of $s$ in $X$
+:$b^\succeq$ denotes the [[Definition:Upper Closure of Element|upper closure]] of $b$ in $X$.
+Then $\AA$ is an [[Definition:Open Cover|open cover]] of $Y$.
+But $\AA$ has no [[Definition:Finite Subcover|finite subcover]], contradicting the fact that $Y$ is [[Definition:Compact Subspace|compact]].
+{{finish|above lines need proof}}
+{{explain|Pronounce proper incantations about duality.}}
+A similar argument proves the corresponding statement for infima, so $(2)$ holds.
+{{qed|lemma}}
+=== [[Compact Subspace of Linearly Ordered Space/Reverse Implication|Reverse Implication]] ===
+{{:Compact Subspace of Linearly Ordered Space/Reverse Implication}}
+\end{proof}<|endoftext|>
+\section{Order Topology is Hausdorff}
+Tags: Order Topology
+
+\begin{theorem}
+Let $\struct {X, \preceq, \tau}$ be a [[Definition:Linearly Ordered Space|linearly ordered space]].
+Then $\struct {X, \tau}$ is a [[Definition:Hausdorff Space|Hausdorff space]].
+\end{theorem}
+
+\begin{proof}
+Let $x, y \in X$ with $x \ne y$.
+Since $\preceq$ is a [[Definition:Total Ordering|total ordering]], either $x \prec y$ or $y \prec x$.
+{{WLOG}}, assume that $x \prec y$.
+If there is a $z \in X$ such that $x \prec z \prec y$, then ${\downarrow}z$ and ${\uparrow}z$ separate $x$ and $y$.
+{{explain|We are replacing the ${\downarrow}z$ and ${\uparrow}z$ notation with notation as defined in [[Definition:Upper Closure]] and [[Definition:Lower Closure]] -- make sure the correct one is used here}}
+Otherwise, by [[Mind the Gap]], ${\downarrow}y$ and $\uparrow x$ separate $x$ and $y$.
+Since any two distinct points can be separated by neighborhoods, $\struct {X, \tau}$ is a [[Definition:Hausdorff Space|Hausdorff space]].
+{{qed}}
+[[Category:Order Topology]]
+e5qhmlhfjbqqdy2rxg2mmxjakz97rs6
+\end{proof}<|endoftext|>
+\section{Subset of Linearly Ordered Space which is Order-Complete and Closed but not Compact}
+Tags: Order Topology
+
+\begin{theorem}
+Let $X = \left[{0 \,.\,.\, 1}\right) \cup \left({2 \,.\,.\, 3}\right) \cup \left\{{4}\right\}$.
+Let $\preceq$ be the [[Definition:Ordering|ordering]] on $X$ induced by the [[Definition:Usual Ordering|usual ordering]] of the [[Definition:Real Number|real numbers]].
+Let $\tau$ be the $\preceq$ [[Definition:Order Topology|order topology]] on $X$.
+Let $Y = \left[{0 \,.\,.\, 1}\right) \cup \left\{{4}\right\}$.
+Let $\tau'$ be the $\tau$-relative [[Definition:Subspace Topology|subspace topology]] on $Y$.
+Then:
+:$\left({Y, \preceq}\right)$ is a [[Definition:Complete Lattice|complete lattice]]
+:$Y$ is [[Definition:Closed Set (Topology)|closed]] in $X$
+but:
+:$\left({Y, \tau'}\right)$ is not [[Definition:Compact Space|compact]].
+\end{theorem}
+
+\begin{proof}
+First it is demonstrated that $\left({Y, \preceq}\right)$ is a [[Definition:Complete Lattice|complete lattice]].
+Let $\phi: Y \to \left[{0 \,.\,.\, 1}\right]$ be defined as:
+:$\phi \left({y}\right) = \begin{cases}
+y & : y \in \left[{0 \,.\,.\, 1}\right) \\
+1 & : y = 4 \end{cases}$
+Then $\phi$ is an [[Definition:Order Isomorphism|order isomorphism]].
+{{explain|The above needs to be proved.}}
+{{qed|lemma}}
+We have that $\left[{0 \,.\,.\, 1}\right]$ is a [[Definition:Complete Lattice|complete lattice]].
+{{explain|This is probably around here somewhere.}}
+Next it is shown that $Y$ is [[Definition:Closed Set (Topology)|closed]] in $X$.
+Let $x \in X \setminus Y$.
+Then:
+: $x \in \left({2 \,.\,.\, 3}\right)$
+Thus:
+:$x \in \left({\dfrac {x+2} 2 \,.\,.\, \dfrac {x+3} 2}\right) \in \tau$
+Since the [[Definition:Relative Complement|complement]] of $Y$ is [[Definition:Open Set (Topology)|open]], $Y$ is [[Definition:Closed Set (Topology)|closed]].
+Finally it is shown that $\left({Y, \tau'}\right)$ is not [[Definition:Compact Space|compact]].
+Let:
+: $\mathcal A = \left\{ {x^\preceq: x \in \left[{{0}\,.\,.\,{1}}\right) }\right\} \cup \left\{ {\left({\dfrac 5 2}\right)^\succeq}\right\}$
+where:
+: $x^\preceq$ denotes the [[Definition:Lower Closure of Element|lower closure]] of $x$
+: $x^\succeq$ denotes the [[Definition:Upper Closure of Element|upper closure]] of $x$
+Then $\mathcal A$ is an [[Definition:Open Cover|open cover]] of $Y$ with no [[Definition:Finite Subcover|finite subcover]].
+{{explain|Prove the above statement.}}
+{{qed}}
+{{finish|The remaining parts of this proof need to be completed.}}
+[[Category:Order Topology]]
+i5nrywsoteeccup06eje27v86t8vy83
+\end{proof}<|endoftext|>
+\section{Rule of Explosion/Variant 2}
+Tags: Rule of Explosion
+
+\begin{theorem}
+:$\vdash \paren {p \land \neg p} \implies q$
+\end{theorem}<|endoftext|>
+\section{Jordan Curve Theorem}
+Tags: Jordan Curves
+
+\begin{theorem}
+Let $\gamma: \closedint 0 1 \to \R^2$ be a [[Definition:Jordan Curve|Jordan curve]].
+Let $\Img \gamma$ denote the [[Definition:Image of Mapping|image]] of $\gamma$.
+Then $\R^2 \setminus \Img \gamma$ is a [[Definition:Set Union|union]] of two [[Definition:Disjoint Sets|disjoint]] [[Definition:Connected (Topology)|connected]] [[Definition:Component (Topology)|components]].
+Both components are [[Definition:Open Set (Metric Space)|open]] in $\R^2$, and both components have $\Img \gamma$ as their [[Definition:Boundary (Topology)|boundary]].
+One component is [[Definition:Bounded Metric Space|bounded]], and is called the [[Definition:Interior of Jordan Curve|interior]] of $\gamma$.
+The other component is [[Definition:Unbounded Metric Space|unbounded]], and is called the [[Definition:Exterior of Jordan Curve|exterior]] of $\gamma$.
+\end{theorem}
+
+\begin{proof}
+{{proof wanted}}
+{{Namedfor|Marie Ennemond Camille Jordan|cat = Jordan}}
+\end{proof}<|endoftext|>
+\section{Heine-Borel Theorem/Dedekind Complete Space}
+Tags: Linearly Ordered Spaces, Dedekind Complete Sets, Heine-Borel Theorem
+
+\begin{theorem}
+Let $T = \left({X, \preceq, \tau}\right)$ be a [[Definition:Dedekind Complete|Dedekind-complete]] [[Definition:Linearly Ordered Space|linearly ordered space]].
+Let $Y$ be a [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] of $X$.
+Then $Y$ is [[Definition:Compact Subspace|compact]] {{iff}} $Y$ is [[Definition:Closed Set (Topology)|closed]] and [[Definition:Bounded Ordered Set|bounded]] in $T$.
+\end{theorem}
+
+\begin{proof}
+=== Sufficient Condition ===
+Let $Y$ be a [[Definition:Compact Subspace|compact subspace]] of $T$.
+From:
+: [[Order Topology is Hausdorff]]
+: [[Compact Subspace of Hausdorff Space is Closed]]
+it follows that $Y$ is [[Definition:Closed Set (Topology)|closed]] in $T$.
+From [[Compact Subspace of Linearly Ordered Space/Lemma|Compact Subspace of Linearly Ordered Space: Lemma]], $\left({Y, \preceq {\restriction_Y} }\right)$ is a [[Definition:Complete Lattice|complete lattice]].
+{{finish|Not much to do, just need to show that a complete lattice in this context is bounded.}}
+Hence $Y$ is [[Definition:Closed Set (Topology)|closed]] and [[Definition:Bounded Ordered Set|bounded]] in $T$.
+=== Necessary Condition ===
+Let $Y$ be a [[Definition:Closed Set (Topology)|closed]] and [[Definition:Bounded Ordered Set|bounded]] [[Definition:Topological Subspace|subspace]] of $T$.
+Let $S$ be a non-empty subset of $Y$.
+Since $Y$ is bounded and $S \subseteq Y$, $S$ is bounded.
+Since $X$ is Dedekind complete, $S$ has a supremum and infimum in $X$.
+We will show that $\sup S, \inf S \in Y$.
+Suppose for the sake of contradiction that $b = \sup S \notin Y$.
+By [[Closed Set in Linearly Ordered Space]], $b$ is not a [[Definition:Supremum of Set|supremum]] of $S$, a contradiction.
+Thus $\sup S \in Y$.
+A similar argument shows that $\inf S \in Y$.
+Thus by [[Compact Subspace of Linearly Ordered Space]], $Y$ is compact.
+{{qed}}
+[[Category:Linearly Ordered Spaces]]
+[[Category:Dedekind Complete Sets]]
+[[Category:Heine-Borel Theorem]]
+2ezmkf6n61wq6ysih05rnyoss72km89
+\end{proof}<|endoftext|>
+\section{Heine-Borel iff Dedekind Complete}
+Tags: Order Topology, Compact Spaces
+
+\begin{theorem}
+Let $\struct {X, \preceq, \tau}$ be a [[Definition:Linearly Ordered Space|linearly ordered space]].
+Then $X$ is [[Definition:Dedekind Complete|Dedekind complete]] {{iff}} every closed, bounded subset of $X$ is [[Definition:Compact Subspace|compact]].
+\end{theorem}
+
+\begin{proof}
+The forward implication follows from [[Heine-Borel Theorem/Dedekind Complete Space|Heine-Borel Theorem: Dedekind Complete Space]].
+Suppose that $X$ is not Dedekind complete.
+Then $X$ has a non-empty subset $S$ with an upper bound $b$ in $X$ but no supremum in $X$.
+Let $a \in S$ and let $Y = {\bar \downarrow} S \cap {\bar \uparrow} a$.
+$Y$ is nonempty and bounded below by $a$ and above by $b$.
+=== $Y$ is closed in $X$ ===
+Let $x \in X \setminus Y$.
+Then $x \prec a$ or $x$ strictly succeeds every element of $S$.
+If $x \prec a$, then $x \in {\downarrow} a \subseteq X \setminus Y$.
+If $x$ strictly succeeds each element of $S$, then it is an upper bound of $S$.
+Since $S$ has no supremum in $X$, it has an upper bound $p \prec x$.
+Then $x \in {\uparrow p} \subseteq X \setminus Y$.
+{{explain|What does ${\downarrow} a$ and ${\uparrow p}$ mean?}}
+=== $Y$ is not compact ===
+Let $\AA = \set { {\downarrow} s: s \in S}$.
+Then $\AA$ is an open cover of $Y$ with no finite subcover.
+{{qed}}
+[[Category:Order Topology]]
+[[Category:Compact Spaces]]
+2ze14h1qxh81mb7rrsz45wixi8rqibh
+\end{proof}<|endoftext|>
+\section{Jordan Polygon Theorem}
+Tags: Topology
+
+\begin{theorem}
+Let $P$ be a [[Definition:Polygon|polygon]] embedded in $\R^2$.
+Denote the [[Definition:Boundary (Geometry)|boundary]] of $P$ as $\partial P$.
+Then, $\R^2 \setminus \partial P$ is a [[Definition:Set Union|union]] of two [[Definition:Connected Set (Topology)|connected]] [[Definition:Component (Topology)|components]].
+Both components are [[Definition:Open Set (Metric Space)|open]] in $\R^2$.
+One component is [[Definition:Bounded Metric Space|bounded]], and is called the [[Definition:Interior of Jordan Curve|interior]] of $P$.
+The other component is [[Definition:Unbounded Metric Space|unbounded]], and is called the [[Definition:Exterior of Jordan Curve|exterior]] of $P$.
+\end{theorem}
+
+\begin{proof}
+=== [[Jordan Polygon Theorem/Lemma 1|Lemma]] ===
+{{:Jordan Polygon Theorem/Lemma 1}}{{qed|lemma}}
+We show that $\R^2 \setminus \partial P$ is not [[Definition:Path-Connected Metric Subspace|path-connected]].
+Find any $q_1 \in \R^2 \setminus \partial P$ and $\theta \in \R$ such that the [[Definition:Ray (Geometry)|ray]] $\LL_\theta = \set {q_1 + s \map g \theta: s \in \R_{\ge 0} }$ has exactly one [[Definition:Crossing (Jordan Curve)|crossing]] of $\partial P$.
+Find any $q_2 \in \LL_\theta$ that lies on the ray after the crossing, so the ray $\set {q_2 + s \map g \theta: s \in \R_{\ge 0} }$ does not intersect $\partial P$.
+Then $\map {\mathrm {par} } {q_1} = 1 \ne 0 = \map {\mathrm {par} } {q_2}$.
+From [[Jordan Polygon Parity Lemma]], it follows that $q_1$ and $q_2$ cannot be connected by a [[Definition:Path (Topology)|path]].
+{{qed|lemma}}
+As $\R^2 \setminus \partial P$ is not [[Definition:Path-Connected Metric Subspace|path-connected]], it follows from the [[Jordan Polygon Theorem/Lemma 1|Lemma]] that $\R^2 \setminus \partial P$ is a [[Definition:Set Union|union]] of exactly two [[Definition:Disjoint Sets|disjoint]] [[Definition:Path-Connected Metric Subspace|path-connected]] [[Definition:Set|sets]], which we denote as $U_1$ and $U_2$.
+Let $q \in \R^2 \setminus \partial P$, and let $\map d {q, \partial P}$ be the [[Definition:Euclidean Metric on Real Number Plane|Euclidean distance]] between $q$ and $\partial P$.
+From [[Distance between Closed Sets in Euclidean Space]], it follows that $\map d {q, \partial P} > 0$.
+When we put $\epsilon = \map d {q, \partial P} / 2$, we have $\map {B_\epsilon} q \subseteq \R^2 \setminus \partial P$.
+As [[Open Ball is Convex Set]], it follows that $\map {B_\epsilon} q$ is path-connected, so $\map {B_\epsilon} q$ is a [[Definition:Subset|subset]] of either $U_1$ or $U_2$.
+Then, both $U_1$ and $U_2$ are [[Definition:Open Set (Metric Space)|open]].
+From [[Path-Connected Space is Connected]], it follows that $U_1$ and $U_2$ are [[Definition:Connected Set (Topology)|connected]].
+Then, $\R^2 \setminus \partial P$ is a union of two [[Definition:Component (Topology)|components]].
+{{qed|lemma}}
+From [[Boundary of Polygon is Jordan Curve]], it follows that $\partial P$ is equal to the [[Definition:Image of Mapping|image]] of a [[Definition:Jordan Curve|Jordan curve]] $\gamma: \closedint 0 1 \to \R^2$.
+From [[Continuous Image of Compact Space is Compact/Corollary 2]], it follows that $\partial P$ is [[Definition:Bounded Metric Space|bounded]].
+That is, there exist $a \in \R^2$ and $R \in \R_{>0}$ such that $\partial P \subseteq \map {B_R} a$.
+If $x_1, x_2 \in \R^2 \setminus \map {B_R} a$, $x_1$ to $x_2$ can be joined by a path in $\R^2 \setminus \map {B_R} a$ following:
+:the [[Definition:Circumference of Circle|circumference of the two circles]] with [[Definition:Center of Circle|center]] $a$ and [[Definition:Radius of Circle|radii]] $\map d {a, x_1}$ and $\map d {a, x_2}$
+:a [[Definition:Line Segment|line segment]] joining the two circumferences.
+Then $\R^2 \setminus \map {B_R} a$ is path-connected, so $\R^2 \setminus \map {B_R} a$ is a subset of one of the components of $\R^2 \setminus \partial P$, say $U_1$.
+As $\R^2 \setminus \map {B_R} a \subseteq U_1$, it follows that $U_1$ is [[Definition:Unbounded Metric Space|unbounded]], so $U_1$ is the [[Definition:Exterior of Jordan Curve|exterior]] of $\gamma$.
+Then $U_2 \subseteq \map {B_R} a$, so $U_2$ is [[Definition:Bounded Metric Space|bounded]], which implies that $U_2$ is the [[Definition:Interior of Jordan Curve|interior]] of $\gamma$.
+{{qed}}
+{{namedfor|Marie Ennemond Camille Jordan|cat = Jordan}}
+\end{proof}<|endoftext|>
+\section{Jordan Polygon Interior and Exterior Criterion}
+Tags: Topology
+
+\begin{theorem}
+Let $P$ be a [[Definition:Polygon|polygon]] embedded in $\R^2$.
+Let $q \in \R^2 \setminus \partial P$, where $\partial P$ denotes the [[Definition:Boundary (Geometry)|boundary]] of $P$.
+Let $\mathbf v \in \R^2 \setminus \left\{ {\mathbf 0}\right\}$ be a non-[[Definition:Zero Vector|zero]] [[Definition:Vector (Euclidean Space)|vector]], and let $\mathcal L = \left\{ {q + s \mathbf v: s \in \R_{\ge 0} }\right\}$ be a [[Definition:Ray (Geometry)|ray]] with start point $q$.
+Let $N \left({q}\right) \in \N$ be the number of [[Definition:Crossing (Jordan Curve)|crossings]] between $\mathcal L$ and $\partial P$.
+Then:
+:$(1): \quad$ $q \in \operatorname{Int} \left({P}\right)$, [[Definition:Iff|iff]] $N \left({q}\right) \equiv 1 \pmod 2$
+:$(2): \quad$ $q \in \operatorname{Ext} \left({P}\right)$, [[Definition:Iff|iff]] $N \left({q}\right) \equiv 0 \pmod 2$
+Here, $\operatorname{Int} \left({P}\right)$ and $\operatorname{Ext} \left({P}\right)$ denote the [[Definition:Interior of Jordan Curve|interior]] and [[Definition:Exterior of Jordan Curve|exterior]] of $\partial P$, when $\partial P$ is considered as a [[Definition:Jordan Curve|Jordan curve]].
+\end{theorem}
+
+\begin{proof}
+From [[Boundary of Polygon is Jordan Curve]], it follows that $\partial P$ is equal to the [[Definition:Image of Mapping|image]] of a [[Definition:Jordan Curve|Jordan curve]].
+From the [[Jordan Polygon Theorem]], it follows that $\operatorname{Int} \left({P}\right)$ and $\operatorname{Ext} \left({P}\right)$ are [[Definition:Path-Connected Set (Topology)|path-connected]].
+Then, [[Jordan Polygon Parity Lemma]] shows that $N \left({q}\right) = \operatorname{par} \left({q}\right)$, where $\operatorname{par} \left({q}\right)$ denotes the [[Definition:Crossing (Jordan Curve)/Parity|parity]] of $q$.
+From [[Jordan Polygon Theorem]], it follows that $\operatorname{Ext} \left({P}\right)$ is [[Definition:Unbounded Metric Space|unbounded]], while $\operatorname{Int} \left({P}\right)$ is [[Definition:Bounded Metric Space|bounded]].
+As $\partial P$ is the [[Definition:Image of Mapping|image]] of a [[Definition:Jordan Curve|Jordan curve]], it follows from [[Continuous Image of Compact Space is Compact/Corollary 2]] that $\partial P$ is also [[Definition:Bounded Metric Space|bounded]].
+Then, there exists $R \in \R_{>0}$ such that $\operatorname{Int} \left({P}\right) \cup \partial P \subseteq B_R \left({\mathbf 0}\right)$.
+If $q_0 \in \R^2 \setminus B_R \left({\mathbf 0}\right)$, then $q_0 \in \operatorname{Ext} \left({P}\right)$.
+Then, the [[Definition:Ray (Geometry)|ray]] $\left\{ {q_0 + s q_0: s \in \R_{\ge 0} }\right\} \subseteq \R^2 \setminus B_R \left({\mathbf 0}\right)$, so there are zero [[Definition:Crossing (Jordan Curve)|crossings]] between the ray and $\partial P$.
+From [[Jordan Polygon Parity Lemma]], it follows that $\operatorname{par} \left({q_0}\right) = 0$.
+As $\operatorname{Ext} \left({P}\right)$ is [[Definition:Path-Connected Set (Topology)|path-connected]], it follows from the lemma that for all $q \in \operatorname{Ext} \left({P}\right)$, we have $\operatorname{par} \left({q}\right) = 0$.
+If $q_1 \in \R^2 \setminus \partial P$ with $\operatorname{par} \left({q_1}\right) = 1$, it follows that $q_1 \notin \operatorname{Ext} \left({P}\right)$.
+As $\R^2 \setminus \partial P = \operatorname{Int} \left({P}\right) \cup \operatorname{Ext} \left({P}\right)$, it follows that $q_1 \in \operatorname{Int} \left({P}\right)$.
+Again, [[Jordan Polygon Parity Lemma]] shows that for all $q \in \operatorname{Int} \left({P}\right)$, we have $\operatorname{par} \left({q}\right) = 1$.
+So if instead $q_0 \in \R^2 \setminus \partial P$ with $\operatorname{par} \left({q_0}\right) = 0$, the only possibility is that $q_0 \in \operatorname{Ext} \left({P}\right)$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Closed Set in Linearly Ordered Space}
+Tags: Linearly Ordered Spaces
+
+\begin{theorem}
+Let $\struct {X, \preceq, \tau}$ be a [[Definition:Linearly Ordered Space|linearly ordered space]].
+Let $C$ be a [[Definition:Subset|subset]] of $X$.
+Then $C$ is [[Definition:Closed Set (Topology)|closed]] in $X$ {{iff}} for all [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subsets]] $S$ of $C$:
+:If $s \in X$ is a [[Definition:Supremum of Set|supremum]] or [[Definition:Infimum of Set|infimum]] of $S$ in $X$, then $s \in C$.
+\end{theorem}
+
+\begin{proof}
+=== Necessary Condition ===
+Suppose that $C$ is [[Definition:Closed Set (Topology)|closed]].
+Let $S$ be a [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] of $C$.
+Let $b \in X \setminus C$.
+We will show that $b$ is not a [[Definition:Supremum of Set|supremum]] of $S$.
+If $b$ is not an [[Definition:Upper Bound of Set|upper bound]] of $S$, then by definition $b$ cannot be a [[Definition:Supremum of Set|supremum]] of $S$.
+Suppose, then, that $b$ is an [[Definition:Upper Bound of Set|upper bound]] of $S$.
+Since $C$ is [[Definition:Closed Set (Topology)|closed]] and $b \notin C$, there must be an open interval or open ray $U$ containing $b$ that is disjoint from $C$.
+Since $b$ is an [[Definition:Upper Bound of Set|upper bound]] for $S$ and $S$ is [[Definition:Non-Empty Set|not empty]], $U$ cannot be a downward-pointing ray.
+Thus $U$ is either an open interval $\openint a q$ or an upward-pointing open ray ${\uparrow}a$.
+Then $a \prec b$.
+Since $b \in U$, and $b$ is an [[Definition:Upper Bound of Set|upper bound]] of $S$, no element strictly succeeding all elements of $U$ can be in $S$.
+By the above and the fact that $S \subseteq C$, ${\uparrow}a \cap S = \O$, so $a$ is an upper bound of $S$.
+Since $a \prec b$, $b$ is not a [[Definition:Supremum of Set|supremum]] of $S$.
+A similar argument shows that $b$ is not an [[Definition:Infimum of Set|infimum]].
+{{qed|lemma}}
+=== Sufficient Condition ===
+Suppose that no nonempty subset of $C$ has a [[Definition:Supremum of Set|supremum]] or [[Definition:Infimum of Set|infimum]] relative to $X$ that lies in $X \setminus C$.
+Let $p \in X \setminus C$.
+==== Case 1: $p$ is an upper bound of $C$ ====
+Since $C$ is a non-empty subset of $C$, it does not have a [[Definition:Supremum of Set|supremum]] in $X \setminus C$.
+Thus $p$ is not a [[Definition:Supremum of Set|supremum]] of $C$.
+Therefore, $C$ has an upper bound $a \in X$ such that $a \prec p$.
+Thus ${\uparrow_X}a$ contains $p$ and is disjoint from $C$, so $p$ is not an [[Definition:Adherent Point|adherent point]] of $C$.
+==== Case 2: $p$ is a lower bound of $C$ ====
+The same approach used for case 1 proves $p$ is not an [[Definition:Adherent Point|adherent point]] of $C$.
+==== Case 3: $p$ is neither an upper nor a lower bound of $C$ ====
+In this case, $C \cap {\downarrow_X} p$ and $C \cap {\uparrow_X} p$ are nonempty, and their union is $C$.
+Thus $p$ is not a [[Definition:Supremum of Set|supremum]] of $C \cap {\downarrow_X} p$ and is not an infimum of $C \cap {\uparrow_X}p$.
+Thus there are $a, b \in X$ such that $a \prec p \prec b$, $a$ is an upper bound of $C \cap {\downarrow_X}p$, and $b$ is a lower bound of $C \cap {\uparrow_X} p$.
+Then $\openint a b$ contains $p$ and is disjoint from $C$, so $p$ is not an [[Definition:Adherent Point|adherent point]] of $C$.
+Since $C$ contains all of its adherent points, it is [[Definition:Closed Set (Topology)|closed]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Implication is Left Distributive over Disjunction/Formulation 1}
+Tags: Implication is Left Distributive over Disjunction
+
+\begin{theorem}
+:$p \implies \left({q \lor r}\right) \dashv \vdash \left({p \implies q}\right) \lor \left({p \implies r}\right)$
+\end{theorem}<|endoftext|>
+\section{Implication is Left Distributive over Disjunction/Formulation 1/Forward Implication}
+Tags: Implication is Left Distributive over Disjunction
+
+\begin{theorem}
+:$p \implies \paren {q \lor r} \vdash \paren {p \implies q} \lor \paren {p \implies r}$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|p \implies \paren {q \lor r} \vdash \paren {p \implies q} \lor \paren {p \implies r} }}
+{{Assumption|1|p \implies \paren {q \lor r} }}
+{{Assumption|2|p}}
+{{ModusPonens|3|1, 2|q \lor r|1|2}}
+{{IdentityLaw|4|2|p|2}}
+{{Assumption|5|q}}
+{{Implication|6|5|p \implies q|4|5}}
+{{Addition|7|5|\paren {p \implies q} \lor \paren {p \implies r}|6|1}}
+{{Assumption|8|r}}
+{{SequentIntro|9|8|p \implies r|8|[[True Statement is implied by Every Statement]]}}
+{{Addition|10|8|\paren {p \implies q} \lor \paren {p \implies r}|9|2}}
+{{ProofByCases|11|1|\paren {p \implies q} \lor \paren {p \implies r}|3|5|7|8|10}}
+{{EndTableau}}
+{{qed}}
+[[Category:Implication is Left Distributive over Disjunction]]
+iuw2ti0tdligmxfmhge8tid9aanq9iv
+\end{proof}<|endoftext|>
+\section{Implication is Left Distributive over Disjunction/Formulation 1/Reverse Implication}
+Tags: Implication is Left Distributive over Disjunction
+
+\begin{theorem}
+:$\left({p \implies q}\right) \lor \left({p \implies r}\right) \vdash p \implies \left({q \lor r}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\left({p \implies q}\right) \lor \left({p \implies r}\right) \vdash p \implies \left({q \lor r}\right)}}
+{{Assumption|1|\left({p \implies q}\right) \lor \left({p \implies r}\right)}}
+{{Assumption|2|p \implies q}}
+{{Assumption|3|p}}
+{{ModusPonens|4|2, 3|q|2|3}}
+{{Addition|5|2, 3|q \lor r|4|1}}
+{{Implication|6|2|p \implies \left({q \lor r}\right)|3|5}}
+{{Assumption|7|p \implies r}}
+{{Assumption|8|p}}
+{{ModusPonens|9|7, 8|r|7|8}}
+{{Addition|10|7, 8|q \lor r|9|2}}
+{{Implication|11|7|p \implies \left({q \lor r}\right)|8|10}}
+{{ProofByCases|12|1|p \implies \left({q \lor r}\right)|1|2|6|7|11}}
+{{EndTableau}}
+{{qed}}
+[[Category:Implication is Left Distributive over Disjunction]]
+9y3zqxzh2wtz8svinjz0k93zt6o6bxt
+\end{proof}<|endoftext|>
+\section{Factor Principles/Disjunction on Left/Formulation 2}
+Tags: Factor Principles
+
+\begin{theorem}
+: $\vdash \left({p \implies q}\right) \implies \left({\left({r \lor p}\right) \implies \left ({r \lor q}\right)}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \left({p \implies q}\right) \implies \left({\left({r \lor p}\right) \implies \left ({r \lor q}\right)}\right)}}
+{{Assumption|1|p \implies q}}
+{{SequentIntro|2|1|\left({\left({r \lor p}\right) \implies \left ({r \lor q}\right)}\right)|1
+ |[[Factor Principles/Disjunction on Left/Formulation 1|Factor Principles: Disjunction on Left: Formulation 1]]}}
+{{Implication|3|1|\left({p \implies q}\right) \implies \left({\left({r \lor p}\right) \implies \left ({r \lor q}\right)}\right)|1|2}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Factor Principles/Disjunction on Right/Formulation 2}
+Tags: Factor Principles
+
+\begin{theorem}
+: $\vdash \left({p \implies q}\right) \implies \left({\left({p \lor r}\right) \implies \left ({q \lor r}\right)}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \left({p \implies q}\right) \implies \left({\left({p \lor r}\right) \implies \left ({q \lor r}\right)}\right)}}
+{{Assumption|1|p \implies q}}
+{{SequentIntro|2|1|\left({p \lor r}\right) \implies \left ({q \lor r}\right)|1
+ |[[Factor Principles/Disjunction on Right/Formulation 1|Factor Principles: Disjunction on Right: Formulation 1]]}}
+{{Implication|3|1|\left({p \implies q}\right) \implies \left({\left({p \lor r}\right) \implies \left ({q \lor r}\right)}\right)|1|2}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Factor Principles/Disjunction on Right/Formulation 1}
+Tags: Factor Principles
+
+\begin{theorem}
+:$p \implies q \vdash \paren {p \lor r} \implies \paren {q \lor r}$
+\end{theorem}<|endoftext|>
+\section{Factor Principles/Disjunction on Left/Formulation 1}
+Tags: Factor Principles
+
+\begin{theorem}
+: $p \implies q \vdash \left({r \lor p}\right) \implies \left ({r \lor q}\right)$
+\end{theorem}<|endoftext|>
+\section{Disjunction of Implications}
+Tags: Disjunction, Implication
+
+\begin{theorem}
+:$\vdash \paren {p \implies q} \lor \paren {q \implies r}$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \paren {p \implies q} \lor \paren {q \implies r} }}
+{{ExcludedMiddle|1|q \lor \neg q}}
+{{Assumption|2|q}}
+{{SequentIntro|3|2|p \implies q|2|[[True Statement is implied by Every Statement]]}}
+{{Addition|4|2|\paren {p \implies q} \lor \paren {q \implies r}|3|1}}
+{{Assumption|5|\neg q}}
+{{SequentIntro|6|5|q \implies r|5|[[False Statement implies Every Statement]]}}
+{{Addition|7|5|\paren {p \implies q} \lor \paren {q \implies r}|6|2}}
+{{ProofByCases|8||\paren {p \implies q} \lor \paren {q \implies r}|1|2|4|5|7}}
+{{EndTableau|qed}}
+{{LEM}}
+\end{proof}<|endoftext|>
+\section{Principle of Composition/Formulation 1}
+Tags: Principle of Composition
+
+\begin{theorem}
+:$\left({p \implies r}\right) \lor \left({q \implies r}\right) \dashv \vdash \left({p \land q}\right) \implies r$
+\end{theorem}<|endoftext|>
+\section{Principle of Composition/Formulation 1/Forward Implication}
+Tags: Principle of Composition
+
+\begin{theorem}
+:$\paren {p \implies r} \lor \paren {q \implies r} \vdash \paren {p \land q} \implies r$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\paren {p \implies r} \lor \paren {q \implies r} \vdash \paren {p \land q} \implies r}}
+{{Premise | 1 | \paren {p \implies r} \lor \paren {q \implies r} }}
+{{Assumption | 2 | p \implies r}}
+{{Assumption | 3 | p \land q}}
+{{Simplification | 4 | 3 | p | 3 | 1}}
+{{ModusPonens | 5 | 2, 3 | r | 2 | 4}}
+{{Implication | 6 | 2 | \paren {p \land q} \implies r | 3 | 5}}
+{{Assumption | 7 | q \implies r}}
+{{Assumption | 8 | p \land q}}
+{{Simplification | 9 | 8 | q | 8 | 2}}
+{{ModusPonens | 10 | 7, 8 | r | 7 | 9}}
+{{Implication | 11 | 7 | \paren {p \land q} \implies r | 8 | 10}}
+{{ProofByCases | 12 | 1 | \paren {p \land q} \implies r | 1 | 2 | 6 | 7 | 11}}
+{{EndTableau}}
+{{qed}}
+[[Category:Principle of Composition]]
+s0zw4mvifgwuf9geznf0ccfqcepe05p
+\end{proof}<|endoftext|>
+\section{Principle of Composition/Formulation 1/Reverse Implication}
+Tags: Principle of Composition
+
+\begin{theorem}
+:$\paren {p \land q} \implies r \vdash \paren {p \implies r} \lor \paren {q \implies r}$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\paren {p \land q} \implies r \vdash \paren {p \implies r} \lor \paren {q \implies r} }}
+{{Premise | 1 | \paren {p \land q} \implies r}}
+{{SequentIntro | 2 | 1 | \neg \paren {p \land q} \lor r | 1 | [[Rule of Material Implication/Formulation 1|Rule of Material Implication]]}}
+{{SequentIntro | 3 | 1 | \neg p \lor \neg q \lor r | 2 | [[De Morgan's Laws (Logic)/Disjunction of Negations|De Morgan's Laws: Disjunction of Negations]]}}
+{{Addition | 4 | 1 | r \lor \neg p \lor \neg q \lor r | 3 | 2}}
+{{Commutation | 5 | 1 | \neg p \lor r \lor \neg q \lor r | 4 | Disjunction}}
+{{SequentIntro | 6 | 1 | \paren {p \implies r} \lor \neg q \lor r | 5 | [[Rule of Material Implication/Formulation 1|Rule of Material Implication]]}}
+{{SequentIntro | 7 | 1 | \paren {p \implies r} \lor \paren {q \implies r} | 6 | [[Rule of Material Implication/Formulation 1|Rule of Material Implication]]}}
+{{EndTableau}}
+{{qed}}
+[[Category:Principle of Composition]]
+lrbsgl53wcyrryvmzb11mbkzydx5lg1
+\end{proof}<|endoftext|>
+\section{Principle of Composition/Formulation 2}
+Tags: Principle of Composition
+
+\begin{theorem}
+:$\left({\left({p \implies r}\right) \lor \left({q \implies r}\right)}\right) \iff \left({\left({p \land q}\right) \implies r}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\left({\left({p \implies r}\right) \lor \left({q \implies r}\right)}\right) \iff \left({\left({p \land q}\right) \implies r}\right)}}
+{{Assumption|1|\left({p \implies r}\right) \lor \left({q \implies r}\right)}}
+{{SequentIntro|2|1|\left({p \land q}\right) \implies r|1|[[Principle of Composition/Formulation 1/Forward Implication|Principle of Composition/Formulation 1]]}}
+{{Implication|3||\left({\left({p \implies r}\right) \lor \left({q \implies r}\right)}\right) \implies \left({\left({p \land q}\right) \implies r}\right)|1|2}}
+{{Assumption|4|\left({p \land q}\right) \implies r}}
+{{SequentIntro|5|4|\left({p \implies r}\right) \lor \left({q \implies r}\right)|4|[[Principle of Composition/Formulation 1/Reverse Implication|Principle of Composition/Formulation 1]]}}
+{{Implication|6||\left({\left({p \land q}\right) \implies r}\right) \implies \left({\left({p \implies r}\right) \lor \left({q \implies r}\right)}\right)|4|5}}
+{{BiconditionalIntro|7||\left({\left({p \implies r}\right) \lor \left({q \implies r}\right)}\right) \iff \left({\left({p \land q}\right) \implies r}\right)|3|6}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Inversion Mapping on Topological Group is Homeomorphism}
+Tags: Inversion Mappings, Homeomorphisms, Topological Groups
+
+\begin{theorem}
+Let $T = \struct {G, \circ, \tau}$ be a [[Definition:Topological Group|topological group]].
+Let $\phi: G \to G$ be the [[Definition:Inversion Mapping|inversion mapping]] of $T$.
+Then $\phi$ is a [[Definition:Homeomorphism|homeomorphism]].
+\end{theorem}
+
+\begin{proof}
+From the definition of [[Definition:Topological Group|topological group]], $\phi$ is [[Definition:Continuous Mapping (Topology)|continuous]].
+By [[Inversion Mapping is Involution]], $\phi$ is an [[Definition:Involution (Mapping)|involution]].
+By [[Continuous Involution is Homeomorphism]], $\phi$ is a [[Definition:Homeomorphism|homeomorphism]].
+{{qed}}
+[[Category:Inversion Mappings]]
+[[Category:Homeomorphisms]]
+[[Category:Topological Groups]]
+rjcmxcmvc5l269f81xlhd2baxldobxe
+\end{proof}<|endoftext|>
+\section{De Morgan's Laws (Logic)/Conjunction of Negations/Formulation 2/Forward Implication}
+Tags: De Morgan's Laws (Logic)
+
+\begin{theorem}
+: $\vdash \left({\neg p \land \neg q}\right) \implies \left({\neg \left({p \lor q}\right)}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \left({\neg p \land \neg q}\right) \implies \left({\neg \left({p \lor q}\right)}\right)}}
+{{Assumption|1|\neg p \land \neg q}}
+{{SequentIntro|2|1|\neg \left({p \lor q}\right)|1|[[De Morgan's Laws (Logic)/Conjunction of Negations/Formulation 1/Forward Implication|De Morgan's Laws (Logic): Conjunction of Negations: Formulation 1]]}}
+{{Implication|3||\left({\neg p \land \neg q}\right) \implies \left({\neg \left({p \lor q}\right)}\right)|1|2}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{De Morgan's Laws (Logic)/Conjunction of Negations/Formulation 2/Reverse Implication}
+Tags: De Morgan's Laws (Logic)
+
+\begin{theorem}
+: $\left({\neg \left({p \lor q}\right)}\right) \implies \left({\neg p \land \neg q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \left({\neg \left({p \lor q}\right)}\right) \implies \left({\neg p \land \neg q}\right)}}
+{{Assumption|1|\neg \left({p \lor q}\right)}}
+{{SequentIntro|2|1|\neg p \land \neg q|1|[[De Morgan's Laws (Logic)/Conjunction of Negations/Formulation 1/Reverse Implication|De Morgan's Laws (Logic): Conjunction of Negations: Formulation 1]]}}
+{{Implication|3||\left({\neg \left({p \lor q}\right)}\right) \implies \left({\neg p \land \neg q}\right)|1|2}}
+{{EndTableau}}
+{{qed}}
+[[Category:De Morgan's Laws (Logic)]]
+lz2c718vtnmt72duwxve8dch6pc4scd
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Generalized Ordered Space}
+Tags: Generalized Ordered Spaces, Equivalence of Definitions of Generalized Ordered Space
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+Let $\tau$ be a [[Definition:Topology|topology]] for $S$.
+{{TFAE|def = Generalized Ordered Space}}
+\end{theorem}
+
+\begin{proof}
+=== [[Equivalence of Definitions of Generalized Ordered Space/Definition 1 implies Definition 3|Definition $(1)$ implies Definition $(3)$]] ===
+{{transclude:Equivalence of Definitions of Generalized Ordered Space/Definition 1 implies Definition 3|section=proof}}
+=== [[Equivalence of Definitions of Generalized Ordered Space/Definition 3 implies Definition 1|Definition $(3)$ implies Definition $(1)$]] ===
+{{transclude:Equivalence of Definitions of Generalized Ordered Space/Definition 3 implies Definition 1|section=proof}}
+=== [[Equivalence of Definitions of Generalized Ordered Space/Definition 2 implies Definition 1|Definition $(2)$ implies Definition $(1)$]] ===
+{{:Equivalence of Definitions of Generalized Ordered Space/Definition 2 implies Definition 1}}
+=== Definition $(3)$ implies Definition $(2)$ ===
+This follows from [[GO-Space Embeds Densely into Linearly Ordered Space]].
+{{qed}}
+[[Category:Generalized Ordered Spaces]]
+[[Category:Equivalence of Definitions of Generalized Ordered Space]]
+hazwxe6hpm2z8xwd4rpix6pxci8o2b8
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Generalized Ordered Space/Definition 2 implies Definition 1}
+Tags: Equivalence of Definitions of Generalized Ordered Space
+
+\begin{theorem}
+Let $\struct {S, \preceq, \tau}$ be a [[Definition:Generalized Ordered Space/Definition 2|generalized ordered space by Definition 2]]:
+{{:Definition:Generalized Ordered Space/Definition 2}}
+Then $\struct {S, \preceq, \tau}$ is a [[Definition:Generalized Ordered Space/Definition 1|generalized ordered space by Definition 1]]:
+{{:Definition:Generalized Ordered Space/Definition 1}}
+\end{theorem}
+
+\begin{proof}
+Let $x \in U \in \tau$.
+Then by the definition of [[Definition:Topological Embedding|topological embedding]]:
+:$\map \phi U$ is an [[Definition:Open Neighborhood|open neighborhood]] of $\map \phi x$ in $\map \phi S$ with the [[Definition:Subspace Topology|subspace topology]].
+Thus by [[Basis for Topological Subspace]] and the definition of the [[Definition:Order Topology|order topology]], there is an [[Definition:Open Interval|open interval]] or [[Definition:Open Ray|open ray]] $I' \in \tau'$ such that:
+:$\map \phi x \in I' \cap \map \phi S \subseteq \map \phi U$
+Since $I'$ is an [[Definition:Interval of Ordered Set|interval]] or [[Definition:Ray (Order Theory)|ray]], it is [[Definition:Convex Set (Order Theory)|convex]] in $S'$ by [[Interval of Ordered Set is Convex]] or [[Ray is Convex]], respectively.
+Then:
+{{begin-eqn}}
+{{eqn | l = x \in \phi^{-1} \sqbrk {I'}
+ | r = \phi^{-1} \sqbrk {I' \cap \phi \sqbrk S}
+ | c =
+}}
+{{eqn | o = \subseteq
+ | r = \phi^{-1} \sqbrk {\phi \sqbrk U}
+ | c =
+}}
+{{end-eqn}}
+Because $\phi$ is a [[Definition:Topological Embedding|topological embedding]], it is [[Definition:Injection|injective]] by definition.
+So:
+:$\phi^{-1} \sqbrk {\phi \sqbrk U} = U$
+Thus:
+:$x \in \phi^{-1} \sqbrk {I'} \subseteq U$
+By [[Inverse Image of Convex Set under Monotone Mapping is Convex]]:
+:$\phi^{-1} \sqbrk {I'}$ is [[Definition:Convex Set (Order Theory)|convex]].
+Thus $\tau$ has a [[Definition:Basis (Topology)|basis]] consisting of [[Definition:Convex Set (Order Theory)|convex sets]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Inverse Image of Convex Set under Monotone Mapping is Convex}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\left({X, \le}\right)$ and $\left({Y, \preceq}\right)$ be [[Definition:Ordered Set|ordered sets]].
+Let $f: X \to Y$ be a [[Definition:Monotone Mapping|monotone mapping]].
+Let $C$ be a [[Definition:Convex Set (Order Theory)|convex subset]] of $Y$.
+Then $f^{-1} \left[{C}\right]$ is [[Definition:Convex Set (Order Theory)|convex]] in $X$.
+\end{theorem}
+
+\begin{proof}
+Suppose $f$ is [[Definition:Increasing Mapping|increasing]].
+Let $a, b, c \in X$ such that $a \le b \le c$.
+Let $a, c \in f^{-1} \left[{C}\right]$.
+By definition of [[Definition:Inverse Image|inverse image]]:
+: $f \left({a}\right), f \left({c}\right) \in C$
+By definition of [[Definition:Increasing Mapping|increasing mapping]]:
+:$f \left({a}\right) \preceq f \left({b}\right) \preceq f \left({c}\right)$
+Thus by definition of [[Definition:Convex Set (Order Theory)|convex set]]:
+:$f \left({b}\right) \in C$
+Then by definition of [[Definition:Inverse Image|inverse image]]:
+:$b \in f^{-1} \left[{C}\right]$
+Since this holds for all such triples, $f^{-1} \left[{C}\right]$ is [[Definition:Convex Set (Order Theory)|convex]] in $X$.
+A similar argument applies if $f$ is [[Definition:Decreasing Mapping|decreasing]].
+{{qed}}
+[[Category:Order Theory]]
+fetuvrsxewava1wwlw6q1fh3x889jej
+\end{proof}<|endoftext|>
+\section{Conjunction with Law of Excluded Middle}
+Tags: Conjunction, Law of Excluded Middle
+
+\begin{theorem}
+:$\vdash p \iff \paren {p \land q} \lor \paren {p \land \neg q}$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash p \iff \paren {p \land q} \lor \paren {p \land \neg q} }}
+{{Assumption|1|p}}
+{{ExcludedMiddle|2|q \lor \neg q}}
+{{Assumption|3|q}}
+{{Conjunction|4|1, 3|p \land q|1|2}}
+{{Addition|5|1, 3|\paren {p \land q} \lor \paren {p \land \neg q}|4|1}}
+{{Assumption|6|\neg q}}
+{{Conjunction|7|1, 6|p \land \neg q|1|6}}
+{{Addition|8|1, 6|\paren {p \land q} \lor \paren {p \land \neg q}|7|2}}
+{{ProofByCases|9|1|\paren {p \land q} \lor \paren {p \land \neg q}|2|3|5|6|8}}
+{{Implication|10||p \implies \paren {p \land q} \lor \paren {p \land \neg q}|1|9}}
+{{Assumption|11|\paren {p \land q} \lor \paren {p \land \neg q} }}
+{{SequentIntro|12|11|p \land \paren {q \lor \neg q}|11|[[Conjunction Distributes over Disjunction]]}}
+{{Simplification|13|11|p|11|2}}
+{{Implication|14||\paren {p \land q} \lor \paren {p \land \neg q} \implies p|11|13}}
+{{BiconditionalIntro|15||p \iff \paren {p \land q} \lor \paren {p \land \neg q}|12|14}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Proof by Cases with Contradiction}
+Tags: Principle of Non-Contradiction, Proof by Cases
+
+\begin{theorem}
+:$\vdash p \iff \left({p \lor q}\right) \land \left({p \lor \neg q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash p \iff \left({p \lor q}\right) \land \left({p \lor \neg q}\right)}}
+{{Assumption|1|p}}
+{{Addition|2|1|p \lor q|1|1}}
+{{Addition|3|1|p \lor \neg q|1|2}}
+{{Conjunction|4|1|\left({p \lor q}\right) \land \left({p \lor \neg q}\right)|2|3}}
+{{Implication|5||p \implies \left({p \lor q}\right) \land \left({p \lor \neg q}\right)|1|4}}
+{{Assumption|6|\left({p \lor q}\right) \land \left({p \lor \neg q}\right)}}
+{{SequentIntro|7|6|p \lor \left({q \land \neg q}\right)|6|[[Disjunction Distributes over Conjunction]]}}
+{{Assumption|8|p}}
+{{TheoremIntro|9|\neg \left({q \land \neg q}\right)|[[Principle of Non-Contradiction/Sequent Form/Formulation 2|Principle of Non-Contradiction: Formulation 2]]}}
+{{ModusTollendoPonens|10|6|p|7|9|2}}
+{{ProofByCases|11|6|p|6|8|8|9|10}}
+{{Implication|12||\left({p \lor q}\right) \land \left({p \lor \neg q}\right) \implies p|6|11}}
+{{BiconditionalIntro|13||p \iff \left({p \lor q}\right) \land \left({p \lor \neg q}\right)|5|12}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Ray is Convex}
+Tags: Convex Sets, Rays (Order Theory)
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $I$ be a [[Definition:Ray (Order Theory)|ray]], either [[Definition:Open Ray|open]] or [[Definition:Closed Ray|closed]].
+Then $I$ is [[Definition:Convex Set (Order Theory)|convex]] in $S$.
+\end{theorem}
+
+\begin{proof}
+The cases for [[Definition:Upward-Pointing Ray|upward-pointing]] and [[Definition:Downward-Pointing Ray|downward-pointing]] [[Definition:Ray (Order Theory)|rays]] are equivalent.
+{{explain|"Dual of convex is convex" and duality, upon which the above statement depends.}}
+{{WLOG}}, suppose that $I$ is an [[Definition:Upward-Pointing Ray|upward-pointing ray]].
+By the definition of a [[Definition:Ray (Order Theory)|ray]], there exists an $a \in S$ such that:
+:$I = a^\succ$
+or:
+:$I = a^\succeq$
+according to whether $I$ is [[Definition:Open Ray|open]] or [[Definition:Closed Ray|closed]].
+Let $x, y, z \in S$ such that $x \prec y \prec z$ and $x, z \in I$.
+Then:
+:$a \preceq x \prec y$
+so:
+:$a \prec y$
+Therefore:
+:$y \in a^\succ \subseteq I$
+Thus $I$ is [[Definition:Convex Set (Order Theory)|convex]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Intersection of Convex Sets is Convex Set (Order Theory)}
+Tags: Convex Sets
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $\mathcal C$ be a [[Definition:Set|set]] of [[Definition:Convex Set (Order Theory)|convex sets]] in $S$.
+Then $\displaystyle \bigcap \mathcal C$ is [[Definition:Convex Set (Order Theory)|convex]].
+\end{theorem}
+
+\begin{proof}
+Let $a, b, c \in S$.
+Let $a, c \in \displaystyle \bigcap \mathcal C$.
+Let $a \prec b \prec c$.
+By the definition of [[Definition:Set Intersection|intersection]]:
+:$\forall C \in \mathcal C$: $a, c \in C$
+Since each $C \in \mathcal C$ is [[Definition:Convex Set (Order Theory)|convex]]:
+:$\forall C \in \mathcal C$: $b \in C$.
+Thus by the definition of [[Definition:Set Intersection|intersection]]:
+:$b \in \displaystyle \bigcap \mathcal C$
+Thus $\displaystyle \bigcap \mathcal C$ is [[Definition:Convex Set (Order Theory)|convex]].
+{{qed}}
+[[Category:Convex Sets]]
+8dmimjz3fi4bkv3mxh4bpiye8k4tqi5
+\end{proof}<|endoftext|>
+\section{Interval of Ordered Set is Convex}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+Let $I$ be an [[Definition:Interval of Ordered Set|interval]]: be it [[Definition:Open Interval|open]], [[Definition:Closed Interval|closed]], or [[Definition:Half-Open Interval|half-open]] in $S$.
+Then $I$ is [[Definition:Convex Set (Order Theory)|convex]] in $S$.
+\end{theorem}
+
+\begin{proof}
+Any [[Definition:Interval of Ordered Set|interval]] can be represented as the [[Definition:Set Intersection|intersection]] of two [[Definition:Ray (Order Theory)|rays]].
+{{explain|Obvious though it is, the above needs to be stated as a theorem in its own right.}}
+Thus by [[Ray is Convex]] and [[Intersection of Convex Sets is Convex Set (Order Theory)]], $I$ is [[Definition:Convex Set (Order Theory)|convex]].
+{{qed}}
+[[Category:Order Theory]]
+kob8akch5iwfbh8kkzmoembt3sost1s
+\end{proof}<|endoftext|>
+\section{Upper and Lower Closures are Convex}
+Tags: Lower Closures, Upper Closures, Convex Sets
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $a \in S$.
+Then $a^\succeq$, $a^\succ$, $a^\preceq$, and $a^\prec$ are [[Definition:Convex Set (Order Theory)|convex]] in $S$.
+\end{theorem}
+
+\begin{proof}
+The cases for upper and lower closures are dual, so we need only prove the case for upper closures.
+Suppose, then, that $C = a^\succeq$ or $C = a^\succ$.
+Suppose that $x, y, z \in S$, $x \prec y \prec z$, and $x, z \in C$.
+Then $a \preceq x \prec y$, so $a \prec y$ by [[Extended Transitivity]].
+Therefore $y \in a^\succ \subseteq C$.
+Thus $C$ is [[Definition:Convex Set (Order Theory)|convex]].
+{{qed}}
+[[Category:Lower Closures]]
+[[Category:Upper Closures]]
+[[Category:Convex Sets]]
+1ubqkqahcv5dlic8bk0xfbemucj22th
+\end{proof}<|endoftext|>
+\section{Transitive Closure of Symmetric Relation is Symmetric}
+Tags: Symmetric Relations, Transitive Closures
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]].
+Let $\mathcal R$ be a [[Definition:Symmetric Relation|symmetric relation]] on $S$.
+Let $\mathcal T$ be the [[Definition:Transitive Closure (Relation Theory)|transitive closure]] of $\mathcal R$.
+Then $\mathcal T$ is [[Definition:Symmetric Relation|symmetric]].
+\end{theorem}
+
+\begin{proof}
+Let $a, b \in S$ with $a \mathrel{\mathcal T} b$.
+By the definition of [[Definition:Transitive Closure (Relation Theory)/Union of Compositions|transitive closure]], there is an $n \in \N$ such that $a \mathrel{\mathcal R^n} b $.
+Thus there are $x_0, x_1, \dots, x_n \in S$ such that:
+: $x_0 = a$
+: $x_n = b$
+: For $k = 0, \dots, n-1$: $x_k \mathrel{\mathcal R} x_{k+1}$
+For $k = 0, \dots, n$, let $y_k = x_{n-k}$.
+Then:
+: $y_0 = x_n = b$
+: $y_n = x_0 = a$
+For $k = 0, \dots, n-1$: $x_{n-k-1} \mathrel{\mathcal R} x_{n-k}$.
+Thus $y_{k+1} \mathrel{\mathcal R} y_k$.
+Since $\mathcal R$ is [[Definition:Symmetric Relation|symmetric]]:
+: For $k = 0, \dots, n-1$: $y_k \mathrel{\mathcal R} y_{k+1}$
+Thus $b \mathrel{\mathcal R^n} a$, so $b \mathrel{\mathcal T} a$.
+{{qed}}
+[[Category:Symmetric Relations]]
+[[Category:Transitive Closures]]
+kw5062bup14w60mar3g1krnuhq44vfa
+\end{proof}<|endoftext|>
+\section{Transitive Closure of Reflexive Relation is Reflexive}
+Tags: Reflexive Relations, Transitive Closures
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]].
+Let $\mathcal R$ be a [[Definition:Reflexive Relation|reflexive relation]] on $S$.
+Let $\mathcal T$ be the [[Definition:Transitive Closure of Relation|transitive closure]] of $\mathcal R$.
+Then $\mathcal T$ is [[Definition:Reflexive Relation|reflexive]].
+\end{theorem}
+
+\begin{proof}
+Let $a \in S$.
+Since $\mathcal R$ is [[Definition:Reflexive Relation|reflexive]]:
+:$\left({a, a}\right) \in \mathcal R$
+By the definition of [[Definition:Transitive Closure (Relation Theory)/Smallest Transitive Superset|transitive closure]]:
+:$\mathcal R \subseteq \mathcal T$
+Thus by the definition of [[Definition:Subset|subset]]:
+:$\left({a, a}\right) \in \mathcal T$
+Since this holds for all $a \in S$, $\mathcal T$ is [[Definition:Reflexive Relation|reflexive]].
+{{qed}}
+[[Category:Reflexive Relations]]
+[[Category:Transitive Closures]]
+ivk48ffpye1ebp4yjc0cc6v3qja04eb
+\end{proof}<|endoftext|>
+\section{Transitive Closure of Reflexive Symmetric Relation is Equivalence}
+Tags: Transitive Closures, Equivalence Relations
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $\mathcal R$ be a [[Definition:Symmetric Relation|symmetric]] and [[Definition:Reflexive Relation|reflexive]] [[Definition:Endorelation|relation]] on $S$.
+Then the [[Definition:Transitive Closure of Relation|transitive closure]] of $\mathcal R$ is an [[Definition:Equivalence Relation|equivalence relation]].
+\end{theorem}
+
+\begin{proof}
+Let $\sim$ be the [[Definition:Transitive Closure of Relation|transitive closure]] of $\mathcal R$.
+Checking in turn each of the criteria for [[Definition:Equivalence Relation|equivalence]]:
+=== Reflexivity ===
+By [[Transitive Closure of Reflexive Relation is Reflexive]]:
+: $\sim$ is [[Definition:Reflexive Relation|reflexive]].
+{{qed|lemma}}
+=== Symmetry ===
+By [[Transitive Closure of Symmetric Relation is Symmetric]]:
+: $\sim$ is [[Definition:Symmetric Relation|symmetric]].
+{{qed|lemma}}
+=== Transitivity ===
+By the definition of transitive closure:
+: $\sim$ is [[Definition:Transitive Relation|transitive]].
+{{qed|lemma}}
+$\sim$ has been shown to be [[Definition:Reflexive Relation|reflexive]], [[Definition:Symmetric Relation|symmetric]] and [[Definition:Transitive Relation|transitive]].
+Hence by definition it is an [[Definition:Equivalence Relation|equivalence relation]].
+{{qed}}
+[[Category:Transitive Closures]]
+[[Category:Equivalence Relations]]
+o62uk3c5ms86bq9v4zgnbs4mju6yw3n
+\end{proof}<|endoftext|>
+\section{Union of Overlapping Convex Sets in Toset is Convex}
+Tags: Total Orderings
+
+\begin{theorem}
+Let $\left({ S, \preceq }\right)$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+Let $U$ and $V$ be [[Definition:Convex Set (Order Theory)|convex sets]] in $S$.
+Let $U \cap V \ne \varnothing$.
+Then $U \cup V$ is also [[Definition:Convex Set (Order Theory)|convex]].
+\end{theorem}
+
+\begin{proof}
+Let $a, b, c \in S$.
+Let $a,c \in U \cup V$.
+Let $a \prec b \prec c$.
+If $a,c \in U$ then $b \in U$ because $U$ is [[Definition:Convex Set (Order Theory)|convex]].
+Thus $b \in U \cup V$ by the definition of [[Definition:Set Union|union]].
+Similarly, if $a,c \in V$ then $b \in U \cup V$.
+Otherwise, suppose [[Definition:WLOG|WLOG]] that $a \in U$ and $c \in V$.
+Since $U \cap V$ is nonempty by the premise, it has an element $p$.
+Since $\preceq$ is a [[Definition:Total Ordering|total ordering]]:
+:$b \preceq p$ or $p \preceq b$.
+If $b \preceq p$, then since $a \prec b$, $a,p \in U$, and $U$ is convex, we can conclude that
+:$b \in U$
+so $b \in U \cup V$.
+A similar argument shows that if $p \preceq b$ then $b \in V$, so $b \in U \cup V$.
+Thus in all cases we can conclude that $b \in U \cup V$, so $U \cup V$ is [[Definition:Convex Set (Order Theory)|convex]].
+{{qed}}
+[[Category:Total Orderings]]
+phpaeazpqhrvjvlm7bqfm97jnvw0amk
+\end{proof}<|endoftext|>
+\section{Rule of Material Equivalence}
+Tags: Rule of Material Equivalence, Biconditional, Implication, Conjunction
+
+\begin{theorem}
+==== [[Rule of Material Equivalence/Formulation 1|Formulation 1]] ====
+{{:Rule of Material Equivalence/Formulation 1}}
+==== [[Rule of Material Equivalence/Formulation 2|Formulation 2]] ====
+{{:Rule of Material Equivalence/Formulation 2}}
+\end{theorem}<|endoftext|>
+\section{Union of Overlapping Convex Sets in Toset is Convex/Infinite Union}
+Tags: Total Orderings
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+Let $\mathcal A$ be a [[Definition:Set of Sets|set]] of [[Definition:Convex Set (Order Theory)|convex]] subsets of $S$.
+For any $P, Q \in \mathcal A$, let there be elements $C_0, \dotsc, C_n \in \mathcal A$ such that:
+:$C_0 = P$
+:$C_n = Q$
+:For $k = 0, \dots, n-1: C_k \cap C_{k+1} \ne \varnothing$
+Then $\bigcup \mathcal A$ is [[Definition:Convex Set (Order Theory)|convex]] in $S$.
+\end{theorem}
+
+\begin{proof}
+Let $a, c \in \bigcup \mathcal A$.
+Let $b \in S$.
+Let $a \prec b \prec c$.
+Since $a, c \in \bigcup \mathcal A$, there are $P, Q \in \mathcal A$ such that $a \in P$ and $c \in Q$.
+By the premise, there are elements $C_0, \dots, C_n \in \mathcal A$ such that:
+:$C_0 = P$
+:$C_n = Q$
+:For $k = 0, \dots, n-1: C_k \cap C_{k+1} \ne \varnothing$
+{{explain|details of induction. Consider putting the finite chain case into a separate lemma.}}
+Applying [[Union of Overlapping Convex Sets in Toset is Convex]] inductively:
+:$\displaystyle \bigcup_{k \mathop = 0}^n C_k$ is [[Definition:Convex Set (Order Theory)|convex]].
+Since $\displaystyle a, c \in \bigcup_{k \mathop = 0}^n C_k$, by the definition of convexity:
+:$\displaystyle b \in \bigcup_{k \mathop = 0}^n C_k$
+Thus:
+:$\displaystyle b \in \bigcup \mathcal A$
+Since this holds for all such triples $a, b, c$, it follows that $\bigcup \mathcal A$ is [[Definition:Convex Set (Order Theory)|convex]].
+{{qed}}
+[[Category:Total Orderings]]
+0zqc4uvgtwv16acj56off7wr2zf6zy0
+\end{proof}<|endoftext|>
+\section{Rule of Material Equivalence/Formulation 2}
+Tags: Rule of Material Equivalence
+
+\begin{theorem}
+:$\vdash \left({p \iff q}\right) \iff \left({\left({p \implies q}\right) \land \left({q \implies p}\right)}\right)$
+\end{theorem}<|endoftext|>
+\section{Non-Equivalence as Disjunction of Conjunctions}
+Tags: Negation, Biconditional, Disjunction, Conjunction
+
+\begin{theorem}
+==== [[Non-Equivalence as Disjunction of Conjunctions/Formulation 1|Formulation 1]] ====
+{{:Non-Equivalence as Disjunction of Conjunctions/Formulation 1}}
+==== [[Non-Equivalence as Disjunction of Conjunctions/Formulation 2|Formulation 2]] ====
+{{:Non-Equivalence as Disjunction of Conjunctions/Formulation 2}}
+\end{theorem}<|endoftext|>
+\section{Continuous Involution is Homeomorphism}
+Tags: Continuous Mappings, Homeomorphisms, Involutions
+
+\begin{theorem}
+Let $\struct {S, \tau}$ be a [[Definition:Topological Space|topological space]].
+Let $f: S \to S$ be a [[Definition:Continuous Mapping (Topology)|continuous]] [[Definition:Involution (Mapping)|involution]].
+Then $f$ is a [[Definition:Homeomorphism|homeomorphism]].
+\end{theorem}
+
+\begin{proof}
+From [[Involution is Permutation]], $f$ is a [[Definition:Permutation|permutation]] and so a [[Definition:Bijection|bijection]].
+Since $f$ is [[Definition:Continuous Mapping (Topology)|continuous]], it suffices to verify that its [[Definition:Inverse Mapping|inverse]] is also [[Definition:Continuous Mapping (Topology)|continuous]].
+Now recall $f$ is an [[Definition:Involution (Mapping)|involution]], that is, $f^{-1} = f$.
+Thus $f^{-1}$ is also [[Definition:Continuous Mapping (Topology)|continuous]].
+Hence $f$ is a [[Definition:Homeomorphism|homeomorphism]].
+{{qed}}
+[[Category:Continuous Mappings]]
+[[Category:Homeomorphisms]]
+[[Category:Involutions]]
+2djq8sagezscmf2b3kgfi523p99kpm4
+\end{proof}<|endoftext|>
+\section{Involution is Permutation}
+Tags: Involutions, Permutation Theory
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $f: S \to S$ be an [[Definition:Involution (Mapping)|involution]].
+Then $f$ is a [[Definition:Permutation|permutation]].
+\end{theorem}
+
+\begin{proof}
+By definition, a [[Definition:Permutation|permutation]] is a [[Definition:Bijection|bijection]] from a [[Definition:Set|set]] to itself.
+Thus it is sufficient to show that $f$ is a [[Definition:Bijection|bijection]].
+By definition of [[Definition:Involution (Mapping)|involution]], for each $x \in S$:
+:$\map f {\map f x} = x$
+By [[Equality of Mappings]]:
+:$f \circ f = I_S$
+where $I_S$ is the [[Definition:Identity Mapping|identity mapping]] on $S$.
+Thus $f$ is both a [[Definition:Left Inverse Mapping|left inverse]] and a [[Definition:Right Inverse Mapping|right inverse]] of itself.
+The result follows from [[Bijection iff Left and Right Inverse]].
+{{qed}}
+[[Category:Involutions]]
+[[Category:Permutation Theory]]
+kpxtxhpxzu12jovs9dcovzqnrd9bn2g
+\end{proof}<|endoftext|>
+\section{Non-Equivalence as Disjunction of Conjunctions/Formulation 1/Forward Implication}
+Tags: Non-Equivalence as Disjunction of Conjunctions
+
+\begin{theorem}
+: $\neg \left ({p \iff q}\right) \vdash \left({\neg p \land q}\right) \lor \left({p \land \neg q}\right)$
+\end{theorem}<|endoftext|>
+\section{Non-Equivalence as Disjunction of Conjunctions/Formulation 1/Forward Implication/Proof}
+Tags: Non-Equivalence as Disjunction of Conjunctions
+
+\begin{theorem}
+: $\neg \left ({p \iff q}\right) \vdash \left({\neg p \land q}\right) \lor \left({p \land \neg q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\neg \left ({p \iff q}\right) \vdash \left({\neg p \land q}\right) \lor \left({p \land \neg q}\right)}}
+{{Premise|1|\neg \left ({p \iff q}\right)}}
+{{SequentIntro|2|1|\neg \left({\left ({p \implies q}\right) \land \left ({q \implies p}\right)}\right)|1
+ |[[Rule of Material Equivalence]]}}
+{{SequentIntro|3|1|\neg \left({\left ({\neg p \lor q}\right) \land \left ({\neg q \lor p}\right)}\right)|2
+ |[[Rule of Material Implication]] (twice)}}
+{{DeMorgan|4|1|\neg \left({\neg p \lor q}\right) \lor \neg \left ({\neg q \lor p}\right)|3|Disjunction of Negations}}
+{{DeMorgan|5|1|\left ({\neg \neg p \land \neg q}\right) \lor \left ({\neg \neg q \land \neg p}\right)|4|Conjunction of Negations}}
+{{DoubleNegElimination|6|1|\left ({p \land \neg q}\right) \lor \left ({q \land \neg p}\right)|5}}
+{{Commutation|7|1|\left ({p \land \neg q}\right) \lor \left ({\neg p \land q}\right)|6|Conjunction}}
+{{Commutation|8|1|\left ({\neg p \land q}\right) \lor \left ({p \land \neg q}\right)|7|Disjunction}}
+{{EndTableau}}
+{{qed}}
+{{LEM|Double Negation Elimination}}
+\end{proof}<|endoftext|>
+\section{Non-Equivalence as Disjunction of Conjunctions/Formulation 1/Reverse Implication/Proof}
+Tags: Non-Equivalence as Disjunction of Conjunctions
+
+\begin{theorem}
+: $\left({\neg p \land q}\right) \lor \left({p \land \neg q}\right) \vdash \neg \left ({p \iff q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\left({\neg p \land q}\right) \lor \left({p \land \neg q}\right) \vdash \neg \left ({p \iff q}\right)}}
+{{Premise|1|\left ({\neg p \land q}\right) \lor \left ({p \land \neg q}\right)}}
+{{Commutation|2|1|\left ({p \land \neg q}\right) \lor \left ({\neg p \land q}\right)|1|Disjunction}}
+{{Commutation|3|1|\left ({p \land \neg q}\right) \lor \left ({q \land \neg p}\right)|2|Conjunction}}
+{{DoubleNegIntro|4|1|\left ({\neg \neg p \land \neg q}\right) \lor \left ({\neg \neg q \land \neg p}\right)|3}}
+{{DeMorgan|5|1|\neg \left({\neg p \lor q}\right) \lor \neg \left ({\neg q \lor p}\right)|4|Conjunction of Negations}}
+{{DeMorgan|6|1|\neg \left({\left ({\neg p \lor q}\right) \land \left ({\neg q \lor p}\right)}\right)|5|Disjunction of Negations}}
+{{SequentIntro|7|1 |\neg \left({\left ({p \implies q}\right) \land \left ({q \implies p}\right)}\right)|6
+ |[[Rule of Material Implication]] (twice)}}
+{{SequentIntro|8|1|\neg \left ({p \iff q}\right)|7|[[Rule of Material Equivalence]]}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Non-Equivalence as Disjunction of Conjunctions/Formulation 1/Reverse Implication}
+Tags: Non-Equivalence as Disjunction of Conjunctions
+
+\begin{theorem}
+: $\left({\neg p \land q}\right) \lor \left({p \land \neg q}\right) \vdash \neg \left ({p \iff q}\right)$
+\end{theorem}<|endoftext|>
+\section{Non-Equivalence as Disjunction of Conjunctions/Formulation 2}
+Tags: Non-Equivalence as Disjunction of Conjunctions
+
+\begin{theorem}
+:$\vdash \paren {\neg \paren {p \iff q} } \iff \paren {\paren {\neg p \land q} \lor \paren {p \land \neg q} }$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \paren {\neg \paren {p \iff q} } \iff \paren {\paren {\neg p \land q} \lor \paren {p \land \neg q} } }}
+{{Assumption|1|\neg \paren {p \iff q} }}
+{{SequentIntro|2|1|\paren {\neg p \land q} \lor \paren {p \land \neg q}|1|[[Non-Equivalence as Disjunction of Conjunctions/Formulation 1|Non-Equivalence as Disjunction of Conjunctions: Formulation 1]]}}
+{{Implication|3||\paren {\neg \paren {p \iff q} } \implies \paren {\paren {\neg p \land q} \lor \paren {p \land \neg q} }|1|2}}
+{{Assumption|4|\paren {\neg p \land q} \lor \paren {p \land \neg q} }}
+{{SequentIntro|5|4|\neg \paren {p \iff q}|4|[[Non-Equivalence as Disjunction of Conjunctions/Formulation 1|Non-Equivalence as Disjunction of Conjunctions: Formulation 1]]}}
+{{Implication|6||\paren {\paren {\neg p \land q} \lor \paren {p \land \neg q} } \implies \paren {\neg \paren {p \iff q} }|4|5}}
+{{BiconditionalIntro|7||\paren {\neg \paren {p \iff q} } \iff \paren {\paren {\neg p \land q} \lor \paren {p \land \neg q} }|3|6}}
+{{EndTableau|qed}}
+\end{proof}<|endoftext|>
+\section{Non-Equivalence as Equivalence with Negation/Formulation 1/Forward Implication}
+Tags: Non-Equivalence as Equivalence with Negation
+
+\begin{theorem}
+:$\neg \paren {p \iff q} \vdash \paren {p \iff \neg q}$
+\end{theorem}<|endoftext|>
+\section{Non-Equivalence as Equivalence with Negation/Formulation 1}
+Tags: Non-Equivalence as Equivalence with Negation
+
+\begin{theorem}
+:$\neg \paren {p \iff q} \dashv \vdash \paren {p \iff \neg q}$
+\end{theorem}<|endoftext|>
+\section{Non-Equivalence as Equivalence with Negation/Formulation 1/Reverse Implication}
+Tags: Non-Equivalence as Equivalence with Negation
+
+\begin{theorem}
+:$\paren {p \iff \neg q} \vdash \neg \paren {p \iff q}$
+\end{theorem}<|endoftext|>
+\section{Sign of Function Matches Sign of Definite Integral}
+Tags: Integral Calculus
+
+\begin{theorem}
+Let $f$ be a [[Definition:Real Function|real function]] [[Definition:Continuous Real Function on Interval|continuous]] on some [[Definition:Closed Real Interval|closed interval]] $\closedint a b$, where $a < b$.
+Then:
+:If $\forall x \in \closedint a b: \map f x \ge 0$ then $\displaystyle \int_a^b \map f x \rd x \ge 0$
+:If $\forall x \in \closedint a b: \map f x > 0$ then $\displaystyle \int_a^b \map f x \rd x > 0$
+:If $\forall x \in \closedint a b: \map f x \le 0$ then $\displaystyle \int_a^b \map f x \rd x \le 0$
+:If $\forall x \in \closedint a b: \map f x < 0$ then $\displaystyle \int_a^b \map f x \rd x < 0$
+\end{theorem}
+
+\begin{proof}
+From [[Continuous Real Function is Darboux Integrable]], the [[Definition:Definite Integral|definite integrals]] under discussion are guaranteed to exist.
+Consider the case where $\forall x \in \closedint a b: \map f x \ge 0$.
+Define a [[Definition:Constant Mapping|constant mapping]]:
+:$f_0: \closedint a b \to \R$:
+:$\map {f_0} x = 0$
+Then:
+{{begin-eqn}}
+{{eqn | l = \map {f_0} x
+ | o = \le
+ | r = \map f x
+ | c = for any $x \in \closedint a b$: recall $\map f x \ge 0$
+}}
+{{eqn | ll= \leadsto
+ | l = \int_a^b \map {f_0} x \rd x
+ | o = \le
+ | r = \int_a^b \map f x \rd x
+ | c = [[Relative Sizes of Definite Integrals]]
+}}
+{{eqn | ll= \leadsto
+ | l = 0 \paren {b - a}
+ | o = \le
+ | r = \int_a^b \map f x \rd x
+ | c = [[Integral of Constant/Definite|Integral of Constant]]
+}}
+{{eqn | ll= \leadsto
+ | l = \int_a^b \map f x \rd x
+ | o = \ge
+ | r = 0
+}}
+{{end-eqn}}
+The proofs of the other cases are similar.
+{{qed}}
+[[Category:Integral Calculus]]
+2b6zbio87ls85qmqge23xxn4rety4a6
+\end{proof}<|endoftext|>
+\section{Non-Equivalence as Equivalence with Negation/Formulation 2}
+Tags: Non-Equivalence as Equivalence with Negation
+
+\begin{theorem}
+: $\vdash \neg \left ({p \iff q}\right) \iff \left({p \iff \neg q}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \neg \left ({p \iff q}\right) \iff \left({p \iff \neg q}\right)}}
+{{Assumption|1|\neg \left ({p \iff q}\right)}}
+{{SequentIntro|2|1|p \iff \neg q|1|[[Non-Equivalence as Equivalence with Negation/Formulation 1|Non-Equivalence as Equivalence with Negation: Formulation 1]]}}
+{{Implication|3||\left({\neg \left ({p \iff q}\right)}\right) \implies \left({p \iff \neg q}\right)|1|2}}
+{{Assumption|4|p \iff \neg q}}
+{{SequentIntro|5|4|\neg \left ({p \iff q}\right)|4|[[Non-Equivalence as Equivalence with Negation/Formulation 1|Non-Equivalence as Equivalence with Negation: Formulation 1]]}}
+{{Implication|6||\left({p \iff \neg q}\right) \implies \left({\neg \left ({p \iff q}\right)}\right)|4|5}}
+{{BiconditionalIntro|7||\left({\neg \left ({p \iff q}\right)}\right) \iff \left({p \iff \neg q}\right)|3|6}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Transitive Relation whose Symmetric Closure is not Transitive}
+Tags: Transitive Relations
+
+\begin{theorem}
+Let $S = \set {p, q}$, where $p$ and $q$ are distinct elements.
+Let $\RR = \set {\tuple {p, q} }$.
+Then $\RR$ is [[Definition:Transitive Relation|transitive]] but its [[Definition:Symmetric Closure|symmetric closure]] is not.
+\end{theorem}
+
+\begin{proof}
+$\RR$ is vacuously [[Definition:Transitive Relation|transitive]] because there are no elements $a, b, c \in S$ such that $a \mathrel \RR b$ and $b \mathrel \RR c$.
+Let $\RR^\leftrightarrow$ be the [[Definition:Symmetric Closure|symmetric closure]] of $\RR$.
+Then $\RR^\leftrightarrow = \RR \cup \RR^{-1} = \set {\tuple {p, q}, \tuple {q, p} }$.
+Then:
+:$p \mathrel {\RR^\leftrightarrow} q$ and $q \mathrel {\RR^\leftrightarrow} p$
+but:
+:$p \not \mathrel {\RR^\leftrightarrow} p$
+Therefore $\RR^\leftrightarrow$ is not [[Definition:Transitive Relation|transitive]].
+{{qed}}
+[[Category:Transitive Relations]]
+qqk8rl27ob84fq93bz3e62s112gjkzt
+\end{proof}<|endoftext|>
+\section{Symmetric Closure of Relation Compatible with Operation is Compatible}
+Tags: Compatible Relations
+
+\begin{theorem}
+Let $\struct {S, \circ}$ be a [[Definition:Magma|magma]].
+Let $\RR$ be a [[Definition:Endorelation|relation]] [[Definition:Relation Compatible with Operation|compatible]] with $\circ$.
+Let $\RR^\leftrightarrow$ be the [[Definition:Symmetric Closure|symmetric closure]] of $\RR$.
+Then $\RR^\leftrightarrow$ is compatible with $\circ$.
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Symmetric Closure|symmetric closure]]:
+:$\RR^\leftrightarrow = \RR \cup \RR^{-1}$.
+Here $\RR^{-1}$ is the [[Definition:Inverse Relation|inverse]] of $\RR$.
+By [[Inverse of Relation Compatible with Operation is Compatible]], $\RR^{-1}$ is [[Definition:Relation Compatible with Operation|compatible]] with $\circ$.
+Thus by [[Union of Relations Compatible with Operation is Compatible]]:
+:$\RR^\leftrightarrow = \RR \cup \RR^{-1}$ is compatible with $\circ$.
+{{qed}}
+[[Category:Compatible Relations]]
+b6pdw9g2zqye1clef2ior1iq7fzjwqb
+\end{proof}<|endoftext|>
+\section{Singleton is Convex Set (Order Theory)}
+Tags: Convex Sets, Singletons
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $x \in S$.
+Then the [[Definition:singleton|singleton]] $\left\{{x}\right\}$ is [[Definition:Convex Set (Order Theory)|convex]].
+\end{theorem}
+
+\begin{proof}
+Let:
+:$a, c \in \left\{{x}\right\}$
+:$b \in S$
+:$a \preceq b \preceq c$
+Then $a = c = x$.
+Thus $x \preceq b \preceq x$.
+Since $\preceq$ is an [[Definition:Ordering|ordering]], it is [[Definition:Antisymmetric Relation|antisymmetric]].
+Thus $b = x$, so $b \in \left\{{x}\right\}$.
+Since this holds for the only such triple, $\left\{{x}\right\}$ is [[Definition:Convex Set (Order Theory)|convex]].
+{{qed}}
+[[Category:Convex Sets]]
+[[Category:Singletons]]
+8yo4pko9er4prextiv9qlaxo5auvz4g
+\end{proof}<|endoftext|>
+\section{Rule of Transposition/Variant 1/Formulation 2}
+Tags: Rule of Transposition
+
+\begin{theorem}
+:$\vdash \paren {p \implies \neg q} \iff \paren {q \implies \neg p}$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 1}
+Tags: Rule of Transposition
+
+\begin{theorem}
+==== [[Rule of Transposition/Variant 1/Formulation 1|Formulation 1]] ====
+{{:Rule of Transposition/Variant 1/Formulation 1}}
+==== [[Rule of Transposition/Variant 1/Formulation 2|Formulation 2]] ====
+{{:Rule of Transposition/Variant 1/Formulation 2}}
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 1/Formulation 2/Reverse Implication}
+Tags: Rule of Transposition
+
+\begin{theorem}
+: $\vdash \left({q \implies \neg p}\right) \implies \left({p \implies \neg q}\right)$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 1/Formulation 1}
+Tags: Rule of Transposition
+
+\begin{theorem}
+:$p \implies \neg q \dashv \vdash q \implies \neg p$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 1/Formulation 1/Forward Implication}
+Tags: Rule of Transposition
+
+\begin{theorem}
+:$p \implies \neg q \vdash q \implies \neg p$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 1/Formulation 1/Reverse Implication}
+Tags: Rule of Transposition
+
+\begin{theorem}
+:$q \implies \neg p \vdash p \implies \neg q$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 2}
+Tags: Rule of Transposition
+
+\begin{theorem}
+==== [[Rule of Transposition/Variant 2/Formulation 1|Formulation 1]] ====
+{{:Rule of Transposition/Variant 2/Formulation 1}}
+==== [[Rule of Transposition/Variant 2/Formulation 2|Formulation 2]] ====
+{{:Rule of Transposition/Variant 2/Formulation 2}}
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 2/Formulation 1}
+Tags: Rule of Transposition
+
+\begin{theorem}
+: $\neg p \implies q \dashv \vdash \neg q \implies p$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 2/Formulation 1/Forward Implication}
+Tags: Rule of Transposition
+
+\begin{theorem}
+:$\neg p \implies q \vdash \neg q \implies p$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 2/Formulation 1/Forward Implication/Proof}
+Tags: Rule of Transposition
+
+\begin{theorem}
+: $\neg p \implies q \vdash \neg q \implies p$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\neg p \implies q \vdash \neg q \implies p}}
+{{Premise|1|\neg p \implies q}}
+{{Assumption|2|\neg q}}
+{{ModusTollens|3|1, 2|\neg \neg p|1|2}}
+{{DoubleNegElimination|4|1, 2|p|3}}
+{{Implication|5|1|\neg q \implies p|2|4}}
+{{EndTableau}}
+{{Qed}}
+{{LEM|Double Negation Elimination|4}}
+\end{proof}<|endoftext|>
+\section{Rule of Transposition/Variant 2/Formulation 1/Reverse Implication}
+Tags: Rule of Transposition
+
+\begin{theorem}
+: $\neg q \implies p \vdash \neg p \implies q$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 2/Formulation 1/Reverse Implication/Proof}
+Tags: Rule of Transposition
+
+\begin{theorem}
+: $\neg q \implies p \vdash \neg p \implies q$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\neg q \implies p \vdash \neg p \implies q}}
+{{Premise|1|\neg q \implies p}}
+{{Assumption|2|\neg p}}
+{{ModusTollens|3|1, 2|\neg \neg q|1|2}}
+{{DoubleNegElimination|4|1, 2|q|3}}
+{{Implication|5|1|\neg p \implies q|2|4}}
+{{EndTableau}}
+{{Qed}}
+{{LEM|Double Negation Elimination|4}}
+\end{proof}<|endoftext|>
+\section{Rule of Transposition/Variant 2/Formulation 1/Proof 2}
+Tags: Truth Table Proofs, Rule of Transposition
+
+\begin{theorem}
+:$\neg p \implies q \dashv \vdash \neg q \implies p$
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]] to the proposition.
+As can be seen by inspection, the [[Definition:Truth Value|truth values]] under the [[Definition:Main Connective (Propositional Logic)|main connectives]] match for all [[Definition:Boolean Interpretation|boolean interpretations]].
+$\begin{array}{|cccc||cccc|} \hline
+\neg & p & \implies & q & \neg & q & \implies & p \\
+\hline
+T & F & F & F & T & F & F & F \\
+T & F & T & T & F & T & T & F \\
+F & T & T & F & T & F & T & T \\
+F & T & T & T & F & T & T & T \\
+\hline
+\end{array}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Rule of Transposition/Variant 2/Formulation 2/Forward Implication}
+Tags: Rule of Transposition
+
+\begin{theorem}
+: $\vdash \left({\neg p \implies q}\right) \implies \left({\neg q \implies p}\right)$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 2/Formulation 2/Reverse Implication}
+Tags: Rule of Transposition
+
+\begin{theorem}
+: $\vdash \left({\neg q \implies p}\right) \implies \left({\neg p \implies q}\right)$
+\end{theorem}<|endoftext|>
+\section{Rule of Transposition/Variant 2/Formulation 2}
+Tags: Rule of Transposition
+
+\begin{theorem}
+: $\vdash \left({\neg p \implies q}\right) \iff \left({\neg q \implies p}\right)$
+\end{theorem}<|endoftext|>
+\section{Boundary of Polygon is Topological Boundary}
+Tags: Plane Geometry, Topology
+
+\begin{theorem}
+Let $P$ be a [[Definition:Polygon|polygon]] embedded in $\R^2$.
+Denote the [[Definition:Boundary (Geometry)|boundary]] of $P$ as $\partial P$.
+Let $\Int P$ and $\Ext P$ denote the [[Definition:Interior of Jordan Curve|interior]] and [[Definition:Exterior of Jordan Curve|exterior]] of $\partial P$, where $\partial P$ is considered as a [[Definition:Jordan Curve|Jordan curve]].
+Then the [[Definition:Boundary (Topology)|topological boundary]] of $\Int P$ is equal to $\partial P$, and the [[Definition:Boundary (Topology)|topological boundary]] of $\Ext P$ is equal to $\partial P$.
+\end{theorem}
+
+\begin{proof}
+Denote the [[Definition:Boundary (Topology)|topological boundary]] of $\Int P$ as $\partial \Int P$, and denote the [[Definition:Boundary (Topology)|topological boundary]] of $\Ext P$ as $\partial \Ext P$.
+=== Topological Boundary is Subset of Boundary ===
+From [[Boundary of Polygon is Jordan Curve]], it follows that the [[Definition:Boundary (Geometry)|boundary]] $\partial P$ is equal to the [[Definition:Image of Mapping|image]] of a [[Definition:Jordan Curve|Jordan curve]].
+From [[Jordan Polygon Theorem]], it follows that $\Int P$ and $\Ext P$ are [[Definition:Disjoint Sets|disjoint]], [[Definition:Open Set (Metric Space)|open]] and [[Definition:Path-Connected Metric Subspace|path-connected]].
+From [[Set is Open iff Disjoint from Boundary]], it follows that $\Int P$ and $\partial \Int P$ are disjoint.
+From [[Disjoint Open Sets remain Disjoint with one Closure]], it follows that $\Ext P$ and the [[Definition:Closure (Topology)|closure]] of $\Int P$ are disjoint.
+As $\partial \Int P$ is a [[Definition:Subset|subset]] of the closure of $\Int P$, it follows that $\Ext P$ and $\partial \Int P$ are disjoint.
+As $\R^2 = \Int P \cup \Ext P \cup \partial P$ by the [[Jordan Polygon Theorem]], it follows that $\partial \Int P \subseteq \partial P$.
+Similarly, it follows that $\partial \Ext P \subseteq \partial P$.
+{{qed|lemma}}
+=== Boundary is Subset of Topological Boundary ===
+Let $p \in \partial P$ such that $p$ is not a [[Definition:Vertex of Polygon|vertex]], and let $S$ be the [[Definition:Side of Polygon|side]] of $P$ that $p$ is a part of.
+Denote the $j$th side of $P$ as $S_j$, and let $n \in \N$ be the total number of sides.
+Let $\displaystyle \delta = \map d {S, \bigcup_{j \mathop = 1, \ldots, n: S_j \mathop \ne S} S_j}$ be the [[Definition:Euclidean Metric on Real Number Plane|Euclidean]] [[Definition:Distance between Element and Subset of Metric Space|distance]] between $S$ and all other sides of $P$.
+From [[Distance between Closed Sets in Euclidean Space]], it follows that $\delta > 0$.
+Let $\epsilon \in \openint 0 \delta$, and denote the [[Definition:Open Ball|open ball]] of $p$ with radius $\epsilon$ as $\map {B_\epsilon} p$.
+Choose $x_1 \in \map {B_\epsilon} p$, and put $\mathbf v = p - x_1$.
+Let $\LL_1 = \set {x_1 + s \mathbf v: s \in \R_{\ge 0} }$ be a [[Definition:Ray (Geometry)|ray]] with start point $x_1$.
+Then $\LL_1$ and $S$ have one [[Definition:Crossing (Jordan Curve)|crossing]] at $p$.
+Put $x_2 = x_1 + 2 \mathbf v$, and put $\LL_2 = \set {x_2 + s \mathbf v: s \in \R_{\ge 0} }$, so $\LL_1 \cap \LL_2 = \LL_2$.
+Then $\LL_2$ and $S$ do not cross.
+As $x_2 \in \map {B_\epsilon} p$ with $\epsilon < \delta$, it follows from the definition of $\delta$ that if $\LL_1$ and some side $S' \ne S$ have a crossing, then $\LL_2$ and $S'$ also have a crossing.
+If $\map N {x_i}$ denotes the number of crossings between $\LL_i$ and $\partial P$, it follows that $\map N {x_1} = \map N {x_2} + 1$.
+Then $\map {\mathrm {par} } {x_1} \ne \map {\mathrm {par} } {x_2}$, where $\map {\mathrm {par} } {x_i}$ denotes the [[Definition:Crossing (Jordan Curve)/Parity|parity]] of $x_i$.
+From [[Jordan Polygon Interior and Exterior Criterion]], it follows that one of the points $x_1, x_2$ belongs to $\Int P$, and the other point belongs to $\Ext P$.
+As $\epsilon$ was arbitrarily small, it follows that $p$ is a [[Definition:Limit Point (Metric Space)|limit point]] of both $\Int P$ and $\Ext P$.
+By [[Definition:Closure (Topology)|definition of closure]], it follows that $p$ lies in the closure of $\Int P$ and $\Ext P$.
+Then $p \in \partial \Int P$ and $p \in \partial \Ext P$, as the [[Jordan Polygon Theorem]] shows that $\partial P$ and $\Int P$, $\Ext P$ are [[Definition:Disjoint Sets|disjoint]].
+Now, suppose that $p$ is a [[Definition:Vertex of Polygon|vertex]] of $S$.
+Then we can find a [[Definition:Sequence|sequence]] $\sequence {p_k}$ of points that lies on the [[Definition:Adjacent Sides|adjacent sides]] of $p$ such that the sequence [[Definition:Convergent Sequence (Metric Space)|converges]] to $p$.
+As none of the points in $\sequence {p_k}$ are [[Definition:Vertex of Polygon|vertices]], all $p_k$ lie in $\partial \Int P$ and $\partial \Ext P$.
+As [[Boundary of Set is Closed]], it follows that $p \in \partial \Int P$, and $p \in \partial \Ext P$.
+Hence, $\partial P \subseteq \partial \Int P$, and $\partial P \subseteq \partial \Ext P$.
+{{qed|lemma}}
+The result now follows by definition of [[Definition:Set Equality/Definition 2|set equality]].
+{{qed}}
+[[Category:Plane Geometry]]
+[[Category:Topology]]
+4hswurvo6sann59hm8mbeq871pdgakx
+\end{proof}<|endoftext|>
+\section{Jordan Curve and Jordan Arc form Two Jordan Curves}
+Tags: Jordan Curves, Jordan Arcs
+
+\begin{theorem}
+Let $\closedint a b$ denote the [[Definition:Closed Real Interval|closed real interval]] between $a \in \R, b \in \R: a \le b$.
+Let $\gamma: \closedint a b \to \R^2$ be a [[Definition:Jordan Curve|Jordan curve]].
+Let the [[Definition:Interior of Jordan Curve|interior]] of $\gamma$ be denoted $\Int \gamma$.
+Let the [[Definition:Image of Mapping|image]] of $\gamma$ be denoted $\Img \gamma$.
+Let $\sigma: \closedint c d \to \R^2$ be a [[Definition:Jordan Arc|Jordan arc]] such that:
+:$\map \sigma c \ne \map \sigma d$
+:$\map \sigma c, \map \sigma d \in \Img \gamma$
+and:
+:$\forall t \in \openint c d: \map \sigma t \in \Int \gamma$
+Let $t_1 = \map {\gamma^{-1} } {\map \sigma c}$.
+Let $t_2 = \map {\gamma^{-1} } {\map \sigma d}$.
+Let $t_1 < t_2$.
+Define:
+:$-\sigma: \closedint c d \to \Img \sigma$ by $-\map \sigma t = \map \sigma {c + d - t}$
+Let $*$ denote [[Definition:Concatenation (Topology)|concatenation of paths]].
+Let $\gamma \restriction_{\closedint a {t_1} }$ denote the [[Definition:Restriction of Mapping|restriction]] of $\gamma$ to $\closedint a {t_1}$.
+Define:
+:$\gamma_1 = \gamma {\restriction_{\closedint a {t_1} } } * \sigma * \gamma {\restriction_{\closedint {t_2} b} }$
+Define:
+:$\gamma_2 = \gamma {\restriction_{\closedint {t_1} {t_2} } } * \paren {-\sigma}$
+Then $\gamma_1$ and $\gamma_2$ are [[Definition:Jordan Curve|Jordan curves]] such that:
+:$\Int {\gamma_1} \subseteq \Int \gamma$
+and:
+:$\Int {\gamma_2} \subseteq \Int \gamma$
+\end{theorem}
+
+\begin{proof}
+As:
+:$\Int \gamma$ and $\Img \gamma$ are [[Definition:Disjoint Sets|disjoint]] by the [[Jordan Curve Theorem]]
+and:
+:$\map \sigma {\openint c d} \subseteq \Int \gamma$
+it follows that:
+:$\Img \gamma \cap \Img \sigma = \set {\map \sigma c, \map \sigma d}$.
+As $\gamma$ is a [[Definition:Jordan Curve|Jordan curve]], it follows that $\gamma {\restriction_{\closedint a {t_1} } }$ and $\gamma {\restriction_{\closedint {t_2} b} }$ [[Definition:Set Intersection|intersect]] only in $\map \gamma a$.
+It follows that $\gamma_1$ is a [[Definition:Jordan Arc|Jordan arc]].
+As the [[Definition:Initial Point of Path|initial point]] of $\gamma_1$ is $\map \gamma a$, and the [[Definition:Final Point of Path|final point]] of $\gamma_1$ is $\map \gamma b = \map \gamma a$, it follows that $\gamma_1$ is a [[Definition:Jordan Curve|Jordan curve]].
+As $\Img {-\sigma} = \Img \sigma$, it follows that $\gamma_2$ is a [[Definition:Jordan Arc|Jordan arc]].
+As $\map \gamma {t_1} = \map \sigma c = -\map \sigma d$, it follows that $\gamma_2$ is a [[Definition:Jordan Curve|Jordan curve]].
+{{qed|lemma}}
+Denote the [[Definition:Exterior of Jordan Curve|exterior]] of $\gamma$ as $\Ext \gamma$.
+Let $q_0 \in \Ext \gamma$ be determined.
+Let $q \in \Ext \gamma$.
+By the [[Jordan Curve Theorem]], $\Ext \gamma$ is [[Definition:Unbounded Metric Space|unbounded]].
+Hence for all $N \in \N$ we can choose $q \in \Ext \gamma$ such that:
+:$\map d {\mathbf 0, q} > N$
+where $d$ denotes the [[Definition:Euclidean Metric on Real Number Plane|Euclidean metric]] on $\R^2$.
+{{explain|The word "chooses" suggests that some choice function may be involved here, possibly [[Axiom:Axiom of Countable Choice]]? Someone knowledgeable may need to be consulted.}}
+The [[Jordan Curve Theorem]] also shows that $\Ext \gamma$ is [[Definition:Open Set (Metric Space)|open]] and [[Definition:Connected (Topology)|connected]].
+From [[Connected Open Subset of Euclidean Space is Path-Connected]], there exists a [[Definition:Path (Topology)|path]]:
+:$\rho: \closedint 0 1 \to \Ext \gamma$
+joining $q$ and $q_0$.
+We have:
+:$\Img {\gamma_1} \subseteq \Int \gamma \cup \Img \gamma$
+This is [[Definition:Disjoint Sets|disjoint]] from $\Ext \gamma$.
+Thus it follows that $\rho$ is a [[Definition:Path (Topology)|path]] in either $\Ext {\gamma_1}$ or $\Int {\gamma_1}$.
+We have that $\map d {\mathbf 0, q}$ can be arbitrarily large.
+Also, $\Int {\gamma_1}$ is [[Definition:Bounded Metric Space|bounded]].
+So it follows that $\rho$ is a [[Definition:Path (Topology)|path]] in $\Ext {\gamma_1}$.
+In particular:
+:$q \in \Ext {\gamma_1}$
+Therefore:
+:$\Ext \gamma \subseteq \Ext {\gamma_1}$
+Let $q_1 \in \Int {\gamma_1}$.
+Then:
+:$q_1 \notin \Ext \gamma$
+as $\Int {\gamma_1}$ and $\Ext {\gamma_1}$ are [[Definition:Disjoint Sets|disjoint]].
+It follows that:
+:$q_1 \in \Int \gamma$
+Therefore:
+:$\Int {\gamma_1} \subseteq \Int \gamma$
+Similarly, it follows that:
+:$\Int {\gamma_2} \subseteq \Int \gamma$
+{{qed}}
+[[Category:Jordan Curves]]
+[[Category:Jordan Arcs]]
+tca9jwpxhq4l9f8frelckm12c0k0px3
+\end{proof}<|endoftext|>
+\section{Restriction of Total Ordering is Total Ordering}
+Tags: Total Orderings
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be a [[Definition:Total Ordering|total ordering]].
+Let $T \subseteq S$.
+Let $\preceq \restriction_T$ be the [[Definition:Restriction of Ordering|restriction]] of $\preceq$ to $T$.
+Then $\preceq \restriction_T$ is a [[Definition:Total Ordering|total ordering]] of $T$.
+\end{theorem}
+
+\begin{proof}
+By [[Restriction of Ordering is Ordering]], $\preceq \restriction_T$ is an [[Definition:Ordering|ordering]].
+Let $x, y \in T$.
+As $T \subseteq S$ it follows by definition of [[Definition:Subset|subset]] that:
+:$x, y \in S$
+As $\preceq$ is a [[Definition:Total Ordering|total ordering]]:
+:$\tuple {x, y} \in {\preceq}$
+or:
+:$\tuple {y, x} \in {\preceq}$
+Suppose $\tuple {x, y} \in {\preceq}$.
+As $x, y \in T$, it follows by definition of [[Definition:Cartesian Product|cartesian product]] that:
+:$\tuple {x, y} \in T \times T$
+Thus:
+:$\tuple {x, y} \in \paren {T \times T} \cap {\preceq}$
+By definition of the [[Definition:Restriction of Ordering|restriction]] of $\preceq$ to $T$:
+:$\paren {T \times T} \cap {\preceq} = {\preceq \restriction_T}$
+That is:
+:$\tuple {x, y} \in {\preceq \restriction_T}$
+A similar argument shows that:
+:$\tuple {y, x} \in {\preceq} \implies \tuple {y, x} \in {\preceq \restriction_T}$
+Thus $\preceq \restriction_T$ is a [[Definition:Total Ordering|total ordering]] of $T$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Ordering Cycle implies Equality/General Case}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\left({S,\preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $x_0, x_1, \dots, x_n \in S$.
+Suppose that for $k = 0, 1, \dots, n - 1: x_k \preceq x_{k+1}$.
+Suppose also that $x_n \preceq x_0$.
+Then $x_0 = x_1 = \dots = x_n$.
+\end{theorem}
+
+\begin{proof}
+Since $\preceq$ is an [[Definition:Ordering|ordering]] it is [[Definition:Transitive Relation|transitive]] and [[Definition:Antisymmetric Relation|antisymmetric]].
+By [[Transitive Chaining]], it follows from the first premise that for all $k$ with $0 \le k \le n$:
+:$x_0 \preceq x_k$
+and also:
+:$x_k \preceq x_n$
+The other premise states that $x_n \preceq x_0$.
+By [[Definition:Transitive Relation|transitivity]] of $\preceq$, this combines with the above to:
+:$x_k \preceq x_0$
+Since $\preceq$ is [[Definition:Antisymmetric Relation|antisymmetric]], this means that $x_0 = x_k$ for $0 \le k \le n$.
+That is, $x_0 = x_1 = \dots = x_n$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Finite Non-Empty Subset of Ordered Set has Maximal and Minimal Elements}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $T \subseteq S$ be a [[Definition:Finite Set|finite]], [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] of $S$.
+Then $T$ has a [[Definition:Maximal Element|maximal element]] and a [[Definition:Minimal Element|minimal element]].
+\end{theorem}
+
+\begin{proof}
+We will show that each finite subset of $S$ has a [[Definition:Minimal Element|minimal element]].
+The existence of a [[Definition:Maximal Element|maximal element]] then follows from duality.
+Proof by [[Principle of Mathematical Induction|induction]]:
+For all $n \in \Z_{\ge 1}$, let $P \left({n}\right)$ be the [[Definition:Proposition|proposition]]:
+:Every set with $n$ elements has a [[Definition:Minimal Element|minimal element]].
+=== Basis for the Induction ===
+Let $T$ have exactly one [[Definition:Element|element]] $x$.
+Since $x \not \prec x$ it follows that $x$ is [[Definition:Minimal Element|minimal]].
+This is the [[Principle of Mathematical Induction#Basis for the Induction|basis for the induction]].
+=== Induction Hypothesis ===
+Now it needs to be shown that, if $P \left({k}\right)$ is true, where $k \ge 1$, then it logically follows that $P \left({k + 1}\right)$ is true.
+So this is the [[Principle of Mathematical Induction#Induction Hypothesis|induction hypothesis]]:
+:Every set with $k$ elements has a [[Definition:Minimal Element|minimal element]].
+from which it is to be shown that:
+:Every set with $k + 1$ elements has a [[Definition:Minimal Element|minimal element]].
+=== Induction Step ===
+Suppose that every [[Definition:Subset|subset]] of $S$ with $k$ elements has a [[Definition:Minimal Element|minimal element]].
+Let $T$ have $k + 1$ elements.
+Then:
+: $T = T_0 \cup \left\{{x}\right\}$
+where $T_0$ has $k$ elements and $x \in T \setminus T_0$.
+Then $T_0$ has a [[Definition:Minimal Element|minimal element]] $m_0$.
+If $m_0$ is not a [[Definition:Minimal Element|minimal element]] of $T$, then:
+: $x \prec m_0$
+In that case, no $y \in T$ satisfies $y \prec x$: for any such $y$ we would have $y \prec m_0$ by [[Definition:Transitive Relation|transitivity]], contradicting the fact that $m_0$ is [[Definition:Minimal Element|minimal]] in $T_0$.
+Thus $x$ is a [[Definition:Minimal Element|minimal element]] of $T$.
+Thus either $m_0$ or $x$ is a [[Definition:Minimal Element|minimal element]] of $T$.
+So $P \left({k}\right) \implies P \left({k + 1}\right)$ and the result follows by the [[Principle of Mathematical Induction]].
+Therefore:
+:For every [[Definition:Finite Set|finite]], [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] $T$ of $S$, $T$ has a [[Definition:Maximal Element|maximal element]] and a [[Definition:Minimal Element|minimal element]]
+The result follows by the [[Principle of Mathematical Induction]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Star Convex Set is Path-Connected}
+Tags: Vector Spaces, Path-Connected Sets
+
+\begin{theorem}
+Let $A$ be a [[Definition:Star Convex Set|star convex]] [[Definition:Subset|subset]] of a [[Definition:Vector Space|vector space]] $V$ over $\R$ or $\C$.
+Then $A$ is [[Definition:Path-Connected Set (Topology)|path-connected]].
+\end{theorem}
+
+\begin{proof}
+Let $x_1, x_2 \in A$.
+Let $a \in A$ be a [[Definition:Star Convex Set/Star Center|star center]] of $A$.
+By [[Definition:Star Convex Set|definition of star convex set]], it follows that for all $t \in \left[{0 \,.\,.\, 1}\right]$, we have $t x_1 + \left({1 - t}\right) a, t x_2 + \left({1 - t}\right) a \in A$.
+Define two [[Definition:Path (Topology)|paths]] $\gamma_1, \gamma_2: \left[{0 \,.\,.\, 1}\right] \to A$ by $\gamma_1 \left({t}\right) = t x_1 + \left({1 - t}\right) a$, and $\gamma_2 \left({t}\right) = t a + \left({1 - t}\right) x_2$.
+As $\gamma_2 \left({t}\right) = \left({1 - t}\right) x_2 + \left({1 - \left({1 - t}\right) }\right) a$, and $\left({1 - t}\right) \in \left[{0 \,.\,.\, 1}\right]$, it follows that $\gamma_2 \left({t}\right) \in A$.
+Note that $\gamma_1 \left({0}\right) = x_1$, $\gamma_1 \left({1}\right) = \gamma_2 \left({0}\right) = a$, and $\gamma_2 \left({1}\right) = x_2$.
+Define $\gamma: \left[{0 \,.\,.\, 1}\right] \to A$ as the [[Definition:Concatenation (Topology)|concatenation]] $\gamma_1 * \gamma_2$.
+Then $\gamma$ is a path in $A$ joining $x_1$ and $x_2$, so $A$ is [[Definition:Path-Connected Set (Topology)|path-connected]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Minimal Element of Chain is Smallest Element}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $C$ be a [[Definition:Chain (Set Theory)|chain]] in $S$.
+Let $m$ be a [[Definition:Minimal Element|minimal element]] of $C$.
+Then $m$ is the [[Definition:Smallest Element|smallest element]] of $C$.
+\end{theorem}
+
+\begin{proof}
+Let $x \in C$.
+Since $m$ is [[Definition:Minimal Element|minimal]] in $C$, $x \not\prec m$.
+Since $C$ is a [[Definition:Chain (Set Theory)|chain]], $x = m$ or $m \prec x$.
+Thus for each $x \in C$, $m \preceq x$.
+Therefore $m$ is the [[Definition:Smallest Element|smallest element]] of $C$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Maximal Element of Chain is Greatest Element}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $C$ be a [[Definition:Chain (Set Theory)|chain]] in $S$.
+Let $m$ be a [[Definition:Maximal Element|maximal element]] of $C$.
+Then $m$ is the [[Definition:Greatest Element|greatest element]] of $C$.
+\end{theorem}
+
+\begin{proof}
+Let $x \in C$.
+Since $m$ is [[Definition:Maximal Element|maximal]] in $C$, $m \not\prec x$.
+Since $C$ is a [[Definition:Chain (Set Theory)|chain]], $x = m$ or $x \prec m$.
+Thus for each $x \in C$, $x \preceq m$.
+Therefore $m$ is the [[Definition:Greatest Element|greatest element]] of $C$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Modus Ponendo Ponens/Variant 3}
+Tags: Modus Ponendo Ponens
+
+\begin{theorem}
+:$\vdash \paren {\paren {p \implies q} \land p} \implies q$
+\end{theorem}<|endoftext|>
+\section{Rule of Material Equivalence/Formulation 1/Proof 1}
+Tags: Rule of Material Equivalence
+
+\begin{theorem}
+:$p \iff q \dashv \vdash \left({p \implies q}\right) \land \left({q \implies p}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|p \iff q \vdash \left({p \implies q}\right) \land \left({q \implies p}\right)}}
+{{Premise|1|p \iff q}}
+{{BiconditionalElimination|2|1|p \implies q|1|1}}
+{{BiconditionalElimination|3|1|q \implies p|1|2}}
+{{Conjunction|4|1|\left({p \implies q}\right) \land \left({q \implies p}\right)|2|3}}
+{{EndTableau}}
+{{BeginTableau|\left({p \implies q}\right) \land \left({q \implies p}\right) \vdash p \iff q}}
+{{Premise|1|\left({p \implies q}\right) \land \left({q \implies p}\right)}}
+{{Simplification|2|1|p \implies q|1|1}}
+{{Simplification|3|1|q \implies p|1|2}}
+{{BiconditionalIntro|4|1|p \iff q|2|3}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Rule of Material Equivalence/Formulation 1/Proof 2}
+Tags: Rule of Material Equivalence
+
+\begin{theorem}
+:$p \iff q \dashv \vdash \left({p \implies q}\right) \land \left({q \implies p}\right)$
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]].
+As can be seen by inspection, the [[Definition:Truth Value|truth values]] under the [[Definition:Main Connective (Propositional Logic)|main connectives]] match for all [[Definition:Boolean Interpretation|boolean interpretations]].
+$\begin{array}{|ccc|ccccccc|} \hline
+p & \iff & q & (p & \implies & q) & \land & (q & \implies & p) \\
+\hline
+F & T & F & F & T & F & T & F & T & F \\
+F & F & T & F & T & T & F & T & F & F \\
+T & F & F & T & F & F & F & F & T & T \\
+T & T & T & T & T & T & T & T & T & T \\
+\hline
+\end{array}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Rule of Material Equivalence/Formulation 2/Proof 1}
+Tags: Rule of Material Equivalence
+
+\begin{theorem}
+:$\vdash \left({p \iff q}\right) \iff \left({\left({p \implies q}\right) \land \left({q \implies p}\right)}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \left({p \iff q}\right) \iff \left({\left({p \implies q}\right) \land \left({q \implies p}\right)}\right)}}
+{{Assumption|1|p \iff q}}
+{{SequentIntro|2|1|\left({\left({p \implies q}\right) \land \left({q \implies p}\right)}\right)|1|[[Rule of Material Equivalence/Formulation 1|Rule of Material Equivalence: Formulation 1]]}}
+{{Implication|3||\left({p \iff q}\right) \implies \left({\left({p \implies q}\right) \land \left({q \implies p}\right)}\right)|1|2}}
+{{Assumption|4|\left({p \implies q}\right) \land \left({q \implies p}\right)}}
+{{SequentIntro|5|4|p \iff q|4|[[Rule of Material Equivalence/Formulation 1|Rule of Material Equivalence: Formulation 1]]}}
+{{Implication|6||\left({\left({p \implies q}\right) \land \left({q \implies p}\right)}\right) \implies \left({p \iff q}\right)|4|5}}
+{{BiconditionalIntro|7||\left({p \iff q}\right) \iff \left({\left({p \implies q}\right) \land \left({q \implies p}\right)}\right)|3|6}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Rule of Material Equivalence/Formulation 2/Proof 2}
+Tags: Rule of Material Equivalence, Truth Table Proofs
+
+\begin{theorem}
+:$\vdash \left({p \iff q}\right) \iff \left({\left({p \implies q}\right) \land \left({q \implies p}\right)}\right)$
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]].
+As can be seen by inspection, the [[Definition:Truth Value|truth values]] under the [[Definition:Main Connective (Propositional Logic)|main connective]] are [[Definition:True|true]] for all [[Definition:Boolean Interpretation|boolean interpretations]].
+$\begin{array}{|ccc|c|ccccccc|} \hline
+(p & \iff & q) & \iff & (p & \implies & q) & \land & (q & \implies & p) \\
+\hline
+F & T & F & T & F & T & F & T & F & T & F \\
+F & F & T & T & F & T & T & F & T & F & F \\
+T & F & F & T & T & F & F & F & F & T & T \\
+T & T & T & T & T & T & T & T & T & T & T \\
+\hline
+\end{array}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{De Morgan's Laws (Logic)/Conjunction/Formulation 2/Proof 2}
+Tags: De Morgan's Laws (Logic), Truth Table Proofs
+
+\begin{theorem}
+: $\vdash \left({p \land q}\right) \iff \left({\neg \left({\neg p \lor \neg q}\right)}\right)$
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]].
+As can be seen by inspection, the [[Definition:Truth Value|truth values]] under the [[Definition:Main Connective (Propositional Logic)|main connective]] are [[Definition:True|true]] for all [[Definition:Boolean Interpretation|boolean interpretations]].
+$\begin{array}{|ccc|c|cccccc|} \hline
+(p & \land & q) & \iff & (\neg & (\neg & p & \lor & \neg & q)) \\
+\hline
+F & F & F & T & F & T & F & T & T & F \\
+F & F & T & T & F & T & F & T & F & T \\
+T & F & F & T & F & F & T & T & T & F \\
+T & T & T & T & T & F & T & F & F & T \\
+\hline
+\end{array}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Modus Tollendo Ponens/Variant/Formulation 2/Proof 2}
+Tags: Modus Tollendo Ponens, Truth Table Proofs
+
+\begin{theorem}
+: $\vdash \left({p \lor q}\right) \iff \left({\neg p \implies q}\right)$
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]].
+As can be seen by inspection, the [[Definition:Truth Value|truth values]] under the [[Definition:Main Connective (Propositional Logic)|main connective]] are [[Definition:True|true]] for all [[Definition:Boolean Interpretation|boolean interpretations]].
+$\begin{array}{|ccc|c|cccc|} \hline
+p & \lor & q & \iff & \neg & p & \implies & q \\
+\hline
+F & F & F & T & T & F & F & F \\
+F & T & T & T & T & F & T & T \\
+T & T & F & T & F & T & T & F \\
+T & T & T & T & F & T & T & T \\
+\hline
+\end{array}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Rule of Transposition/Formulation 2/Proof 2}
+Tags: Rule of Transposition, Truth Table Proofs
+
+\begin{theorem}
+: $\vdash \left({p \implies q}\right) \iff \left({\neg q \implies \neg p}\right)$
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]] to the proposition.
+As can be seen by inspection, the [[Definition:Truth Value|truth values]] under the [[Definition:Main Connective (Propositional Logic)|main connectives]] match for all [[Definition:Boolean Interpretation|boolean interpretations]].
+$\begin{array}{|ccc|c|ccccc|} \hline
+(p & \implies & q) & \iff & (\neg & q & \implies & \neg & p) \\
+\hline
+F & T & F & T & T & F & T & T & F \\
+F & T & T & T & F & T & T & T & F \\
+T & F & F & T & T & F & F & F & T \\
+T & T & T & T & F & T & T & F & T \\
+\hline
+\end{array}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Biconditional as Disjunction of Conjunctions/Formulation 2/Proof 2}
+Tags: Biconditional as Disjunction of Conjunctions, Truth Table Proofs
+
+\begin{theorem}
+: $\vdash \left({p \iff q}\right) \iff \left({\left({p \land q}\right) \lor \left({\neg p \land \neg q}\right)}\right)$
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]].
+As can be seen by inspection, the [[Definition:Truth Value|truth values]] under the [[Definition:Main Connective (Propositional Logic)|main connective]] are [[Definition:True|true]] for all [[Definition:Boolean Interpretation|boolean interpretations]].
+$\begin{array}{|ccc|c|ccccccccc|} \hline
+(p & \iff & q) & \iff & ((p & \land & q) & \lor & (\neg & p & \land & \neg & q)) \\
+\hline
+F & T & F & T & F & F & F & T & T & F & T & T & F \\
+F & F & T & T & F & F & T & F & T & F & F & F & T \\
+T & F & F & T & T & F & F & F & F & T & F & T & F \\
+T & T & T & T & T & T & T & T & F & T & F & F & T \\
+\hline
+\end{array}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Convex Set Characterization (Order Theory)}
+Tags: Convex Sets
+
+\begin{theorem}
+Let $\left({S, \preceq, \tau}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $C \subseteq S$.
+{{TFAE}}
+{{begin-axiom}}
+{{axiom | n = 1
+ | t = $C$ is [[Definition:Convex Set (Order Theory)|convex]].
+}}
+{{axiom | n = 2
+ | t = $C$ is the intersection of an [[Definition:Upper Set|upper set]] with a [[Definition:Lower Set|lower set]].
+}}
+{{axiom | n = 3
+ | t = $C$ is the intersection of its upper closure with its lower closure.
+}}
+{{end-axiom}}
+\end{theorem}
+
+\begin{proof}
+=== $(2)$ implies $(1)$ ===
+Follows from [[Upper Set is Convex]], [[Lower Set is Convex]], and [[Intersection of Convex Sets is Convex Set (Order Theory)]].
+{{qed|lemma}}
+=== $(1)$ implies $(3)$ ===
+{{MissingLinks}}
+Let $C$ be a convex set in $S$.
+Let $U$ and $L$ be the upper and lower closures of $C$, respectively.
+Since $C \subseteq U$ and $C \subseteq L$:
+:$C \subseteq U \cap L$.
+Let $p \in U \cap L$.
+Then $a \preceq p \preceq b$ for some $a, b \in C$.
+Since $C$ is convex, $p \in C$.
+Since this holds for all $p \in U \cap L$:
+:$U \cap L \subseteq C$.
+Since we know that $C \subseteq U \cap L$, $C = U \cap L$.
+{{qed|lemma}}
+=== $(3)$ implies $(2)$ ===
+Follows from [[Upper Closure is Upper Set]] and [[Lower Closure is Lower Set]].
+{{qed}}
+[[Category:Convex Sets]]
+2rudyxkfvqqz8vkpr8bjamysr5o11lu
+\end{proof}<|endoftext|>
+\section{Constructive Dilemma/Formulation 3}
+Tags: Constructive Dilemma
+
+\begin{theorem}
+:$\paren {p \implies q} \land \paren {r \implies s}, p \lor r \vdash q \lor s$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\paren {p \implies q} \land \paren {r \implies s}, p \lor r \vdash q \lor s}}
+{{Premise|1|\paren {p \implies q} \land \paren {r \implies s} }}
+{{Premise|2|p \lor r}}
+{{Conjunction|3|1, 2|\paren {p \lor r} \land \paren {p \implies q} \land \paren {r \implies s}|2|1|[[Conjunction is Associative|Associativity is implicit]]}}
+{{TheoremIntro|4|\paren {\paren {p \lor r} \land \paren {p \implies q} \land \paren {r \implies s} } \implies \paren {q \lor s}|[[Constructive Dilemma/Formulation 2|Constructive Dilemma: Formulation 2]]}}
+{{ModusPonens|5|1, 2|q \lor s|4|3}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Upper Closure is Upper Set}
+Tags: Upper Sets, Upper Closures
+
+\begin{theorem}
+Let $(S, \preceq, \tau)$ be an [[Definition:Ordered Set|ordered set]].
+Let $T$ be a [[Definition:subset|subset]] of $S$.
+Let $U$ be the [[Definition:Upper Closure|upper closure]] of $T$.
+Then $U$ is an [[Definition:Upper Set|upper set]].
+\end{theorem}
+
+\begin{proof}
+Let $a \in U$.
+Let $b \in S$ with $a \preceq b$.
+By the definition of [[Definition:Upper Closure|upper closure]], there is a $t \in T$ such that $t \preceq a$.
+By [[Definition:Transitive Relation|transitivity]], $t \preceq b$.
+Thus, again by the definition of upper closure, $b \in U$.
+Since this holds for all such $a$ and $b$, $U$ is an [[Definition:Upper Set|upper set]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Destructive Dilemma/Formulation 1/Proof 3}
+Tags: Destructive Dilemma
+
+\begin{theorem}
+:$p \implies q, r \implies s \vdash \neg q \lor \neg s \implies \neg p \lor \neg r$
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]] to the proposition.
+As can be seen for all [[Definition:Boolean Interpretation|boolean interpretations]] by inspection, where the [[Definition:Truth Value|truth value]] under the [[Definition:Main Connective (Propositional Logic)|main connective]] on the {{LHS}} is $T$, that under the one on the {{RHS}} is also $T$:
+: $\begin{array}{|ccccccc||ccccccccccc|} \hline
+(p & \implies & q) & \land & (r & \implies & s) & (\neg & q & \lor & \neg & s) & \implies & (\neg & p & \lor & \neg & r) \\
+\hline
+F & T & F & T & F & T & F & T & F & T & T & F & T & T & F & T & T & F \\
+F & T & F & T & F & T & T & T & F & T & F & T & T & T & F & T & T & F \\
+F & T & F & F & T & F & F & T & F & T & T & F & T & T & F & T & F & T \\
+F & T & F & T & T & T & T & T & F & T & F & T & T & T & F & T & F & T \\
+F & T & T & T & F & T & F & F & T & T & T & F & T & T & F & T & T & F \\
+F & T & T & T & F & T & T & F & T & F & F & T & T & T & F & T & T & F \\
+F & T & T & F & T & F & F & F & T & T & T & F & T & T & F & T & F & T \\
+F & T & T & T & T & T & T & F & T & F & F & T & T & T & F & T & F & T \\
+T & F & F & F & F & T & F & T & F & T & T & F & T & F & T & T & T & F \\
+T & F & F & F & F & T & T & T & F & T & F & T & T & F & T & T & T & F \\
+T & F & F & F & T & F & F & T & F & T & T & F & F & F & T & F & F & T \\
+T & F & F & F & T & T & T & T & F & T & F & T & F & F & T & F & F & T \\
+T & T & T & T & F & T & F & F & T & T & T & F & T & F & T & T & T & F \\
+T & T & T & T & F & T & T & F & T & F & F & T & T & F & T & T & T & F \\
+T & T & T & F & T & F & F & F & T & T & T & F & F & F & T & F & F & T \\
+T & T & T & T & T & T & T & F & T & F & F & T & T & F & T & F & F & T \\
+\hline
+\end{array}$
+Hence the result.
+{{qed}}
+Note that the two [[Definition:Propositional Formula|formulas]] are not [[Definition:Logical Equivalence|equivalent]], as the relevant columns do not match exactly.
+\end{proof}<|endoftext|>
+\section{Destructive Dilemma/Formulation 2}
+Tags: Destructive Dilemma
+
+\begin{theorem}
+:$\paren {p \implies q} \land \paren {r \implies s}, \neg q \lor \neg s \vdash \neg p \lor \neg r$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\paren {p \implies q} \land \paren {r \implies s}, \neg q \lor \neg s \vdash \neg p \lor \neg r}}
+{{Premise|1|\paren {p \implies q} \land \paren {r \implies s} }}
+{{Premise|2|\neg q \lor \neg s}}
+{{Simplification|3|1|p \implies q|1|1}}
+{{Simplification|4|1|r \implies s|1|2}}
+{{Assumption|5|\neg q}}
+{{ModusTollens|6|1, 5|\neg p|3|5}}
+{{Addition|7|1, 5|\neg p \lor \neg r|6|1}}
+{{Assumption|8|\neg s}}
+{{ModusTollens|9|1, 8|\neg r|4|8}}
+{{Addition|10|1, 8|\neg p \lor \neg r|9|2}}
+{{ProofByCases|11|1, 2|\neg p \lor \neg r|2|5|7|8|10}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Open Ray is Open in GO-Space/Definition 1}
+Tags: Generalized Ordered Spaces
+
+\begin{theorem}
+Let $\struct {S, \preceq, \tau}$ be a [[Definition:Generalized Ordered Space|generalized ordered space]].
+Let $p \in S$.
+Then:
+:$p^\prec$ and $p^\succ$ are [[Definition:Open Set (Topology)|$\tau$-open]]
+where:
+:$p^\prec$ is the [[Definition:Strict Lower Closure of Element|strict lower closure]] of $p$
+:$p^\succ$ is the [[Definition:Strict Upper Closure of Element|strict upper closure]] of $p$.
+\end{theorem}
+
+\begin{proof}
+We will prove that $U = p^\succ$ is [[Definition:Open Set (Topology)|$\tau$-open]].
+That $p^\prec$ is [[Definition:Open Set (Topology)|$\tau$-open]] will follow by duality.
+Let $u \in U$.
+Since $p \notin U$, $p \ne u$.
+By the definition of GO-space, $\tau$ is Hausdorff, and therefore $T_1$.
+Thus by the definition of GO-space, there is an open, convex set $M$ such that $u \in M$ and $p \notin M$.
+Next we will show that $M \subseteq U$:
+Let $x \in S \setminus U$.
+Then $x \preceq p \preceq u$.
+Suppose for the sake of contradiction that $x \in M$.
+Since $x, u \in M$, $p \in M$ because $M$ is convex, contradicting the choice of $M$.
+Thus $x \notin M$.
+Since this holds for all $x \in S \setminus U$, $M \subseteq U$.
+Thus $U$ contains a neighborhood of each of its points, so it is open.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Upper and Lower Closures of Open Set in GO-Space are Open}
+Tags: Topology, Total Orderings
+
+\begin{theorem}
+Let $\left({X, \preceq, \tau}\right)$ be a [[Definition:Generalized Ordered Space/Definition 1|Generalized Ordered Space/Definition 1]].
+Let $A$ be open in $X$.
+Then the upper and lower closures of $A$ are open.
+\end{theorem}
+
+\begin{proof}
+We will show that the upper closure $U$ of $A$ is open.
+The lower closure can be proven open by the same method.
+By the definition of upper closure:
+:$U = \left\{ {u \in X: \exists a \in A: a \preceq u}\right\}$
+But then:
+{{begin-eqn}}
+{{eqn | l = U
+ | r = \left\{ {u \in X: \left({u \in A}\right) \lor \left({\exists a \in A: a \prec u}\right) }\right\}
+}}
+{{eqn | r = A \cup \bigcup \left\{ {a^\succ: a \in A }\right\}
+}}
+{{end-eqn}}
+where $a^\succ$ denotes the [[Definition:Strict Upper Closure of Element|strict upper closure]] of $a$.
+By [[Open Ray is Open in GO-Space/Definition 1]], each $a^\succ$ is open.
+Thus $U$ is a union of open sets.
+Thus $U$ is open by the definition of a topology.
+{{qed}}
+[[Category:Topology]]
+[[Category:Total Orderings]]
+39ofm2hzen1z5lbb1anoktvpftjwa7g
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Generalized Ordered Space/Definition 1 implies Definition 3}
+Tags: Equivalence of Definitions of Generalized Ordered Space
+
+\begin{theorem}
+Let $\struct {S, \preceq, \tau}$ be a [[Definition:Generalized Ordered Space/Definition 1|generalized ordered space by Definition 1]]:
+{{:Definition:Generalized Ordered Space/Definition 1}}
+Then $\struct {S, \preceq, \tau}$ is a [[Definition:Generalized Ordered Space/Definition 3|generalized ordered space by Definition 3]]:
+{{:Definition:Generalized Ordered Space/Definition 3}}
+\end{theorem}
+
+\begin{proof}
+
+Let $\BB$ be a [[Definition:Basis (Topology)|basis]] for $\tau$ consisting of [[Definition:Convex Set (Order Theory)|convex sets]].
+Let:
+:$\SS = \set {U^\succeq: U \in \BB} \cup \set {U^\preceq: U \in \BB}$
+where $U^\succeq$ and $U^\preceq$ denote the [[Definition:Upper Closure|upper closure]] and [[Definition:Lower Closure|lower closure]] respectively of $U$.
+By [[Upper Closure is Upper Set]] and [[Lower Closure is Lower Set]], the [[Definition:Element|elements]] of $\SS$ are [[Definition:Upper Set|upper]] and [[Definition:Lower Set|lower sets]].
+It is to be shown that $\SS$ is a [[Definition:Sub-Basis|sub-basis]] for $\tau$.
+By [[Upper and Lower Closures of Open Set in GO-Space are Open]]:
+:$\SS \subseteq \tau$
+By [[Convex Set Characterization (Order Theory)]], each [[Definition:Element|element]] of $\BB$ is the [[Definition:Set Intersection|intersection]] of its [[Definition:Upper Closure|upper closure]] with its [[Definition:Lower Closure|lower closure]].
+Thus each [[Definition:Element|element]] of $\BB$ is generated by $\SS$.
+{{Disambiguate|Definition:Generated}}
+Thus $\SS$ is a [[Definition:Sub-Basis|sub-basis]] for $\tau$.
+{{qed}}
+
+[[Category:Equivalence of Definitions of Generalized Ordered Space]]
+bctoewd1tp2ms06qszp7bg85bfvzjyv
+\end{proof}<|endoftext|>
+\section{Upper Set is Convex}
+Tags: Upper Sets, Convex Sets
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $T \subseteq S$ be an [[Definition:Upper Set|upper set]].
+Then $T$ is [[Definition:Convex Set (Order Theory)|convex]] in $S$.
+\end{theorem}
+
+\begin{proof}
+Let $a, c \in T$.
+Let $b \in S$.
+Let $a \preceq b \preceq c$.
+Since:
+:$a \in T$
+:$a \preceq b$
+:$T$ is an [[Definition:Upper Set|upper set]]
+it follows that:
+:$b \in T$
+This holds for all such $a$, $b$, and $c$.
+Therefore, by definition, $T$ is [[Definition:Convex Set (Order Theory)|convex]] in $S$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Lower Set is Convex}
+Tags: Lower Sets, Convex Sets
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $T \subseteq S$ be a [[Definition:Lower Set|lower set]].
+Then $T$ is [[Definition:Convex Set (Order Theory)|convex]] in $S$.
+\end{theorem}
+
+\begin{proof}
+Let $a, c \in T$.
+Let $b \in S$.
+Let $a \preceq b \preceq c$.
+Since:
+:$c \in T$
+:$b \preceq c$
+:$T$ is a [[Definition:Lower Set|lower set]]
+it follows that:
+:$b \in T$
+This holds for all such $a$, $b$, and $c$.
+Hence, by definition, $T$ is [[Definition:Convex Set (Order Theory)|convex]] in $S$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{GO-Space Embeds Densely into Linearly Ordered Space}
+Tags: Generalized Ordered Spaces, Linearly Ordered Spaces
+
+\begin{theorem}
+Let $\left({Y, \preceq, \tau}\right)$ be a [[Definition:Generalized Ordered Space/Definition 3|generalized ordered space (GO-space) by Definition 3]].
+That is:
+: let $\left({Y, \tau}\right)$ be a [[Definition:Hausdorff Space|Hausdorff space]]
+and:
+: let $\tau$ have a [[Definition:Sub-Basis|sub-basis]] consisting of [[Definition:Upper Set|upper sets]] and [[Definition:Lower Set|lower sets]] relative to $\preceq$.
+Then $\left({Y, \preceq, \tau}\right)$ is a [[Definition:Generalized Ordered Space/Definition 2|GO-space by Definition 2]].
+That is, there is a [[Definition:Linearly Ordered Space|linearly ordered space]] $\left({X, \preceq', \tau'}\right)$ and a [[Definition:Mapping|mapping]] from $Y$ to $X$ which is an [[Definition:Order Embedding|order embedding]] and a [[Definition:Topological Embedding|topological embedding]].
+\end{theorem}
+
+\begin{proof}
+
+Let $X$ be the disjoint union of $Y$ with the set of all lower sets $L$ in $Y$ such that $L$ and $Y \setminus L$ are open and nonempty and either:
+:$L$ has a maximum, and $Y\setminus L$ does not have a minimum, or
+:$Y \setminus L$ has a minimum, and $L$ does not have a maximum.
+Let $\phi:Y \to X$ be the inclusion mapping.
+In the following, let $y$, $y_1$, $y_2$, and $y_3$ represent elements of $Y$, and let $L$, $L_1$, $L_2$, and $L_3$ represent lower sets of $Y$ that are members of $X$.
+Define a relation $\preceq'$ extending $\preceq$ by letting:
+:$y_1 \preceq' y_2 \iff y_1 \preceq y_2$
+:$y \preceq' L \iff y \in L$
+:$L_1 \preceq' L_2 \iff L_1 \subseteq L_2$
+:$L \preceq' y \iff y \in Y \setminus L$
+By [[Union of Total Ordering with Lower Sets is Total Ordering]] and [[Restriction of Total Ordering is Total Ordering]], $\preceq'$ is a [[Definition:Total Ordering|total ordering]].
+Because $y_1 \preceq' y_2$ iff $y_1 \preceq y_2$, $\phi$ is an [[Definition:Order Embedding|order embedding]].
+Let $\tau'$ be the $\preceq'$ [[Definition:Order Topology|order topology]] on $X$.
+=== $\phi$ is a topological embedding of $\left({Y, \tau}\right)$ into $\left({X, \tau'}\right)$ ===
+Let $L^\succeq$ and $L^\succ$ represent the [[Definition:Upper Closure of Subset|upper closure]] and [[Definition:Strict Upper Closure of Subset|strict upper closure]] of $L$ with respect to $\preceq$.
+Let $L^{\succeq'}$ and $L^{\succ'}$ represent the [[Definition:Upper Closure of Subset|upper closure]] and [[Definition:Strict Upper Closure of Subset|strict upper closure]] with respect to $\preceq'$.
+Let $L^\preceq$ and $L^\prec$ represent the [[Definition:Lower Closure of Subset|lower closure]] and [[Definition:Strict Lower Closure of Subset|strict lower closure]] of $L$ with respect to $\preceq$.
+Let $L^{\preceq'}$ and $L^{\prec'}$ represent the [[Definition:Lower Closure of Subset|lower closure]] and [[Definition:Strict Lower Closure of Subset|strict lower closure]] with respect to $\preceq'$.
+Open rays from elements of $Y$ are $\tau$-open by [[Open Ray is Open in GO-Space]].
+$L^{\preceq'} \cap Y = L$, which is open.
+$L^{\succeq'} \cap Y = Y \setminus L$, which is open.
+Thus the subspace topology is coarser than $\tau$.
+Let $U$ be an open upper set in $Y$ with $\varnothing \subsetneqq U \subsetneqq Y$.
+Let $U$ have no minimum.
+Then by [[Upper Set with no Smallest Element is Open in GO-Space]], $U$ is open in the [[Definition:Subspace Topology|subspace topology]].
+Let $Y \setminus U$ have a maximum $p$.
+Then $U = p^\succ = Y \cap p^{\succ'}$, which is open in the [[Definition:Subspace Topology|subspace topology]].
+Otherwise, by [[Lower Set with no Greatest Element is Open in GO-Space]], $Y \setminus U$ is open.
+Therefore $Y \setminus U \in X$.
+Then:
+:$U = Y \cap \left({Y \setminus U}\right)^{\succ'}$
+A similar argument works for open lower sets.
+Thus it follows that the [[Definition:Subspace Topology|subspace topology]] is finer than $\tau$.
+Thus they are equal by definition of [[Definition:Set Equality/Definition 2|set equality]].
+{{qed|lemma}}
+=== $Y$ is dense in $X$ ===
+Let $L \in X \setminus Y$.
+By the definition of $X$, $L$ and $Y \setminus L$ are non-empty.
+So:
+: $L$ is $\preceq'$-preceded by at least one element of $Y$
+: $L$ is $\preceq'$-succeeded by at least one element of $Y$.
+Thus every open ray in $X$ containing $L$ contains an element of $Y$.
+Let $x_1, x_2 \in X$ such that $x_1 \prec' L \prec' x_2$.
+By the definition of $X$, either:
+:$L$ has a $\preceq$-greatest element and $Y \setminus L$ has no $\preceq$-smallest element
+or:
+:$L$ has no $\preceq$-greatest element and $Y \setminus L$ has a $\preceq$-smallest element.
+Suppose that $L$ has a $\preceq$-greatest element and $Y \setminus L$ has no $\preceq$-smallest element.
+Let $x_2 \in Y$.
+Then $x_2 \in Y \setminus L$.
+Since $Y \setminus L$ has no $\preceq$-smallest element, there is an element $q\in Y \setminus L$ such that $q \prec x_2$.
+Then $x_1 \prec' L \prec' q \prec' x_2$.
+Therefore:
+:$q \in Y \cap \left({x_1 \,.\,.\, x_2}\right)$
+On the other hand, let $x_2 \notin Y$.
+Then $L \subsetneqq x_2$.
+Then there is some $q \in x_2 \setminus L$.
+Therefore:
+:$x_1 \prec' L \prec' q \prec' x_2$
+In particular:
+:$q \in Y \cap \left({x_1 \,.\,.\, x_2}\right)$
+If on the other hand we had supposed that $L$ has no $\preceq$-greatest element and $Y \setminus L$ has a $\preceq$-smallest element, we could obtain similar results, with $x_1$ taking on the role we have given $x_2$.
+Thus in any case, $L$ is an adherent point of $Y$.
+Since every element of $X \setminus Y$ is an adherent point of $Y$, $Y$ is dense in $X$.
+Thus the inclusion map from $Y$ to $X$ is a [[Definition:Topological Embedding|topological embedding]] and an [[Definition:Order Embedding|order embedding]] of $\left({Y, \preceq, \tau}\right)$ as a dense subspace of $\left({X, \preceq', \tau'}\right)$.
+{{qed}}
+
+[[Category:Generalized Ordered Spaces]]
+[[Category:Linearly Ordered Spaces]]
+hn4l61n82lai0amm0ngzstkocscdrn5
+\end{proof}<|endoftext|>
+\section{Upper Set with no Minimal Element}
+Tags: Upper Sets
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $U \subseteq S$.
+Then:
+:$U$ is an [[Definition:Upper Set|upper set]] in $S$ with no [[Definition:Minimal Element|minimal element]]
+{{iff}}:
+:$\displaystyle U = \bigcup \set {u^\succ: u \in U}$
+where $u^\succ$ is the [[Definition:Strict Upper Closure|strict upper closure]] of $u$.
+\end{theorem}
+
+\begin{proof}
+=== Forward implication ===
+Let $U$ be an [[Definition:Upper Set|upper set]] in $S$ with no [[Definition:Minimal Element|minimal element]].
+Then by the definition of [[Definition:Upper Set|upper set]]:
+:$\displaystyle \bigcup \set {u^\succ: u \in U} \subseteq U$
+Let $x \in U$.
+Since $U$ has no [[Definition:Minimal Element|minimal element]], $x$ is not [[Definition:Minimal Element|minimal]].
+Thus there is a $u \in U$ such that $u \prec x$.
+Then $x \in u^\succ$, so:
+:$\displaystyle x \in \bigcup \set {u^\succ: u \in U }$
+Since this holds for all $x \in U$:
+:$\displaystyle U \subseteq \bigcup \set {u^\succ: u \in U}$
+Thus the theorem holds by definition of [[Definition:Set Equality/Definition 2|set equality]].
+{{qed|lemma}}
+=== Reverse implication ===
+Let:
+: $\displaystyle U = \bigcup \set {u^\succ: u \in U}$
+Then:
+:$\forall u \in U: u^\succ \subseteq U$
+so $U$ is an [[Definition:Upper Set|upper set]].
+Furthermore:
+:$\forall x \in U: \exists u \in U: x \in u^\succ$
+But then:
+:$u \prec x$
+so $x$ is not [[Definition:Minimal Element|minimal]].
+Since this holds for all $x \in U$, $U$ has no [[Definition:Minimal Element|minimal element]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Lower Sets in Totally Ordered Set form Nest}
+Tags: Lower Sets, Total Orderings, Nests
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+Let $\mathcal L$ be a [[Definition:set|set]] of [[Definition:Lower Set|lower sets]] in $S$.
+Then $\mathcal L$ is a [[Definition:Nest|nest]].
+That is, $\mathcal L$ is [[Definition:Totally Ordered Set|totally ordered]] by $\subseteq$.
+\end{theorem}
+
+\begin{proof}
+Let $L, M \in \mathcal L$.
+Suppose that $M \not\subseteq L$.
+Then for some $x \in M$: $x \notin L$.
+Let $y \in L$.
+Then since $\preceq$ is a [[Definition:Total Ordering|total ordering]], $x \preceq y$ or $y \preceq x$.
+If $x \preceq y$, then since $L$ is a [[Definition:Lower Set|lower set]]: $x \in L$, a contradiction.
+Thus $y \preceq x$.
+Since $M$ is a lower set, $y \in M$.
+Since this holds for all $y \in L$, $L \subseteq M$.
+Hence, for all $L, M \in \mathcal L$:
+:$M \subseteq L$ or $L \subseteq M$
+That is, $\mathcal L$ is a [[Definition:Nest|nest]].
+{{qed}}
+[[Category:Lower Sets]]
+[[Category:Total Orderings]]
+[[Category:Nests]]
+sxailfjx37zyldndgzq5xmy6y0j176n
+\end{proof}<|endoftext|>
+\section{Exclusive Or is Self-Inverse}
+Tags: Exclusive Or
+
+\begin{theorem}
+:$\paren {p \oplus q} \oplus q \dashv \vdash p$
+where $\oplus$ denotes the [[Definition:Exclusive Or|exclusive or operator]].
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]].
+As can be seen by inspection, the [[Definition:Truth Value|truth values]] under the [[Definition:Main Connective (Propositional Logic)|main connective]] on the {{LHS}} match those for $p$ on the {{RHS}} for all [[Definition:Boolean Interpretation|boolean interpretations]]:
+$\begin{array}{|ccccc||c|} \hline
+(p & \oplus & q) & \oplus & q & p \\
+\hline
+\F & \F & \F & \F & \F & \F \\
+\F & \T & \T & \F & \T & \F \\
+\T & \T & \F & \T & \F & \T \\
+\T & \F & \T & \T & \T & \T \\
+\hline
+\end{array}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Conjunction has no Inverse}
+Tags: Conjunction
+
+\begin{theorem}
+Let $\land$ denote the [[Definition:Conjunction|conjunction operation]] of [[Definition:Propositional Logic|propositional logic]].
+Then there exists no [[Definition:Binary Logical Connective|binary logical connective]] $\circ$ such that:
+:$(1): \quad \forall p, q \in \left\{{T, F}\right\}: \left({p \land q}\right) \circ q = p$
+\end{theorem}
+
+\begin{proof}
+This will be [[Proof by Contradiction|proven by contradiction]].
+Let such an operation $\circ$ exist.
+Let $f^\circ: \mathbb B^2 \to \mathbb B$ be the associated [[Definition:Truth Function|truth function]].
+Suppose now that $q = F$, while $p$ remains unspecified.
+Then:
+:$p \land q = f^\land \left({p, F}\right) = F$
+where $f^\land$ is the [[Definition:Truth Function|truth function]] of [[Definition:Conjunction|conjunction]].
+It does not matter what $p$ is, for:
+:$f^\land \left({T, F}\right) = f^\land \left({F, F}\right) = F$
+Hence, for $\left({p \land q}\right) \circ q = p$ to hold, $f^\circ$ must satisfy:
+:$f^\circ \left({F, F}\right) = p$
+However, because $p$ could still be either $T$ or $F$, this identity cannot always hold.
+Therefore, $\circ$ cannot exist.
+{{qed}}
+[[Category:Conjunction]]
+iz8xrrjo16at5wz9wiqbjhnuzh6k4nl
+\end{proof}<|endoftext|>
+\section{Disjunction has no Inverse}
+Tags: Disjunction
+
+\begin{theorem}
+Let $\lor$ denote the [[Definition:Disjunction|disjunction operation]] of [[Definition:Propositional Logic|propositional logic]].
+Then there exists no [[Definition:Binary Logical Connective|binary logical connective]] $\circ$ such that:
+:$(1): \quad \forall p, q \in \left\{{T, F}\right\}: \left({p \lor q}\right) \circ q = p$
+\end{theorem}
+
+\begin{proof}
+Let $q$ be [[Definition:True|true]].
+Then $p \lor q = T$, whatever [[Definition:Truth Value|truth value]] $p$ holds.
+Either $T \circ T = T$ or $T \circ T = F$, but not both.
+So if $q = T$ either:
+:$\left({p \lor q}\right) \circ q = T$
+or:
+:$\left({p \lor q}\right) \circ q = F$
+If the first, then $(1)$ does not hold when $p = F$.
+If the second, then $(1)$ does not hold when $p = T$.
+Hence there can be no such $\circ$.
+{{qed}}
+{{improve|I'm not proud of this - it needs to be couched in more rigorous language. Also possible to prove it by truth table.}}
+[[Category:Disjunction]]
+dnjqp2uu46fx7uxt68yrblp9sbhwmor
+\end{proof}<|endoftext|>
+\section{Binary Logical Connectives with Inverse}
+Tags: Exclusive Or, Biconditional, Propositional Logic
+
+\begin{theorem}
+Let $\circ$ be a [[Definition:Binary Logical Connective|binary logical connective]].
+Then there exists another [[Definition:Binary Logical Connective|binary logical connective]] $*$ such that:
+:$\forall p, q \in \set {\F, \T}: \paren {p \circ q} * q \dashv \vdash p \dashv \vdash q * \paren {p \circ q}$
+{{iff}} $\circ$ is either:
+:$(1): \quad$ the [[Definition:Exclusive Or|exclusive or operator]]
+or:
+:$(2): \quad$ the [[Definition:Biconditional|biconditional operator]].
+That is, the only [[Definition:Truth Function|truth functions]] that have an [[Definition:Inverse Operation|inverse operation]] are the [[Definition:Exclusive Or|exclusive or]] and the [[Definition:Biconditional|biconditional]].
+\end{theorem}
+
+\begin{proof}
+=== Necessary Condition ===
+Let $\circ$ be a [[Definition:Binary Logical Connective|binary logical connective]] such that there exists $*$ such that:
+:$\paren {p \circ q} * q \dashv \vdash p$
+That is, by definition (and minor abuse of notation):
+:$\forall p, q \in \set {\F, \T}: \paren {p \circ q} * q = p$
+For reference purposes, let us list from [[Binary Truth Functions]] the complete truth table containing all of the [[Definition:Binary Logical Connective|binary logical connectives]]:
+$\begin{array}{|r|cccc|} \hline
+p & \T & \T & \F & \F \\
+q & \T & \F & \T & \F \\
+\hline
+\map {f_\T} {p, q} & \T & \T & \T & \T \\
+p \lor q & \T & \T & \T & \F \\
+p \impliedby q & \T & \T & \F & \T \\
+\map {\pr_1} {p, q} & \T & \T & \F & \F \\
+p \implies q & \T & \F & \T & \T \\
+\map {\pr_2} {p, q} & \T & \F & \T & \F \\
+p \iff q & \T & \F & \F & \T \\
+p \land q & \T & \F & \F & \F \\
+p \uparrow q & \F & \T & \T & \T \\
+\map \neg {p \iff q} & \F & \T & \T & \F \\
+\map {\overline {\pr_2} } {p, q} & \F & \T & \F & \T \\
+\map \neg {p \implies q} & \F & \T & \F & \F \\
+\map {\overline {\pr_1} } {p, q} & \F & \F & \T & \T \\
+\map \neg {p \impliedby q} & \F & \F & \T & \F \\
+p \downarrow q & \F & \F & \F & \T \\
+\map {f_\F} {p, q} & \F & \F & \F & \F \\
+\hline
+\end{array}$
+Suppose that for some $q \in \set {\F, \T}$:
+:$\paren {p \circ q}_{p = \F} = \paren {p \circ q}_{p = \T}$
+Then:
+:$\paren {\paren {p \circ q} * q}_{p = \F} = \paren {\paren {p \circ q} * q}_{p = \T}$
+and so either:
+:$\paren {\paren {p \circ q} * q}_{p = \F} \ne p$
+or:
+:$\paren {\paren {p \circ q} * q}_{p = \T} \ne p$
+Thus for $\circ$ to have an [[Definition:Inverse Operation|inverse operation]] it is necessary for $\F \circ q \ne \T \circ q$.
+This eliminates:
+{{begin-eqn}}
+{{eqn | o =
+ | r = \map {f_\T} {p, q}
+ | c = as $p \circ q = \T$ for all values of $p$ and $q$
+}}
+{{eqn | o =
+ | r = p \lor q
+ | c = as $p \circ q = \T$ for $q = \T$
+}}
+{{eqn | o =
+ | r = p \impliedby q
+ | c = as $p \circ q = \T$ for $q = \F$
+}}
+{{eqn | o =
+ | r = p \implies q
+ | c = as $p \circ q = \T$ for $q = \T$
+}}
+{{eqn | o =
+ | r = \map {\pr_2} {p, q}
+ | c = as $p \circ q = \T$ for $q = \T$ and also $p \circ q = \F$ for $q = \F$
+}}
+{{eqn | o =
+ | r = p \land q
+ | c = as $p \circ q = \F$ for $q = \F$
+}}
+{{eqn | o =
+ | r = p \uparrow q
+ | c = as $p \circ q = \T$ for $q = \F$
+}}
+{{eqn | o =
+ | r = \map {\overline {\pr_2} } {p, q}
+ | c = as $p \circ q = \T$ for $q = \F$ and also $p \circ q = \F$ for $q = \T$
+}}
+{{eqn | o =
+ | r = \map \neg {p \implies q}
+ | c = as $p \circ q = \F$ for $q = \T$
+}}
+{{eqn | o =
+ | r = \map \neg {p \impliedby q}
+ | c = as $p \circ q = \F$ for $q = \F$
+}}
+{{eqn | o =
+ | r = p \downarrow q
+ | c = as $p \circ q = \F$ for $q = \T$
+}}
+{{eqn | o =
+ | r = \map {f_\F} {p, q}
+ | c = as $p \circ q = \F$ for all values of $p$ and $q$
+}}
+{{end-eqn}}
+The remaining [[Definition:Binary Logical Connective|connectives]] which may have [[Definition:Inverse Operation|inverses]] are:
+$\begin{array}{|r|cccc|} \hline
+p & \T & \T & \F & \F \\
+q & \T & \F & \T & \F \\
+\hline
+\map {\pr_1} {p, q} & \T & \T & \F & \F \\
+p \iff q & \T & \F & \F & \T \\
+\map \neg {p \iff q} & \F & \T & \T & \F \\
+\map {\overline {\pr_1} } {p, q} & \F & \F & \T & \T \\
+\hline
+\end{array}$
+Suppose that for some $p \in \set {\F, \T}$:
+:$\paren {p \circ q}_{q = \F} = \paren {p \circ q}_{q = \T}$
+Then:
+:$\paren {q * \paren {p \circ q} }_{q = \F} = \paren {q * \paren {p \circ q} }_{q = \T}$
+and so either:
+:$\paren {q * \paren {p \circ q} }_{q = \F} \ne p$
+or:
+:$\paren {q * \paren {p \circ q} }_{q = \T} \ne p$
+This eliminates:
+{{begin-eqn}}
+{{eqn | o =
+ | r = \map {\pr_1} {p, q}
+ | c = as $p \circ q = \T$ for $p = \T$ and also $p \circ q = \F$ for $p = \F$
+}}
+{{eqn | o =
+ | r = \map {\overline {\pr_1} } {p, q}
+ | c = as $p \circ q = \T$ for $p = \F$ and also $p \circ q = \F$ for $p = \T$
+}}
+{{end-eqn}}
+We are left with [[Definition:Exclusive Or|exclusive or]] and the [[Definition:Biconditional|biconditional]].
+The result follows from [[Exclusive Or is Self-Inverse]] and [[Biconditional is Self-Inverse]].
+{{qed}}
+=== Sufficient Condition ===
+Let $\circ$ be the [[Definition:Exclusive Or|exclusive or operator]].
+Then by [[Exclusive Or is Self-Inverse]] it follows that:
+:$\paren {p \circ q} \circ q \dashv \vdash p$
+Thus, taking $* = \circ$, the [[Definition:Exclusive Or|exclusive or operation]] has an [[Definition:Inverse Operation|inverse operation]].
+Similarly, let $\circ$ be the [[Definition:Biconditional|biconditional operator]].
+Then by [[Biconditional is Self-Inverse]] it follows that:
+:$\paren {p \circ q} \circ q \dashv \vdash p$
+{{qed}}
+[[Category:Exclusive Or]]
+[[Category:Biconditional]]
+[[Category:Propositional Logic]]
+kbykk6g9uanajr5qrrir9gyc2juer2k
+\end{proof}<|endoftext|>
+\section{Biconditional is Self-Inverse}
+Tags: Biconditional
+
+\begin{theorem}
+:$\paren {p \iff q} \iff q \dashv \vdash p$
+where $\iff$ denotes the [[Definition:Biconditional|biconditional operator]].
+\end{theorem}
+
+\begin{proof}
+We apply the [[Method of Truth Tables]].
+As can be seen by inspection, the [[Definition:Truth Value|truth values]] under the [[Definition:Main Connective (Propositional Logic)|main connective]] on the {{LHS}} match those for $p$ on the {{RHS}} for all [[Definition:Boolean Interpretation|boolean interpretations]]:
+$\begin{array}{|ccccc||c|} \hline
+(p & \iff & q) & \iff & q & p \\
+\hline
+F & T & F & F & F & F \\
+F & F & T & F & T & F \\
+T & F & F & T & F & T \\
+T & T & T & T & T & T \\
+\hline
+\end{array}$
+{{qed}}
+[[Category:Biconditional]]
+5ybdy25cskm1eyv6h2mol5sojt7fmiv
+\end{proof}<|endoftext|>
+\section{Finite Chain is Order-Isomorphic to Finite Ordinal}
+Tags: Total Orderings, Ordinals
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $C$ be a [[Definition:Finite Set|finite]] [[Definition:Chain (Set Theory)|chain]] in $S$.
+Then for some finite [[Definition:Ordinal|ordinal]] $\mathbf n$:
+:$\left({C, {\preceq \restriction_C} }\right)$ is [[Definition:Order Isomorphism|order-isomorphic]] to $\mathbf n$.
+That is:
+:$\left({C, {\preceq \restriction_C} }\right)$ is [[Definition:Order Isomorphism|order-isomorphic]] to $\N_n$
+where $\N_n$ is the [[Definition:Initial Segment of Zero-Based Natural Numbers|initial segment of $\N$ determined by $n$]]:
+:$\N_n = \left\{ {k \in \N: k < n}\right\} = \left\{ {0, 1, \ldots, n - 1}\right\}$
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Finite Set|finite set]]:
+:there exists an $n \in \N$ such that there exists a [[Definition:Bijection|bijection]] $f: C \to \N_n$.
+This $n$ is [[Definition:Unique|unique]] by [[Equality of Natural Numbers]] and [[Set Equivalence is Equivalence Relation]].
+Define a [[Definition:Mapping|mapping]] $g: \N_n \to C$ [[Principle of Recursive Definition|recursively]] as:
+:$g \left({0}\right) = \min C$
+:$g \left({k + 1}\right) = \min \left({ C \setminus g \left({\N_k}\right) }\right)$
+{{explain|How is $\min C$ defined?}}
+{{finish}}
+\end{proof}<|endoftext|>
+\section{Complete Linearly Ordered Space is Compact}
+Tags: Order Topology
+
+\begin{theorem}
+Let $\left({X, \preceq, \tau}\right)$ be a [[Definition:Linearly Ordered Space|linearly ordered space]].
+Let $\left({X, \preceq}\right)$ be a [[Definition:Complete Lattice|complete lattice]].
+Then $\left({X, \tau}\right)$ is [[Definition:Compact Space|compact]].
+\end{theorem}
+
+\begin{proof}
+By [[Compactness from Basis]], it is sufficient to prove that an open cover of $X$ consisting of open intervals and rays has a finite subcover.
+Let $\mathcal A$ be an open cover of $X$ consisting of open rays and open intervals.
+Let $m = \inf X$. This infimum exists because $\left({X, \preceq}\right)$ is complete.
+Let $C$ be the set of all $x \in X$ such that a finite subset of $\mathcal A$ covers $\left[{m \,.\,.\, x}\right]$.
+$C$ is non-empty because $m \in C$.
+Let $s = \sup C$.
+Since $\mathcal A$ covers $X$, there is a $U \in \mathcal A$ such that $s \in U$.
+Then we must have $U = \left({a \,.\,.\, b}\right)$, $U = {\dot\uparrow} a$, or $U = {\dot\downarrow} b$.
+Suppose that $U = \left({a \,.\,.\, b}\right)$.
+Let $V \in \mathcal A$ contain $b$.
+Then by the definition of supremum, there is an $x \succ a$ such that there is a finite $\mathcal F \subseteq \mathcal A$ that covers $\left[{m \,.\,.\, x}\right]$.
+Then $\mathcal F \cup \left\{{U, V}\right\}$ covers $\left[{m \,.\,.\, b}\right]$, contradicting the fact that $s$ is an upper bound of $C$.
+Suppose next that $U = \dot\downarrow b$.
+Then for some $V \in \mathcal A$, $b \in V$.
+Then $\left[{m \,.\,.\, b}\right]$ is covered by $\left\{{U, V}\right\}$, contradicting the fact that $s$ is the supremum of $C$.
+Thus $U = \dot\uparrow a$.
+By the definition of supremum, $a$ is not an upper bound of $C$.
+So there is an $x \succ a$ such that there is a finite subset $\mathcal F$ of $\mathcal A$ that covers $\left[{m \,.\,.\, x}\right]$.
+Thus $\mathcal F \cup \left\{{U}\right\}$ is a finite subcover of $\mathcal A$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Condition for Well-Foundedness/Reverse Implication}
+Tags: Condition for Well-Foundedness
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Suppose that there is no [[Definition:Infinite Sequence|infinite sequence]] $\left \langle {a_n}\right \rangle$ of [[Definition:Element|elements]] of $S$ such that $\forall n \in \N: a_{n+1} \prec a_n$.
+Then $\left({S, \preceq}\right)$ is [[Definition:Well-Founded|well-founded]].
+\end{theorem}<|endoftext|>
+\section{Condition for Well-Foundedness/Reverse Implication/Proof 1}
+Tags: Condition for Well-Foundedness
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Suppose that there is no [[Definition:Infinite Sequence|infinite sequence]] $\sequence {a_n}$ of [[Definition:Element|elements]] of $S$ such that $\forall n \in \N: a_{n + 1} \prec a_n$.
+Then $\struct {S, \preceq}$ is [[Definition:Well-Founded|well-founded]].
+\end{theorem}
+
+\begin{proof}
+Suppose $\struct {S, \preceq}$ is not [[Definition:Well-Founded|well-founded]].
+So by definition there exists a [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] $T$ of $S$ which has no [[Definition:Minimal Element|minimal element]].
+Let $a \in T$.
+Since $a$ is not minimal in $T$, we can find $b \in T: b \prec a$.
+Since this holds for all $a \in T$, $\prec \restriction_{T \times T}$, the [[Definition:Restriction of Relation|restriction]] of $\prec$ to $T \times T$, is a [[Definition:Right-Total Relation|right-total]] [[Definition:Endorelation|endorelation]] on $T$.
+So, by the [[Axiom:Axiom of Dependent Choice/Right-Total|Axiom of Dependent Choice]], it follows that there is an [[Definition:Sequence|infinite sequence]] $\sequence {a_n}$ in $T$ such that $\forall n \in \N: a_{n + 1} \prec a_n$.
+{{qed|lemma}}
+\end{proof}<|endoftext|>
+\section{Condition for Well-Foundedness/Reverse Implication/Proof 2}
+Tags: Condition for Well-Foundedness
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Suppose that there is no [[Definition:Infinite Sequence|infinite sequence]] $\sequence {a_n}$ of [[Definition:Element|elements]] of $S$ such that:
+:$\forall n \in \N: a_{n + 1} \prec a_n$
+Then $\struct {S, \preceq}$ is [[Definition:Well-Founded|well-founded]].
+\end{theorem}
+
+\begin{proof}
+Suppose $\struct {S, \preceq}$ is not [[Definition:Well-Founded|well-founded]].
+Let $T \subseteq S$ have no [[Definition:Minimal Element|minimal element]].
+Let $a_0 \in T$.
+We have that $a_0$ is not [[Definition:Minimal Element|minimal]] in $T$.
+So:
+:$\exists a_1 \in T: a_1 \prec a_0$
+Similarly, $a_1$ is not [[Definition:Minimal Element|minimal]] in $T$.
+So:
+:$\exists a_2 \in T: a_2 \prec a_1$
+Let $a_{k + 1}$ be an arbitrary element for which $a_{k + 1} \prec a_k$.
+In order to allow this to be possible in the infinite case, it is necessary to invoke the [[Axiom:Axiom of Dependent Choice/Right-Total|Axiom of Dependent Choice]] as follows:
+Let $a_k \in T$.
+Then as $a_k$ is not [[Definition:Minimal Element|minimal]] in $T$:
+:$\exists a_{k + 1} \in T: a_{k + 1} \prec a_k$
+Hence by definition $\prec$ is a [[Definition:Right-Total Relation|right-total relation]].
+So, by the [[Axiom:Axiom of Dependent Choice/Right-Total|Axiom of Dependent Choice]], it follows that:
+:$\forall n \in \N: \exists a_n \in T: a_{n + 1} \prec a_n$
+Thus we have been able to construct an [[Definition:Sequence|infinite sequence]] $\sequence {a_n}$ in $T$ such that:
+:$\forall n \in \N: a_{n + 1} \prec a_n$.
+It follows by the [[Rule of Transposition]] that if there is no [[Definition:Infinite Sequence|infinite sequence]] $\sequence {a_n}$ of [[Definition:Element|elements]] of $S$ such that:
+:$\forall n \in \N: a_{n + 1} \prec a_n$
+then $\struct {S, \preceq}$ is [[Definition:Well-Founded|well-founded]].
+{{qed|lemma}}
+\end{proof}<|endoftext|>
+\section{Inversion Mapping on Ordered Group is Dual Order-Isomorphism}
+Tags: Ordered Groups, Inversion Mappings
+
+\begin{theorem}
+Let $\struct {G, \circ, \preceq}$ be an [[Definition:Ordered Group|ordered group]].
+Let $\iota: G \to G$ be the [[Definition:Inversion Mapping|inversion mapping]], defined by $\map \iota x = x^{-1}$.
+Then $\iota$ is a [[Definition:Dual Isomorphism (Order Theory)|dual order-isomorphism]].
+\end{theorem}
+
+\begin{proof}
+By [[Inversion Mapping is Involution]] and [[Involution is Permutation]], $\iota$ is a [[Definition:Permutation|permutation]] and so by definition [[Definition:Bijection|bijective]].
+Let $x, y \in G$ such that $x \prec y$.
+Then $y^{-1} \prec x^{-1}$ by [[Inversion Mapping Reverses Ordering in Ordered Group]].
+Thus $\map \iota y \prec \map \iota x$.
+Since this holds for all $x$ and $y$ with $x \prec y$, $\iota$ is [[Definition:Strictly Decreasing Mapping|strictly decreasing]].
+If $\map \iota x \prec \map \iota y$, then $\map \iota {\map \iota y} \prec \map \iota {\map \iota x}$ by the above.
+Thus by [[Inverse of Group Inverse]]: $y \prec x$.
+Therefore, $\iota$ reverses ordering in both directions, and is thus a [[Definition:Dual Isomorphism (Order Theory)|dual isomorphism]].
+{{qed}}
+[[Category:Ordered Groups]]
+[[Category:Inversion Mappings]]
+r4yxtzyk6j10d5wzp88nv29h4wf9ga9
+\end{proof}<|endoftext|>
+\section{Properties of Ordered Group/OG2/Proof 1}
+Tags: Ordered Groups
+
+\begin{theorem}
+Let $\left({G, \circ, \preceq}\right)$ be an [[Definition:Ordered Group|ordered group]] with [[Definition:Identity Element|identity]] $e$.
+Let $\prec$ be the [[Definition:Reflexive Reduction|reflexive reduction]] of $\preceq$.
+Let $x, y \in G$.
+Then the following equivalences hold:
+:$(\operatorname{OG}2.1):\quad x \preceq y \iff e \preceq y \circ x^{-1}$
+:$(\operatorname{OG}2.2):\quad x \preceq y \iff e \preceq x^{-1} \circ y$
+:$(\operatorname{OG}2.3):\quad x \preceq y \iff x \circ y^{-1} \preceq e$
+:$(\operatorname{OG}2.4):\quad x \preceq y \iff y^{-1} \circ x \preceq e$
+:$(\operatorname{OG}2.1'):\quad x \prec y \iff e \prec y \circ x^{-1}$
+:$(\operatorname{OG}2.2'):\quad x \prec y \iff e \prec x^{-1} \circ y$
+:$(\operatorname{OG}2.3'):\quad x \prec y \iff x \circ y^{-1} \prec e$
+:$(\operatorname{OG}2.4'):\quad x \prec y \iff y^{-1} \circ x \prec e$
+\end{theorem}
+
+\begin{proof}
+By the definition of an [[Definition:Ordered Group|ordered group]], $\preceq$ is a relation [[Definition:Relation Compatible with Operation|compatible]] with $\circ$.
+Thus by [[Properties of Relation Compatible with Group Operation/CRG2]], we obtain the first four results.
+By [[Reflexive Reduction of Relation Compatible with Group Operation is Compatible]], $\prec$ is compatible with $\circ$.
+Again by [[Properties of Relation Compatible with Group Operation/CRG2]], we obtain the remaining results.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Properties of Ordered Group/OG2/Proof 2}
+Tags: Ordered Groups
+
+\begin{theorem}
+Let $\left({G, \circ, \preceq}\right)$ be an [[Definition:Ordered Group|ordered group]] with [[Definition:Identity Element|identity]] $e$.
+Let $\prec$ be the [[Definition:Reflexive Reduction|reflexive reduction]] of $\preceq$.
+Let $x, y \in G$.
+Then the following equivalences hold:
+:$(\operatorname{OG}2.1):\quad x \preceq y \iff e \preceq y \circ x^{-1}$
+:$(\operatorname{OG}2.2):\quad x \preceq y \iff e \preceq x^{-1} \circ y$
+:$(\operatorname{OG}2.3):\quad x \preceq y \iff x \circ y^{-1} \preceq e$
+:$(\operatorname{OG}2.4):\quad x \preceq y \iff y^{-1} \circ x \preceq e$
+:$(\operatorname{OG}2.1'):\quad x \prec y \iff e \prec y \circ x^{-1}$
+:$(\operatorname{OG}2.2'):\quad x \prec y \iff e \prec x^{-1} \circ y$
+:$(\operatorname{OG}2.3'):\quad x \prec y \iff x \circ y^{-1} \prec e$
+:$(\operatorname{OG}2.4'):\quad x \prec y \iff y^{-1} \circ x \prec e$
+\end{theorem}
+
+\begin{proof}
+Each result follows from [[Properties of Ordered Group/OG1]]. For example, by [[Properties of Ordered Group/OG1]],
+:$x \preceq y \iff x \circ x^{-1} \preceq y \circ x^{-1}$
+Since $x \circ x^{-1} = e$:
+:$(\operatorname{OG}2.1):\quad x \preceq y \iff e \preceq y \circ x^{-1}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Properties of Ordered Group/OG4/Proof 1}
+Tags: Ordered Groups
+
+\begin{theorem}
+Let $\struct {G, \circ, \preceq}$ be an [[Definition:Ordered Group|ordered group]] with [[Definition:Identity Element|identity]] $e$.
+Let $x \in G$.
+Then the following equivalences hold:
+{{begin-axiom}}
+{{axiom | n = \operatorname {OG} 4.1
+ | m = x \preceq e \iff e \preceq x^{-1}
+}}
+{{axiom | n = \operatorname {OG} 4.2
+ | m = e \preceq x \iff x^{-1} \preceq e
+}}
+{{axiom | n = \operatorname {OG} 4.1'
+ | m = x \prec e \iff e \prec x^{-1}
+}}
+{{axiom | n = \operatorname {OG} 4.2'
+ | m = e \prec x \iff x^{-1} \prec e
+}}
+{{end-axiom}}
+\end{theorem}
+
+\begin{proof}
+By [[Inversion Mapping Reverses Ordering in Ordered Group]]:
+{{begin-axiom}}
+{{axiom | m = x \preceq e \iff e^{-1} \preceq x^{-1}
+}}
+{{axiom | m = e \preceq x \iff x^{-1} \preceq e^{-1}
+}}
+{{axiom | m = x \prec e \iff e^{-1} \prec x^{-1}
+}}
+{{axiom | m = e \prec x \iff x^{-1} \prec e^{-1}
+}}
+{{end-axiom}}
+Since $e^{-1} = e$, the theorem holds.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Properties of Ordered Group/OG4/Proof 2}
+Tags: Ordered Groups
+
+\begin{theorem}
+Let $\left({G, \circ, \preceq}\right)$ be an [[Definition:Ordered Group|ordered group]] with [[Definition:Identity Element|identity]] $e$.
+Let $x \in G$.
+Then the following equivalences hold:
+{{begin-axiom}}
+{{axiom|n = \operatorname{OG}4.1
+ |m = x \preceq e \iff e \preceq x^{-1}
+}}
+{{axiom|n = \operatorname{OG}4.2
+ |m = e \preceq x \iff x^{-1} \preceq e
+}}
+{{axiom|n = \operatorname{OG}4.1'
+ |m = x \prec e \iff e \prec x^{-1}
+}}
+{{axiom|n = \operatorname{OG}4.2'
+ |m = e \prec x \iff x^{-1} \prec e
+}}
+{{end-axiom}}
+\end{theorem}
+
+\begin{proof}
+By the definition of an [[Definition:Ordered Group|ordered group]], $\preceq$ is a relation [[Definition:Relation Compatible with Operation|compatible]] with $\circ$.
+Thus by [[Properties of Relation Compatible with Group Operation/CRG4]], we obtain the first two results:
+:$(\operatorname{OG}4.1):\quad x \preceq e \iff e \preceq x^{-1}$
+:$(\operatorname{OG}4.2):\quad e \preceq x \iff x^{-1} \preceq e$
+By [[Reflexive Reduction of Relation Compatible with Group Operation is Compatible]], $\prec$ is also compatible with $\circ$.
+Thus by again [[Properties of Relation Compatible with Group Operation/CRG4]], we obtain the remaining results:
+:$(\operatorname{OG}4.1'):\quad x \prec e \iff e \prec x^{-1}$
+:$(\operatorname{OG}4.2'):\quad e \prec x \iff x^{-1} \prec e$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Properties of Ordered Group/OG5/Proof 1}
+Tags: Ordered Groups
+
+\begin{theorem}
+Let $\left({G, \circ, \preceq}\right)$ be an [[Definition:Ordered Group|ordered group]] with [[Definition:Identity Element|identity]] $e$.
+Let $\prec$ be the [[Definition:Reflexive Reduction|reflexive reduction]] of $\preceq$.
+Let $x \in G$.
+Let $n \in \N_{>0}$ be a [[Definition:Strictly Positive Integer|strictly positive integer]].
+Then the following hold:
+:$x \preceq e \implies x^n \preceq e$
+:$e \preceq x \implies e \preceq x^n$
+:$x \prec e \implies x^n \prec e$
+:$e \prec x \implies e \prec x^n$
+\end{theorem}
+
+\begin{proof}
+By [[Power Function Strictly Preserves Ordering in Ordered Group]]:
+:$x \preceq e \implies x^n \preceq e^n$
+:$e \preceq x \implies e^n \preceq x^n$
+:$x \prec e \implies x^n \prec e^n$
+:$e \prec x \implies e^n \prec x^n$
+By [[Identity Element is Idempotent]], $e$ is [[Definition:Idempotent Element|idempotent]] with respect to $\circ$.
+Therefore by the definition of an [[Definition:Idempotent Element|idempotent element]], $e^n = e$.
+Thus the theorem holds.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Properties of Ordered Group/OG5/Proof 2}
+Tags: Ordered Groups
+
+\begin{theorem}
+Let $\left({G, \circ, \preceq}\right)$ be an [[Definition:Ordered Group|ordered group]] with [[Definition:Identity Element|identity]] $e$.
+Let $\prec$ be the [[Definition:Reflexive Reduction|reflexive reduction]] of $\preceq$.
+Let $x \in G$.
+Let $n \in \N_{>0}$ be a [[Definition:Strictly Positive Integer|strictly positive integer]].
+Then the following hold:
+:$x \preceq e \implies x^n \preceq e$
+:$e \preceq x \implies e \preceq x^n$
+:$x \prec e \implies x^n \prec e$
+:$e \prec x \implies e \prec x^n$
+\end{theorem}
+
+\begin{proof}
+By the definition of an [[Definition:Ordered Group|ordered group]], $\preceq$ is a [[Definition:Transitive Relation|transitive relation]] [[Definition:Relation Compatible with Operation|compatible]] with $\circ$.
+By [[Transitive Relation Compatible with Semigroup Operation Relates Powers of Related Elements]]:
+:$x \preceq e \implies x^n \preceq e^n$
+:$e \preceq x \implies e^n \preceq x^n$
+By [[Identity Element is Idempotent]], $e$ is [[Definition:Idempotent Element|idempotent]] with respect to $\circ$.
+Thus we obtain the first two results:
+:$x \preceq e \implies x^n \preceq e$
+:$e \preceq x \implies e \preceq x^n$
+By [[Reflexive Reduction of Relation Compatible with Group Operation is Compatible]], $\prec$ is [[Definition:Relation Compatible with Operation|compatible]] with $\circ$.
+By [[Reflexive Reduction of Transitive Antisymmetric Relation is Strict Ordering]], $\prec$ is [[Definition:Transitive Relation|transitive]].
+Thus by the same method as above, we obtain the remaining results:
+:$x \prec e \implies x^n \prec e$
+:$e \prec x \implies e \prec x^n$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Mapping from Totally Ordered Set is Dual Order Embedding iff Strictly Decreasing}
+Tags: Total Orderings, Order Embeddings
+
+\begin{theorem}
+Let $\left({S, \preceq_1}\right)$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+Let $\left({T, \preceq_2}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $\phi: S \to T$ be a [[Definition:Mapping|mapping]].
+Then $\phi$ is a [[Definition:Dual Order Embedding|dual order embedding]] {{iff}} $\phi$ is [[Definition:Strictly Decreasing Mapping|strictly decreasing]].
+That is:
+:$\forall x, y \in S: x \preceq_1 y \iff \phi \left({y}\right) \preceq_2 \phi \left({x}\right)$
+{{iff}}
+:$\forall x, y \in S: x \prec_1 y \implies \phi \left({y}\right) \prec_2 \phi \left({x}\right)$
+\end{theorem}
+
+\begin{proof}
+=== Forward Implication ===
+Let $\phi$ be a [[Definition:Dual Order Embedding|dual order embedding]].
+Then $\phi$ is an [[Definition:Order Embedding|order embedding]] of $\left({S, \preceq_1}\right)$ into $\left({T, \succeq_2}\right)$, where $\succeq_2$ is the [[Definition:Dual Ordering|dual]] of $\preceq_2$.
+Thus by [[Mapping from Totally Ordered Set is Order Embedding iff Strictly Increasing]]:
+:$\phi: \left({S, \preceq_1}\right) \to \left({T, \succeq_2}\right)$ is [[Definition:Strictly Increasing Mapping|strictly increasing]].
+Thus:
+:$\forall x, y \in S: x \prec_1 y \implies \phi \left({x}\right) \succ_2 \phi \left({y}\right)$
+so:
+:$\forall x, y \in S: x \prec_1 y \implies \phi \left({y}\right) \prec_2 \phi \left({x}\right)$
+Thus $\phi: \left({S, \preceq_1}\right) \to \left({T, \preceq_2}\right)$ is [[Definition:Strictly Decreasing Mapping|strictly decreasing]].
+{{qed|lemma}}
+=== Reverse Implication ===
+Suppose that $\phi: \left({S, \preceq_1}\right) \to \left({T, \preceq_2}\right)$ is [[Definition:Strictly Decreasing Mapping|strictly decreasing]].
+Then by the same argument as above:
+:$\phi: \left({S, \preceq_1}\right) \to \left({T, \succeq_2}\right)$ is [[Definition:Strictly Increasing Mapping|strictly increasing]].
+Thus by [[Mapping from Totally Ordered Set is Order Embedding iff Strictly Increasing]], $\phi$ is an [[Definition:Order Embedding|order embedding]] of $\left({S, \preceq_1}\right)$ into $\left({T, \succeq_2}\right)$.
+So $\phi$ is a [[Definition:Dual Order Embedding|dual order embedding]] of $\left({S, \preceq_1}\right)$ into $\left({T, \preceq_2}\right)$.
+{{qed}}
+[[Category:Total Orderings]]
+[[Category:Order Embeddings]]
+cvbcsnw2k9bzz4617102zbbql23f35t
+\end{proof}<|endoftext|>
+\section{Mapping from Totally Ordered Set is Order Embedding iff Strictly Increasing/Reverse Implication}
+Tags: Order Embeddings, Total Orderings
+
+\begin{theorem}
+Let $\struct {S, \preceq_1}$ be a [[Definition:Totally Ordered Set|totally ordered set]] and let $\struct {T, \preceq_2}$ be an [[Definition:Ordered Set|ordered set]].
+Let $\phi: S \to T$ be a [[Definition:Strictly Increasing/Mapping|strictly increasing mapping]].
+Then $\phi$ is an [[Definition:Order Embedding|order embedding]].
+
+== [[Mapping from Totally Ordered Set is Order Embedding iff Strictly Increasing/Reverse Implication/Proof 1|Proof 1]] ==
+{{:Mapping from Totally Ordered Set is Order Embedding iff Strictly Increasing/Reverse Implication/Proof 1}}
+== [[Mapping from Totally Ordered Set is Order Embedding iff Strictly Increasing/Reverse Implication/Proof 2|Proof 2]] ==
+{{:Mapping from Totally Ordered Set is Order Embedding iff Strictly Increasing/Reverse Implication/Proof 2}}
+
+[[Category:Order Embeddings]]
+[[Category:Total Orderings]]
+ip0v3n07lm6pfuiq8dh8hmlp58or3cs
+\end{theorem}<|endoftext|>
+\section{Mapping from Totally Ordered Set is Order Embedding iff Strictly Increasing/Reverse Implication/Proof 2}
+Tags: Order Embeddings, Total Orderings
+
+\begin{theorem}
+Let $\struct {S, \preceq_1}$ be a [[Definition:Totally Ordered Set|totally ordered set]] and let $\struct {T, \preceq_2}$ be an [[Definition:Ordered Set|ordered set]].
+Let $\phi: S \to T$ be a [[Definition:Strictly Increasing Mapping|strictly increasing mapping]].
+Then $\phi$ is an [[Definition:Order Embedding|order embedding]].
+\end{theorem}
+
+\begin{proof}
+Let $\phi$ be [[Definition:Strictly Increasing Mapping|strictly increasing]].
+Let $\map \phi x \preceq_2 \map \phi y$.
+As $\struct {S, \prec_1}$ is a [[Definition:Strictly Totally Ordered Set|strictly totally ordered set]]:
+:Either $y \prec_1 x$, $y = x$, or $x \prec_1 y$.
+{{AimForCont}} that $y \prec_1 x$.
+By the definition of a strictly increasing mapping:
+:$\map \phi y \prec_2 \map \phi x$
+which [[Definition:Contradiction|contradicts]] the fact that $\map \phi x \preceq_2 \map \phi y$.
+Therefore $y \nprec_1 x$.
+Thus $y = x$, or $x \prec_1 y$, so $x \preceq_1 y$.
+Hence:
+:$\map \phi x \preceq_2 \map \phi y \iff x \preceq_1 y$
+and $\phi$ has been proved to be an [[Definition:Order Embedding|order embedding]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Mapping from Totally Ordered Set is Order Embedding iff Strictly Increasing/Reverse Implication/Proof 1}
+Tags: Order Embeddings, Total Orderings
+
+\begin{theorem}
+Let $\struct {S, \preceq_1}$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+Let $\struct {T, \preceq_2}$ be an [[Definition:Ordered Set|ordered set]].
+Let $\phi: S \to T$ be a [[Definition:Strictly Increasing Mapping|strictly increasing mapping]].
+Then $\phi$ is an [[Definition:Order Embedding|order embedding]].
+\end{theorem}
+
+\begin{proof}
+Let $x \preceq_1 y$.
+Then $x = y$ or $x \prec_1 y$.
+Let $x = y$.
+Then
+:$\map \phi x = \map \phi y$
+so:
+:$\map \phi x \preceq_2 \map \phi y$
+Let $x \prec_1 y$.
+Then by the definition of [[Definition:Strictly Increasing Mapping|strictly increasing mapping]]:
+:$\map \phi x \prec_2 \map \phi y$
+so by the definition of $\prec_2$:
+:$\map \phi x \preceq_2 \map \phi y$
+Thus:
+:$x \preceq_1 y \implies \map \phi x \preceq_2 \map \phi y$
+It remains to be shown that:
+:$\map \phi x \preceq_2 \map \phi y \implies x \preceq_1 y$
+Suppose that $x \npreceq_1 y$.
+Since $\preceq_1$ is a [[Definition:Total Ordering|total ordering]]:
+:$y \prec_1 x$
+Thus since $\phi$ is [[Definition:Strictly Increasing Mapping|strictly increasing]]:
+:$\map \phi y \prec_2 \map \phi x$
+Thus:
+:$\map \phi x \npreceq_2 \map \phi y$
+Therefore:
+:$x \npreceq_1 y \implies \map \phi x \npreceq_2 \map \phi y$
+By the [[Rule of Transposition]]:
+:$\map \phi x \preceq_2 \map \phi y \implies x \preceq_1 y$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Foundational Relation is Antireflexive}
+Tags: Foundational Relations, Reflexive Relations
+
+\begin{theorem}
+Let $\mathcal R$ be a [[Definition:Foundational Relation|foundational relation]] on a [[Definition:Set|set]] or class $A$.
+Then $\mathcal R$ is [[Definition:Antireflexive Relation|antireflexive]].
+\end{theorem}
+
+\begin{proof}
+Let $p \in A$.
+Then $\left\{{p}\right\} \ne \varnothing$ and $\left\{{p}\right\} \subseteq A$.
+Thus, by the definition of [[Definition:Foundational Relation|foundational relation]]:
+:$\exists x \in \left\{{p}\right\}: \forall y \in \left\{{p}\right\}: \neg \left({y \mathrel{\mathcal R} x}\right)$
+Since $x \in \left\{{p}\right\}$, it must be that $x = p$.
+It follows that $p \not\mathrel{\mathcal R} p$.
+Since this holds for all $p \in A$, $\mathcal R$ is [[Definition:Antireflexive Relation|antireflexive]].
+{{qed}}
+[[Category:Foundational Relations]]
+[[Category:Reflexive Relations]]
+5un4m8mr37k753e9fnq83rdai1bc2zb
+\end{proof}<|endoftext|>
+\section{Foundational Relation is Asymmetric}
+Tags: Foundational Relations, Symmetric Relations
+
+\begin{theorem}
+Let $\struct {S, \RR}$ be a [[Definition:Relational Structure|relational structure]], where $S$ is a [[Definition:Set|set]] or a [[Definition:Proper Class|proper class]].
+Let $\RR$ be a [[Definition:Foundational Relation|foundational relation]].
+Then $\RR$ is [[Definition:Asymmetric Relation|asymmetric]].
+\end{theorem}
+
+\begin{proof}
+Let $p, q \in S$ and suppose that $p \mathrel \RR q$.
+Then $\set {p, q} \ne \O$ and $\set {p, q} \subseteq S$.
+By the definition of [[Definition:Foundational Relation|foundational relation]], $\set {p, q}$ has an [[Definition:Minimal Element under Relation|$\RR$-minimal element]].
+Since $p \mathrel \RR q$, $q$ is not an $\RR$-minimal element of $\set {p, q}$.
+Thus $p$ is an [[Definition:Minimal Element under Relation|$\RR$-minimal element]] of $\set {p, q}$.
+Thus $q \not \mathrel \RR p$.
+Since for all $p, q \in S$, $p \mathrel \RR q \implies q \not \mathrel \RR p$, $\RR$ is [[Definition:Asymmetric Relation|asymmetric]].
+{{qed}}
+[[Category:Foundational Relations]]
+[[Category:Symmetric Relations]]
+dxlx9qoocb3xu2h7wv4ce074awnbyzr
+\end{proof}<|endoftext|>
+\section{Upper Set with no Smallest Element is Open in GO-Space}
+Tags: Generalized Ordered Spaces
+
+\begin{theorem}
+Let $\struct {S, \preceq, \tau}$ be a [[Definition:Generalized Ordered Space|generalized ordered space]].
+Let $U$ be an [[Definition:Upper Set|upper set]] in $S$ with no [[Definition:Smallest Element|smallest element]].
+Then $U$ is [[Definition:Open Set (Topology)|open]] in $\struct {S, \preceq, \tau}$.
+\end{theorem}
+
+\begin{proof}
+By [[Minimal Element in Toset is Unique and Smallest]], $U$ has no [[Definition:Minimal Element|minimal element]].
+By [[Upper Set with no Minimal Element]]:
+:$U = \bigcup \set {u^\succ: u \in U}$
+where $u^\succ$ is the [[Definition:Strict Upper Closure of Element|strict upper closure]] of $u$.
+By [[Open Ray is Open in GO-Space]] and the fact that a union of [[Definition:Open Set (Topology)|open sets]] is open, $U$ is open.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Lower Set with no Maximal Element}
+Tags: Lower Sets
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $L \subseteq S$.
+Then:
+: $L$ is a [[Definition:Lower Set|lower set]] in $S$ with no [[Definition:Maximal Element|maximal element]]
+{{iff}}:
+: $\displaystyle L = \bigcup \left\{{l^\prec: l \in L }\right\}$
+where $l^\prec$ is the [[Definition:Strict Lower Closure of Element|strict lower closure]] of $l$.
+\end{theorem}
+
+\begin{proof}
+By [[Dual Pairs (Order Theory)]]:
+* [[Definition:Lower Set|Lower set]] is dual to [[Definition:Upper Set|upper set]].
+* [[Definition:Maximal Element|Maximal element]] is dual to [[Definition:Minimal Element|minimal element]].
+* [[Definition:Strict Lower Closure of Element|Strict lower closure]] is dual to [[Definition:Strict Upper Closure of Element|strict upper closure]].
+Thus the theorem holds by the [[Duality Principle (Order Theory)/Global Duality|duality principle]] applied to [[Upper Set with no Minimal Element]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Lower Set with no Greatest Element is Open in GO-Space}
+Tags: Generalized Ordered Spaces
+
+\begin{theorem}
+Let $\struct {S, \preceq, \tau}$ be a [[Definition:Generalized Ordered Space|generalized ordered space]].
+Let $L$ be a [[Definition:Lower Set|lower set]] in $S$ with no [[Definition:Greatest Element|greatest element]].
+Then $L$ is [[Definition:Open Set (Topology)|open]] in $\struct {S, \preceq, \tau}$.
+\end{theorem}
+
+\begin{proof}
+By [[Maximal Element in Toset is Unique and Greatest]], $L$ has no [[Definition:Maximal Element|maximal element]].
+By [[Lower Set with no Maximal Element]]:
+:$\displaystyle L = \bigcup \set {l^\prec: l \in L}$
+where $l^\prec$ is the [[Definition:Strict Lower Closure of Element|strict lower closure]] of $l$.
+By [[Open Ray is Open in GO-Space]] and the fact that a union of [[Definition:Open Set (Topology)|open sets]] is open, $L$ is open.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Lower Set is Dual to Upper Set}
+Tags: Upper Sets, Lower Sets
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $T \subseteq S$.
+The following are [[Definition:Dual Statement (Order Theory)|dual statements]]:
+:$T$ is a [[Definition:Lower Set|lower set]] in $S$
+:$T$ is an [[Definition:Upper Set|upper set]] in $S$
+\end{theorem}
+
+\begin{proof}
+By definition, $T$ is a [[Definition:Lower Set|lower set]] in $S$ [[Definition:Iff|iff]]:
+:$\forall t \in T: \forall s \in S: s \preceq t \implies s \in T$
+The [[Definition:Dual Statement (Order Theory)|dual]] of this statement is:
+:$\forall t \in T: \forall s \in S: t \preceq s \implies s \in T$
+by [[Dual Pairs (Order Theory)]].
+
+By definition, this means $T$ is an [[Definition:Upper Set|upper set]] in $S$.
+The converse follows from [[Dual of Dual Statement (Order Theory)]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Order Topology equals Dual Order Topology}
+Tags: Order Topology
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+Let $\tau$ be the $\preceq$-[[Definition:Order Topology|order topology]] on $S$.
+Let $\tau'$ be the $\succeq$-order topology on $S$, where $\succeq$ is the [[Definition:Dual Ordering|dual ordering]] of $\preceq$.
+Then $\tau' = \tau$.
+\end{theorem}
+
+\begin{proof}
+{{improve|recast in terms of dual statements}}
+Let $U$ be an [[Definition:Open Ray|open ray]] in $\left({S, \preceq}\right)$.
+By [[Open Ray is Dual to Open Ray]], $U$ is an [[Definition:Open Ray|open ray]] in $\left({S, \succeq}\right)$.
+Since the [[Definition:Open Ray|open rays]] in a [[Definition:Totally Ordered Set|totally ordered set]] form a [[Definition:Sub-Basis|sub-basis for the topology]] on that set, $\tau'$ is [[Definition:Finer Topology|finer]] than $\tau$.
+{{explain|Invoke some other duality principle?}}
+By the same argument, $\tau$ is [[Definition:Finer Topology|finer]] than $\tau'$.
+Thus by definition of [[Definition:Set Equality/Definition 2|set equality]]:
+: $\tau' = \tau$
+{{qed}}
+[[Category:Order Topology]]
+4o99v8qf2um7xjidwbxm35zjftnjqcc
+\end{proof}<|endoftext|>
+\section{Open Ray is Dual to Open Ray}
+Tags: Total Orderings
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+Let $R$ be an [[Definition:Open Ray|open ray]] in $\struct {S, \preceq}$.
+Then $R$ is an [[Definition:Open Ray|open ray]] in $\struct {S, \succeq}$, where $\succeq$ is the [[Definition:Dual Ordering|dual ordering]] of $\preceq$.
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Open Ray|open ray]], there is some $p \in S$ such that:
+:$R$ is the [[Definition:Strict Upper Closure|strict upper]] or [[Definition:Strict Lower Closure|strict lower closure]] of $p$ with respect to $\preceq$.
+By [[Strict Lower Closure is Dual to Strict Upper Closure]], the [[Definition:Dual Statement (Order Theory)|dual statement]] is:
+:$R$ is the [[Definition:Strict Upper Closure|strict upper]] or [[Definition:Strict Lower Closure|strict lower closure]] of $p$ with respect to $\succeq$.
+Thus $R$ is an [[Definition:Open Ray|open ray]] in $\struct {S, \succeq}$.
+{{qed}}
+[[Category:Total Orderings]]
+gvmyntzwkf86e0y22eoizxb1pgju6ic
+\end{proof}<|endoftext|>
+\section{Topologies on Set form Complete Lattice}
+Tags: Topology, Complete Lattices
+
+\begin{theorem}
+Let $X$ be a [[Definition:Non-Empty Set|non-empty set]].
+Let $\mathcal L$ be the [[Definition:Set|set]] of [[Definition:Topology|topologies]] on $X$.
+Then $\left({\mathcal L, \subseteq}\right)$ is a [[Definition:Complete Lattice|complete lattice]].
+\end{theorem}
+
+\begin{proof}
+Let $\mathcal K \subseteq \mathcal L$.
+Then by [[Intersection of Topologies is Topology]]:
+:$\bigcap \mathcal K \in \mathcal L$
+By [[Intersection is Largest Subset]], $\bigcap \mathcal K$ is the [[Definition:Infimum of Set|infimum]] of $\mathcal K$.
+{{explain}}
+Let $\tau$ be the topology generated by the sub-basis $\bigcup \mathcal K$.
+Then $\tau \in \mathcal L$ and $\tau$ is the [[Definition:Supremum of Set|supremum]] of $\mathcal K$.
+We have that each subset of $\mathcal L$ has a supremum and an infimum in $\mathcal L$.
+Thus it follows that $\left({\mathcal L, \subseteq}\right)$ is a [[Definition:Complete Lattice|complete lattice]].
+{{qed}}
+[[Category:Topology]]
+[[Category:Complete Lattices]]
+6s1khlyr62d80t7yamqkq7fgmssgpd7
+\end{proof}<|endoftext|>
+\section{Complement of Lower Set is Upper Set}
+Tags: Upper Sets, Lower Sets
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $L$ be a [[Definition:Lower Set|lower set]].
+Then $S \setminus L$ is an [[Definition:Upper Set|upper set]].
+\end{theorem}
+
+\begin{proof}
+Let $u \in S \setminus L$.
+Let $s \in S$ such that $u \preceq s$.
+Suppose for the sake of contradiction that $s \notin S \setminus L$.
+Then $s \in L$.
+By the definition of [[Definition:Lower Set|lower set]], $u \in L$, a contradiction.
+Hence $s \in S \setminus L$.
+Since this holds for all such $u$ and $s$, $S \setminus L$ is an [[Definition:Upper Set|upper set]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{GO-Space Embeds as Closed Subspace of Linearly Ordered Space}
+Tags: Generalized Ordered Spaces, Linearly Ordered Spaces
+
+\begin{theorem}
+Let $(X, \preceq_X, \tau_X)$ be a [[Definition:Generalized Ordered Space|generalized ordered space]].
+Then there is a [[Definition:Linearly Ordered Space|linearly ordered space]] $(Y, \preceq_Y, \tau_Y)$ and a mapping $\phi: X \to Y$ such that $\phi$ is a topological embedding and an order embedding, and $\phi(X)$ is closed in $Y$.
+\end{theorem}
+
+\begin{proof}
+By [[GO-Space Embeds Densely into Linearly Ordered Space]], there is a linearly ordered space $(W, \preceq_W, \tau_W)$ and a mapping $\psi:X \to W$ which is an order embedding and a topological embedding.
+Assume without loss of generality that $X$ is a subspace of $W$.
+Let $Y = \left\{{ (x, 0): x \in X }\right\} \cup (W \setminus X) \times \Z$.
+Let $\preceq_Y$ be the restriction to $Y$ of the lexicographic ordering on $W \times \Z$.
+Let $\tau_Y$ be the $\preceq_Y$-order topology on $Y$.
+Let $\phi:X \to Y$ be given by $\phi(x) = (x,0)$.
+$\phi$ is clearly an order embedding.
+Next, we show that it is a topological embedding:
+If $$
+Finally, we show that $\phi(X)$ is closed in $Y$:
+{{finish}}
+\end{proof}<|endoftext|>
+\section{Union of Total Ordering with Lower Sets is Total Ordering}
+Tags: Lower Sets, Total Orderings
+
+\begin{theorem}
+Let $\left({Y, \preceq}\right)$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+Let $X$ be the [[Definition:Disjoint Union (Set Theory)|disjoint union]] of $Y$ with the set of [[Definition:Lower Set|lower sets]] of $Y$.
+Define a relation $\preceq'$ on $X$ extending $\preceq$ by letting:
+:$y_1 \preceq' y_2 \iff y_1 \preceq y_2$
+:$y \preceq' L \iff y \in L$
+:$L_1 \preceq' L_2 \iff L_1 \subseteq L_2$
+:$L \preceq' y \iff y \in Y \setminus L$
+Then $\preceq'$ is a total ordering.
+\end{theorem}
+
+\begin{proof}
+First note that by [[Lower Sets in Totally Ordered Set form Nest]]:
+:$\subseteq$ is a [[Definition:Total Ordering|total ordering]] on the set of lower sets.
+Also note that by [[Complement of Lower Set is Upper Set]], the complement of each $\preceq$-lower set is a $\preceq$-upper set.
+=== Reflexivity ===
+This follows immediately from the fact that $\preceq$ and $\subseteq$ are reflexive.
+Thus $\preceq'$ is [[Definition:Reflexive Relation|reflexive]].
+{{qed|lemma}}
+=== Transitivity ===
+There are eight possibilities to consider.
+If $y_1 \preceq' y_2$ and $y_2 \preceq' y_3$, then $y_1 \preceq' y_3$ because $\preceq$ is transitive.
+If $L_1 \preceq' L_2$ and $L_2 \preceq' L_3$, then $L_1 \preceq' L_3$ because $\subseteq$ is transitive.
+If $y_1 \preceq' y_2$ and $y_2 \preceq' L$, then:
+:$y_1 \preceq y_2$ and $y_2 \in L$
+Since $L$ is a [[Definition:Lower Set|lower set]] in $Y$:
+:$y_1 \in L$
+so:
+:$y_1 \preceq' L$
+If $L \preceq' y_1$ and $y_1 \preceq' y_2$, then:
+:$y_1 \in Y \setminus L$ and $y_1 \preceq y_2$
+Since $Y \setminus L$ is an [[Definition:Upper Set|upper set]] in $Y$:
+:$y_2 \in Y \setminus L$
+so:
+:$L \preceq' y_2$
+If $y \preceq' L_1$ and $L_1 \preceq' L_2$, then:
+:$y \in L_1$ and $L_1 \subseteq L_2$
+By the definition of subset:
+:$y \in L_2$
+so:
+:$y \preceq' L_2$
+If $L_1 \preceq' L_2$ and $L_2 \preceq' y$, then:
+:$y \in Y \setminus L_2$ and $L_2 \supseteq L_1$
+so:
+:$y \in Y \setminus L_1$
+so:
+:$L_1 \preceq' y$
+If $y_1 \preceq' L$ and $L \preceq' y_2$ then
+:$y_1 \in L$ and $y_2 \in Y \setminus L$.
+Since $L$ is a lower set:
+: $y_2 \not\preceq y_1$
+Since $\preceq$ is a total ordering:
+:$y_1 \preceq y_2$
+so:
+:$y_1 \preceq' y_2$
+If $L_1 \preceq' y$ and $y \preceq' L_2$, then:
+:$y \in L_2$ but $y \notin L_1$
+Thus:
+:$L_2 \not\subseteq L_1$
+Since $\subseteq$ is a total ordering on the lower sets:
+:$L_1 \subseteq L_2$
+so:
+:$L_1 \preceq' L_2$
+Thus $\preceq'$ is [[Definition:Transitive Relation|transitive]].
+{{qed|lemma}}
+=== Antisymmetry ===
+There are three cases.
+If $y_1 \preceq' y_2$ and $y_2 \preceq' y_1$ then:
+:$y_1 \preceq y_2$ and $y_2 \preceq y_1$
+Since $\preceq$ is antisymmetric:
+:$y_1 = y_2$
+If $L_1 \preceq' L_2$ and $L_2 \preceq' L_1$ then:
+:$L_1 \subseteq L_2$ and $L_2 \subseteq L_1$
+Since $\subseteq$ is antisymmetric:
+:$L_1 = L_2$
+By the definition of $\preceq'$, it is impossible for $y \preceq' L$ and $L \preceq' y$, so the third case cannot occur.
+Thus $\preceq'$ is [[Definition:Antisymmetric Relation|antisymmetric]].
+{{qed|lemma}}
+Since $\preceq'$ is [[Definition:Reflexive Relation|reflexive]], [[Definition:Transitive Relation|transitive]], and [[Definition:Antisymmetric Relation|antisymmetric]], it is an [[Definition:Ordering|ordering]].
+$\preceq'$ is a [[Definition:Total Ordering|total ordering]] of $X$ because:
+:$\preceq$ is a total ordering
+:the set of lower sets is a nest
+and:
+:for any $y$ and $L$ either $y \in L$ or $y \in Y\setminus L$.
+{{explain|Specify exactly why it follows from the above that $\preceq'$ is a [[Definition:Total Ordering|total ordering]]}}
+{{qed}}
+[[Category:Lower Sets]]
+[[Category:Total Orderings]]
+8p7dffqm112z0q5kjcc20o60fxg6c58
+\end{proof}<|endoftext|>
+\section{Lower Closure is Lower Set}
+Tags: Lower Sets, Lower Closures
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $T$ be a [[Definition:Subset|subset]] of $S$.
+Let $L$ be the [[Definition:Lower Closure of Subset|lower closure]] of $T$.
+Then $L$ is a [[Definition:Lower Set|lower set]].
+\end{theorem}
+
+\begin{proof}
+Let $a \in L$.
+Let $b \in S$ with $b \preceq a$.
+By the definition of [[Definition:Lower Closure of Subset|lower closure]], there is a $t \in T$ such that $a \preceq t$.
+By [[Definition:Transitive Relation|transitivity]], $b \preceq t$.
+Thus, again by the definition of [[Definition:Lower Closure of Subset|lower closure]], $b \in L$.
+Since this holds for all such $a$ and $b$, $L$ is a [[Definition:Lower Set|lower set]].
+{{qed}}
+\end{proof}
+
+\begin{proof}
+Let $l \in p^\prec$.
+Let $s \in S$ with $s \preceq l$.
+Then by the definition of [[Definition:Strict Lower Closure of Element|strict lower closure]]:
+:$l \prec p$
+Thus by [[Extended Transitivity]]:
+:$s \prec p$
+So by the definition of [[Definition:Strict Lower Closure of Element|strict lower closure]]:
+:$s \in p^\prec$
+Since this holds for all such $l$ and $s$, $p^\prec$ is a [[Definition:Lower Set|lower set]].
+{{qed}}
+\end{proof}
+
+\begin{proof}
+By [[Dual Pairs (Order Theory)]]:
+:[[Definition:Strict Upper Closure of Element|strict upper closure]] is dual to [[Definition:Strict Lower Closure of Element|strict lower closure]]
+:[[Definition:Upper Set|Upper set]] is dual to [[Definition:Lower Set|lower set]]
+Thus the theorem holds by [[Strict Upper Closure is Upper Set]] and the [[Duality Principle (Order Theory)/Global Duality|duality principle]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Ordered Set is Upper Set in Itself}
+Tags: Upper Sets
+
+\begin{theorem}
+Let $(S, \preceq)$ be an [[Definition:Ordered Set|ordered set]].
+Then $S$ is an [[Definition:Upper Set|upper set]] in $S$.
+\end{theorem}
+
+\begin{proof}
+Follows immediately from the definition of [[Definition:Upper Set|upper set]].
+{{qed}}
+[[Category:Upper Sets]]
+3b2tr3jpcfkuapt8nr2dx0ad887se19
+\end{proof}<|endoftext|>
+\section{Ordered Set is Lower Set in Itself}
+Tags: Lower Sets
+
+\begin{theorem}
+Let $(S, \preceq)$ be an [[Definition:Ordered Set|ordered set]].
+Then $S$ is a [[Definition:Lower Set|lower set]] in $S$.
+\end{theorem}
+
+\begin{proof}
+Follows immediately from the definition of [[Definition:Lower Set|lower set]].
+{{qed}}
+[[Category:Lower Sets]]
+57868spvvmikxz5nls23916ev9xoxg8
+\end{proof}<|endoftext|>
+\section{Ordered Set is Convex in Itself}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Then $S$ is a [[Definition:Convex Set (Order Theory)|convex set]] in $S$.
+\end{theorem}
+
+\begin{proof}
+Follows immediately from the definition of [[Definition:Convex Set (Order Theory)|convex set]].
+{{qed}}
+[[Category:Order Theory]]
+6j6iuiv0lnnhfp51h069w7inoa9hvwq
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Generalized Ordered Space/Definition 3 implies Definition 1}
+Tags: Equivalence of Definitions of Generalized Ordered Space
+
+\begin{theorem}
+Let $\left({S, \preceq, \tau}\right)$ be a [[Definition:Generalized Ordered Space/Definition 3|generalized ordered space by Definition 3]]:
+{{:Definition:Generalized Ordered Space/Definition 3}}
+Then $\left({S, \preceq, \tau}\right)$ is a [[Definition:Generalized Ordered Space/Definition 1|generalized ordered space by Definition 1]]:
+{{:Definition:Generalized Ordered Space/Definition 1}}
+\end{theorem}
+
+\begin{proof}
+
+Let $\mathcal S$ be a [[Definition:Sub-Basis|sub-basis]] for $\tau$ consisting of [[Definition:Upper Set|upper sets]] and [[Definition:Lower Set|lower sets]].
+Let $\mathcal B$ be the [[Definition:Set|set]] of [[Definition:Set Intersection|intersections]] of [[Definition:Finite Set|finite]] [[Definition:Subset|subsets]] of $\mathcal S$.
+By [[Upper Set is Convex]], [[Lower Set is Convex]] and [[Intersection of Convex Sets is Convex Set (Order Theory)]] :
+:the [[Definition:Element|elements]] of $\mathcal B$ are [[Definition:Convex Set (Order Theory)|convex]].
+{{explain|Link to theorem on why this is a basis.}}
+But $\mathcal B$ is a [[Definition:Basis (Topology)|basis]] for $\tau$.
+Therefore $\tau$ has a [[Definition:Basis (Topology)|basis]] consisting of [[Definition:Convex Set (Order Theory)|convex sets]].
+{{qed}}
+
+[[Category:Equivalence of Definitions of Generalized Ordered Space]]
+et9g81yhxxur5f9ub0xyqbvfl2lbdbl
+\end{proof}<|endoftext|>
+\section{Strict Upper Closure is Upper Set}
+Tags: Upper Sets, Upper Closures
+
+\begin{theorem}
+Let $(S, \preceq)$ be an [[Definition:Ordered Set|ordered set]].
+Let $p \in S$.
+Then $p^\succ$, the [[Definition:Strict Upper Closure|strict upper closure]] of $p$, is an [[Definition:Upper Set|upper set]].
+\end{theorem}
+
+\begin{proof}
+Let $u \in p^\succ$.
+Let $s \in S$ with $u \preceq s$.
+Then by the definition of [[Definition:Strict Upper Closure|strict upper closure]]:
+: $p \prec u$
+Thus by [[Extended Transitivity]]:
+: $p \prec s$
+So by the definition of [[Definition:Strict Upper Closure|strict upper closure]]:
+: $s \in p^\succ$
+Since this holds for all such $u$ and $s$, $p^\succ$ is an [[Definition:Upper Set|upper set]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Strict Lower Closure is Lower Set}
+Tags: Lower Sets, Lower Closures, Strict Lower Closure is Lower Set
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $p \in S$.
+Then $p^\prec$, the [[Definition:Strict Lower Closure of Element|strict lower closure]] of $p$, is a [[Definition:Lower Set|lower set]].
+\end{theorem}
+
+\begin{proof}
+Let $l \in p^\prec$.
+Let $s \in S$ with $s \preceq l$.
+Then by the definition of [[Definition:Strict Lower Closure of Element|strict lower closure]]:
+:$l \prec p$
+Thus by [[Extended Transitivity]]:
+:$s \prec p$
+So by the definition of [[Definition:Strict Lower Closure of Element|strict lower closure]]:
+:$s \in p^\prec$
+Since this holds for all such $l$ and $s$, $p^\prec$ is a [[Definition:Lower Set|lower set]].
+{{qed}}
+\end{proof}
+
+\begin{proof}
+By [[Dual Pairs (Order Theory)]]:
+:[[Definition:Strict Upper Closure of Element|strict upper closure]] is dual to [[Definition:Strict Lower Closure of Element|strict lower closure]]
+:[[Definition:Upper Set|Upper set]] is dual to [[Definition:Lower Set|lower set]]
+Thus the theorem holds by [[Strict Upper Closure is Upper Set]] and the [[Duality Principle (Order Theory)/Global Duality|duality principle]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Topology Discrete iff All Singletons Open}
+Tags: Topology
+
+\begin{theorem}
+Let $(X, \tau)$ be a [[Definition:Topological Space|topological space]].
+Then $\tau$ is the [[Definition:Discrete Topology|discrete topology]] on $X$ [[Definition:iff|iff]]:
+: For all $x \in X$: $\{ x \} \in \tau$
+That is, iff every [[Definition:singleton|singleton]] of $X$ is [[Definition:Open Set (Topology)|$\tau$-open]].
+\end{theorem}
+
+\begin{proof}
+=== Forward Implication ===
+Follows directly from [[Set in Discrete Topology is Clopen]].
+{{qed|lemma}}
+=== Reverse Implication ===
+{{MissingLinks}}
+Suppose that:
+: For all $x \in X$: $\{ x \} \in \tau$
+Let $S \subseteq X$.
+Then $S = \bigcup \left\{{ \{ s \}: s \in S }\right\}$.
+Then since each $\{ s \}$ is open, and a union of open sets is open, $S$ is open.
+Since this holds for all $S \subseteq X$, $\tau$ is the discrete topology.
+{{qed}}
+[[Category:Topology]]
+8ze217dzd9ex1x6k6yqayam6ogag6xk
+\end{proof}<|endoftext|>
+\section{Characteristic Function of Universe}
+Tags: Characteristic Functions
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $\chi_S: S \to \left\{ {0, 1}\right\}$ be its [[Definition:Characteristic Function of Set|characteristic function]] (in itself).
+Then:
+:$\chi_S = f_1$
+where $f_1: S \to \left\{ {0, 1}\right\}$ is the [[Definition:Constant Mapping|constant mapping]] with value $1$.
+\end{theorem}
+
+\begin{proof}
+From [[Characteristic Function Determined by 1-Fiber]], $\chi_S$ is the [[Definition:Mapping|mapping]] determined by:
+:$\forall s \in S: \chi_S \left({s}\right) = 1 \iff s \in S$
+Thus:
+:$\forall s \in S: \chi_S \left({s}\right) = 1$
+By definition of [[Definition:Constant Mapping|constant mapping]]:
+:$\chi_S = f_1$
+{{qed}}
+[[Category:Characteristic Functions]]
+gf9zrtuostpsz441vhfkigouidtz6u6
+\end{proof}<|endoftext|>
+\section{Faltings' Theorem}
+Tags: Algebraic Geometry, Named Theorems: Mordell
+
+\begin{theorem}
+Let $C$ be a [[Definition:Curve (Algebraic Geometry)|curve]] over $\Q$ of [[Definition:Genus|genus]] $g > 1$.
+Then $C$ has only finitely many [[Definition:Rational Point of Curve|rational points]].
+\end{theorem}
+
+\begin{proof}
+{{ProofWanted}}
+{{Namedfor|Gerd Faltings|cat = Faltings}}
+\end{proof}<|endoftext|>
+\section{Supremum of Lower Closure of Set}
+Tags: Lower Closures
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $T \subseteq S$.
+Let $L = T^\preceq$ be the [[Definition:Lower Closure of Subset|lower closure]] of $T$ in $S$.
+Let $s \in S$.
+Then $s$ is the [[Definition:Supremum of Set|supremum]] of $T$ {{iff}} it is the [[Definition:Supremum of Set|supremum]] of $L$.
+\end{theorem}
+
+\begin{proof}
+By [[Supremum and Infimum are Unique]] we need only show that $s$ is a [[Definition:Supremum of Set|supremum]] of $L$ {{iff}} it is a [[Definition:Supremum of Set|supremum]] of $T$.
+=== Forward Implication ===
+Let $s$ be a [[Definition:Supremum of Set|supremum]] of $T$.
+$s$ is an [[Definition:Upper Bound of Set|upper bound]] of $L$:
+Let $l \in L$.
+Then by the definition of [[Definition:Lower Closure of Subset|lower closure]], there is a $t \in T$ such that $l \preceq t$.
+By the definition of [[Definition:Supremum of Set|supremum]], $s$ is an [[Definition:Upper Bound of Set|upper bound]] of $T$.
+Thus $t \preceq s$.
+Since $\preceq$ is [[Definition:Transitive Relation|transitive]], $l \preceq s$.
+Since this holds for all $l \in L$, $s$ is an [[Definition:Upper Bound of Set|upper bound]] of $L$.
+Suppose that $u$ is an [[Definition:Upper Bound of Set|upper bound]] of $L$.
+Then since $T \subseteq L$, $u$ is an [[Definition:Upper Bound of Set|upper bound]] of $T$.
+Thus by the definition of [[Definition:Supremum of Set|supremum]], $s \preceq u$.
+So:
+: $s$ is an [[Definition:Upper Bound of Set|upper bound]] of $L$
+: $s$ precedes all [[Definition:Upper Bound of Set|upper bounds]] of $L$
+Thus it follows that $s$ is the [[Definition:Supremum of Set|supremum]] of $L$.
+{{qed|lemma}}
+=== Reverse Implication ===
+Let $s$ be a [[Definition:Supremum of Set|supremum]] of $L$.
+Then $s$ is an [[Definition:Upper Bound of Set|upper bound]] of $L$.
+Since $T \subseteq L$, $s$ is an [[Definition:Upper Bound of Set|upper bound]] of $T$.
+Let $u$ be any [[Definition:Upper Bound of Set|upper bound]] of $T$ and let $l \in L$.
+Then by the definition of [[Definition:Lower Closure of Subset|lower closure]], there is a $t \in T$ such that $l \preceq t$.
+Then since $u$ is an [[Definition:Upper Bound of Set|upper bound]] of $T$, $t \preceq u$.
+Thus since $\preceq$ is [[Definition:Transitive Relation|transitive]], $l \preceq u$.
+Thus $u$ is an [[Definition:Upper Bound of Set|upper bound]] of $L$.
+By the definition of [[Definition:Supremum of Set|supremum]], $s \preceq u$.
+Thus $s$ is an [[Definition:Upper Bound of Set|upper bound]] of $T$ which precedes every [[Definition:Upper Bound of Set|upper bound]] of $T$.
+Therefore $s$ is the [[Definition:Supremum of Set|supremum]] of $T$.
+{{qed}}
+[[Category:Lower Closures]]
+ksm1xyro52wfs8c899gn9ltzff1h9jw
+\end{proof}<|endoftext|>
+\section{Upper Closure is Smallest Containing Upper Set}
+Tags: Upper Closures, Upper Sets
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $T \subseteq S$.
+Let $U = T^\succeq$ be the [[Definition:Upper Closure of Subset|upper closure]] of $T$.
+Then $U$ is the smallest [[Definition:Upper Set|upper set]] containing $T$ as a [[Definition:Subset|subset]].
+\end{theorem}
+
+\begin{proof}
+Follows from [[Upper Closure is Closure Operator]] and [[Set Closure is Smallest Closed Set/Closure Operator]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Upper Closure is Closure Operator}
+Tags: Upper Closures, Closure Operators
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $T^\succeq$ be the [[Definition:Upper Closure of Subset|upper closure]] of $T$ for each $T \subseteq S$.
+Then $\cdot^\succeq$ is a [[Definition:Closure Operator|closure operator]].
+\end{theorem}
+
+\begin{proof}
+=== Inflationary ===
+Let $T \subseteq S$.
+Let $t \in T$.
+Then since $T \subseteq S$, $t \in S$ by the definition of [[Definition:subset|subset]].
+Since $\preceq$ is [[Definition:Reflexive Relation|reflexive]], $t \preceq t$.
+Thus by the definition of [[Definition:Upper Closure of Subset|upper closure]], $t \in T^\succeq$.
+Since this holds for all $t \in T$, $T \subseteq T^\succeq$.
+Since this holds for all $T \subseteq S$:
+: $\cdot^\succeq$ is [[Definition:Inflationary Mapping|inflationary]].
+{{qed|lemma}}
+=== Order-Preserving===
+Let $T \subseteq U \subseteq S$.
+Let $x \in T^\succeq$.
+Then by the definition of [[Definition:Upper Closure of Subset|upper closure]]: for some $t \in T$, $t \preceq x$.
+By the definition of [[Definition:Subset|subset]]:
+: $t \in U$
+Thus by the definition of [[Definition:Upper Closure of Subset|upper closure]]:
+: $x \in U^\succeq$
+Since this holds for all $x \in T^\succeq$:
+: $T^\succeq \subseteq U^\succeq$
+Since this holds for all $T$ and $U$:
+: $\cdot^\succeq$ is [[Definition:Increasing Mapping|order-preserving]].
+{{qed|lemma}}
+=== Idempotent ===
+Let $T \subseteq S$.
+By [[Upper Closure is Upper Set]], $T^\succeq$ is an [[Definition:Upper Set|upper set]].
+Thus by [[Equivalence of Definitions of Upper Set]]:
+: $\left({T^\succeq}\right)^\succeq = T^\succeq$
+Since this holds for all $T$:
+: $\cdot^\succeq$ is [[Definition:Idempotent Mapping|idempotent]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Upper Set}
+Tags: Upper Sets
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $U \subseteq S$.
+{{TFAE|def = Upper Set}}
+\end{theorem}
+
+\begin{proof}
+=== Definition 1 implies Definition 2 ===
+Suppose that:
+:$\forall u \in U: \forall s \in S: u \preceq s \implies s \in U$
+Let $k \in U^\succeq$.
+Then by the definition of [[Definition:Upper Closure of Subset|upper closure]], there is some $u \in U$ such that $u \preceq k$.
+Since $k \in U^\succeq \subseteq S$, the premise proves that $k \in U$.
+Since this holds for all $k \in U^\succeq$, it follows that:
+: $U^\succeq \subseteq U$
+{{qed|lemma}}
+=== Definition 2 implies Definition 3 ===
+Suppose that $U^\succeq \subseteq U$.
+Let $u \in U$.
+Then since $U \subseteq S$, $u \in S$ by the definition of [[Definition:subset|subset]].
+Since $\preceq$ is [[Definition:Reflexive Relation|reflexive]]:
+: $u \preceq u$
+Thus by the definition of [[Definition:Upper Closure of Subset|upper closure]]:
+: $u \in U^\succeq$.
+Since this holds for all $u \in U$:
+: $U \subseteq U^\succeq$
+Thus by definition of [[Definition:Set Equality/Definition 2|set equality]]:
+: $U^\succeq = U$
+{{qed|lemma}}
+=== Definition 3 implies Definition 1 ===
+Suppose that $U^\succeq = U$.
+Let $u \in U$.
+Let $s \in S$.
+Let $u \preceq s$.
+Then by the definition of [[Definition:Upper Closure of Subset|upper closure]], $s \in U$.
+Thus we have shown that:
+:$\forall u \in U: \forall s \in S: u \preceq s \implies s \in U$
+{{qed}}
+[[Category:Upper Sets]]
+pfk2cix6ah4phtbuf7v3g63yku3xwvj
+\end{proof}<|endoftext|>
+\section{Topological Closure is Closure Operator}
+Tags: Set Closures, Examples of Closure Operators
+
+\begin{theorem}
+The [[Definition:Closure (Topology)|topological closure]] operator is a [[Definition:Closure Operator|closure operator]].
+\end{theorem}
+
+\begin{proof}
+=== Extensive ===
+Follows from [[Set is Subset of its Topological Closure]].
+=== Increasing ===
+Follows immediately from [[Topological Closure of Subset is Subset of Topological Closure]].
+=== Idempotent ===
+Follows immediately from [[Closure of Topological Closure equals Closure]].
+{{MissingLinks|the three concepts in the headers (link in the ''text'')}}
+{{qed}}
+[[Category:Set Closures]]
+[[Category:Examples of Closure Operators]]
+7j6hyd3oqs9bvg7unfcg2u4munbralz
+\end{proof}<|endoftext|>
+\section{Reflexive Closure is Closure Operator}
+Tags: Reflexive Closures, Closure Operators, Reflexive Closure is Closure Operator
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]].
+Let $R$ be the set of all [[Definition:Endorelation|endorelations]] on $S$.
+Then the [[Definition:Reflexive Closure|reflexive closure]] operator on $R$ is a [[Definition:Closure Operator|closure operator]].
+\end{theorem}
+
+\begin{proof}
+Let $\mathcal Q$ be the [[Definition:Set of Sets|set]] of [[Definition:Reflexive Relation|reflexive relations]] on $S$.
+By [[Intersection of Reflexive Relations is Reflexive]], the [[Definition:Set Intersection|intersection]] of any [[Definition:subset|subset]] of $\mathcal Q$ is in $\mathcal Q$.
+By the definition of [[Definition:Reflexive Closure/Intersection of Reflexive Supersets|reflexive closure]] as the intersection of reflexive supersets:
+:The reflexive closure of a [[Definition:Endorelation|relation]] $\mathcal R$ on $S$ is the [[Definition:Set Intersection|intersection]] of those elements of $\mathcal Q$ that contain $\mathcal R$.
+From [[Closure Operator from Closed Sets]] we conclude that reflexive closure is a [[Definition:Closure Operator|closure operator]].
+{{qed}}
+\end{proof}
+
+\begin{proof}
+=== [[Reflexive Closure is Inflationary]] ===
+{{:Reflexive Closure is Inflationary}}{{qed|lemma}}
+=== [[Reflexive Closure is Order Preserving]] ===
+{{:Reflexive Closure is Order Preserving}}{{qed|lemma}}
+=== [[Reflexive Closure is Idempotent]] ===
+{{:Reflexive Closure is Idempotent}}{{qed|lemma}}
+Thus by the definition of [[Definition:Closure Operator|closure operator]], [[Definition:Reflexive Closure|reflexive closure]] is a closure operator.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Set Closure is Smallest Closed Set/Closure Operator}
+Tags: Closure Operators, Set Closure is Smallest Closed Set
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $\cl: \powerset S \to \powerset S$ be a [[Definition:Closure Operator|closure operator]].
+Let $T \subseteq S$.
+Then $\map \cl T$ is the smallest [[Definition:Closed Set under Closure Operator|closed set]] (with respect to $\cl$) containing $T$ as a [[Definition:Subset|subset]].
+\end{theorem}
+
+\begin{proof}
+By definition, $\map \cl T$ is [[Definition:Closed Set under Closure Operator|closed]].
+Let $C$ be closed.
+Let $T \subseteq C$.
+By the definition of [[Definition:Closure Operator|closure operator]], $\cl$ is $\subseteq$-[[Definition:Increasing Mapping|increasing]].
+So:
+:$\map \cl T \subseteq \map \cl C$
+Since $C$ is [[Definition:Closed Set under Closure Operator|closed]], $\map \cl C = C$.
+So:
+:$\map \cl T \subseteq C$
+Thus $\map \cl T$ is the smallest [[Definition:Closed Set under Closure Operator|closed set]] containing $T$ as a [[Definition:Subset|subset]].
+{{qed}}
+[[Category:Closure Operators]]
+[[Category:Set Closure is Smallest Closed Set]]
+rgb5hevh3a910j1evzkrqqrzjppg3ey
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Lower Set}
+Tags: Lower Sets
+
+\begin{theorem}
+{{TFAE|def = Lower Set}}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $U \subseteq S$.
+Then the following are equivalent:
+{{begin-axiom}}
+{{axiom | n = 1
+ | m = \forall u \in U: \forall s \in S: s \preceq u \implies s \in U
+}}
+{{axiom | n = 2
+ | m = U^\preceq \subseteq U
+}}
+{{axiom | n = 3
+ | m = U^\preceq = U
+}}
+{{end-axiom}}
+where $U^\preceq$ is the [[Definition:Lower Closure of Subset|lower closure]] of $U$.
+\end{theorem}
+
+\begin{proof}
+By the [[Duality Principle (Order Theory)/Global Duality|Duality Principle]], it suffices to prove that:
+:$(1^*)$, $(2^*)$ and $(3^*)$ are [[Definition:Logically Equivalent|equivalent]]
+where these are the [[Definition:Dual Statement (Order Theory)|dual statements]] of $(1)$, $(2)$ and $(3)$, respectively.
+By [[Dual Pairs (Order Theory)|Dual Pairs]], it can be seen that these dual statements are as follows:
+{{begin-axiom}}
+{{axiom | n = 1^*
+ | m = \forall u \in U: \forall s \in S: u \preceq s \implies s \in U
+}}
+{{axiom | n = 2^*
+ | m = U^\succeq \subseteq U
+}}
+{{axiom | n = 3^*
+ | m = U^\succeq = U
+}}
+{{end-axiom}}
+Their [[Definition:Logically Equivalent|equivalence]] is proved on [[Equivalence of Definitions of Upper Set]].
+{{qed}}
+[[Category:Lower Sets]]
+f08gllkp30urt8cig57kk7zesq4jvog
+\end{proof}<|endoftext|>
+\section{Zero and One are the only Consecutive Perfect Squares/Proof 1}
+Tags: Zero and One are the only Consecutive Perfect Squares
+
+\begin{theorem}
+{{:Zero and One are the only Consecutive Perfect Squares}}
+
+Let $x$ and $h$ be [[Definition:Integer|integers]] such that $x^2 + 1 = \paren {x - h}^2$
+{{begin-eqn}}
+{{eqn | l = x^2 + 1
+ | r = \left({x - h}\right)^2
+}}
+{{eqn | l = 1
+ | r = -2 x h + h^2
+}}
+{{eqn | l = 2 x h
+ | r = h^2 - 1
+}}
+{{eqn | l = 2 x h
+ | r = \paren {h - 1} \paren {h + 1}
+}}
+{{end-eqn}}
+We have that [[Consecutive Integers are Coprime]].
+However, both sides must have the same unique prime factorization by the [[Fundamental Theorem of Arithmetic]]
+Therefore $h$ cannot have any prime factors since they cannot be shared by $\paren {h - 1} \paren {h + 1}$.
+This leaves $h = -1$, $h = 0$, or $h = 1$ as the only possibilities since they are the only integers with no prime factors.
+If $h = -1$ then $h + 1 = 0$, so $2 x h = 0$.
+It follows that $x = 0$.
+If $h = 1$ then $h - 1 = 0$, so $2 x h = 0$.
+It follows that $x = 0$.
+If $h = 0$, then $2 x \cdot 0 = \paren {-1} \paren 1$, which is a [[Definition:Contradiction|contradiction]].
+Therefore the only pairs of consecutive perfect squares are:
+:$0^2 = 0$ and $\paren {0 + \paren {-1} }^2 = \paren {-1}^2 = 1$
+and:
+:$0^2 = 0$ and $\paren {0 + 1}^2 = 1^2 = 1$
+{{qed}}
+
+[[Category:Zero and One are the only Consecutive Perfect Squares]]
+htz0vf4du4yfpb36uyojnh9tye85uv2
+\end{theorem}<|endoftext|>
+\section{Zero and One are the only Consecutive Perfect Squares/Proof 2}
+Tags: Zero and One are the only Consecutive Perfect Squares
+
+\begin{theorem}
+{{:Zero and One are the only Consecutive Perfect Squares}}
+
+Suppose that $k, l \in \Z$ are such that their squares are consecutive, i.e.:
+:$l^2 - k^2 = 1$
+Then we can factor the left-hand side as:
+:$l^2 - k^2 = \left({l + k}\right) \left({l - k}\right)$
+By [[Invertible Integers under Multiplication]], it follows that:
+:$l + k = \pm 1 = l - k$
+Therefore, it must be that:
+:$\left({l + k}\right) - \left({l - k}\right) = 0$
+That is, $2 k = 0$, from which we conclude $k = 0$.
+So if $n$ and $n + 1$ are squares, then necessarily $n = 0$.
+The result follows.
+{{qed}}
+
+[[Category:Zero and One are the only Consecutive Perfect Squares]]
+k8g8dzfo5fx677zrb5d7aaqbvi1ouhp
+\end{theorem}<|endoftext|>
+\section{Convergent Series of Natural Numbers}
+Tags: Natural Numbers, Series
+
+\begin{theorem}
+Let $\left({a_n}\right)_{n \in \N}$ be a [[Definition:Sequence|sequence]] of [[Definition:Natural Number|natural numbers]].
+Then the following are [[Definition:Logical Equivalence|equivalent]]:
+$(1): \quad \displaystyle \sum_{n \mathop = 1}^\infty a_n$ [[Definition:Convergent Series|converges]]
+$(2): \quad \exists N \in \N: \forall n \ge N: a_n = 0$
+That is, $\displaystyle \sum_{n \mathop = 1}^\infty a_n$ converges {{iff}} only [[Definition:Finite Set|finitely many]] of the $a_n$ are non-zero.
+\end{theorem}
+
+\begin{proof}
+$(1) \implies (2)$:
+Suppose that there is an [[Definition:Infinity|infinite]] [[Definition:Subsequence|subsequence]] $\left({ a_{n_k} }\right)_{k \in \N}$ such that for each $k$, $a_{n_k} \neq 0$.
+For $N \in \N$ let
+:$\displaystyle s_N = \sum_{n \mathop = 1}^N a_n$
+To show that $s_N$ [[Definition:Divergent Sequence|diverges]] it suffices to show that:
+:$\forall M > 0\ \exists N \in \N : \forall n > N : \left\vert{ s_n }\right\vert > M$
+Since for each $n$, $a_n \ge 0$, $s_N$ is a [[Definition:Positive|positive]] [[Definition:Increasing Sequence|increasing sequence]] in $N$.
+Therefore it suffices to show that:
+:$\forall M > 0\ \exists N \in \N : s_N > M$
+Fix $M > 0$.
+Let $k$ be any positive integer such that $k > M$.
+Note that each term $a_{n_j}$ of the [[Definition:Subsequence|subsequence]] is a non-zero [[Definition:Natural Number|natural number]], so $a_{n_j} \ge 1$, while every $a_n$ is non-negative.
+Then we have:
+{{begin-eqn}}
+{{eqn | l = s_{n_k}
+ | r = \sum_{n \mathop = 1}^{n_k} a_n
+}}
+{{eqn | o = \ge
+ | r = \sum_{j \mathop = 1}^k a_{n_j}
+ | c = as all remaining terms are non-negative
+}}
+{{eqn | o = \ge
+ | r = k
+ | c = as each $a_{n_j} \ge 1$
+}}
+{{eqn | o = >
+ | r = M
+ | c = by the choice of $k$
+}}
+{{end-eqn}}
+Therefore the sequence $s_N$ diverges.
+$(2) \implies (1)$:
+Suppose there exists $N > 0$ such that $a_n = 0$ for all $n > N$.
+Then we have, for all $L > N$:
+:$\displaystyle s_L = \sum_{n \mathop = 1}^L a_n = \sum_{n \mathop = 1}^N a_n = s_N$
+In particular, for any $\epsilon > 0$ and all $L > N$:
+:$\left\vert{s_L - s_N}\right\vert = 0 < \epsilon$
+Therefore the sequence converges to $s_N$.
+{{Qed}}
+[[Category:Natural Numbers]]
+[[Category:Series]]
+\end{proof}<|endoftext|>
+\section{Closure Operator from Closed Sets}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]].
+Let $\CC$ be a set of [[Definition:Subset|subsets]] of $S$.
+Let $\CC$ be closed under arbitrary [[Definition:Set Intersection|intersections]]:
+:$\forall \KK \in \powerset \CC: \bigcap \KK \in \CC$
+where $\bigcap \O$ is taken to be $S$.
+Define $\cl: \powerset S \to \CC$ by letting:
+:$\map \cl T = \bigcap \set {C \in \CC: T \subseteq C}$
+Then $\cl$ is a [[Definition:Closure Operator|closure operator]] whose [[Definition:Closed Set under Closure Operator|closed sets]] are the elements of $\CC$.
+\end{theorem}
+
+\begin{proof}
+First we will show that $\cl$ is a [[Definition:Closure Operator|closure operator]].
+=== Inflationary ===
+Let $T \subseteq S$.
+By [[Set Intersection Preserves Subsets/General Result/Corollary]], $T \subseteq \map \cl T$.
+Since this holds for all such $T$, $\cl$ is [[Definition:Inflationary Mapping|inflationary]].
+{{qed|lemma}}
+=== Increasing ===
+Let $T \subseteq U \subseteq S$.
+Let $\TT$ and $\UU$ be the sets of elements of $\CC$ containing $T$ and $U$, respectively.
+By [[Subset Relation is Transitive]], every set containing $U$ contains $T$, so $\UU \subseteq \TT$.
+By [[Intersection is Decreasing]], $\bigcap \TT \subseteq \bigcap \UU$.
+Thus $\map \cl T \subseteq \map \cl U$.
+{{qed|lemma}}
+=== Idempotent ===
+Let $T \subseteq S$.
+By the premise, the [[Definition:Set Intersection|intersection]] of a [[Definition:Subset|subset]] of $\CC$ is in $\CC$.
+Thus in particular $\map \cl T \in \CC$.
+Therefore:
+:$\map \cl {\map \cl T} \subseteq \map \cl T$
+Since $\cl$ is [[Definition:Inflationary Mapping|inflationary]]:
+:$\map \cl T \subseteq \map \cl {\map \cl T}$
+By definition of [[Definition:Set Equality/Definition 2|set equality]]:
+:$\map \cl {\map \cl T} = \map \cl T$
+Since this holds for all $T \subseteq S$, $\cl$ is [[Definition:Idempotent Mapping|idempotent]].
+{{qed|lemma}}
+Finally, we need to show that the elements of $\CC$ are the [[Definition:Closed Set/Closure Operator|closed sets]] with respect to $\cl$.
+If $C \in \CC$, then since $\cl$ is [[Definition:Inflationary Mapping|inflationary]]:
+:$C \subseteq \map \cl C$
+But since $C \subseteq C$, $\map \cl C \subseteq C$.
+Thus by definition of [[Definition:Set Equality/Definition 2|set equality]]:
+:$\map \cl C = C$
+so $C$ is [[Definition:Closed Set under Closure Operator|closed]] with respect to $\cl$.
+Suppose instead that $C$ is closed with respect to $\cl$.
+Then $\map \cl C = C$.
+Since $\CC$ is closed under intersections, $C \in \CC$.
+{{qed}}
+[[Category:Closure Operators]]
+\end{proof}<|endoftext|>
+\section{Intersection is Decreasing}
+Tags: Set Intersection
+
+\begin{theorem}
+Let $U$ be a [[Definition:Set|set]].
+Let $\mathcal F$ and $\mathcal G$ be [[Definition:Set of Sets|sets]] of [[Definition:Subset|subsets]] of $U$.
+Then $\mathcal F \subseteq \mathcal G \implies \bigcap \mathcal G \subseteq \bigcap \mathcal F$, where by convention $\bigcap \varnothing = U$.
+That is, $\bigcap$ is a [[Definition:Decreasing Mapping|decreasing mapping]] from $(\mathcal P(\mathcal P(U)), \subseteq)$ to $(\mathcal P(U), \subseteq)$, where $\mathcal P(U)$ is the [[Definition:Power Set|power set]] of $U$.
+\end{theorem}
+
+\begin{proof}
+Let $\mathcal F \subseteq \mathcal G$.
+Let $x \in \bigcap \mathcal G$.
+Let $S \in \mathcal F$.
+By the definition of [[Definition:Subset|subset]], $S \in \mathcal G$.
+By the definition of [[Definition:Set Intersection|intersection]], $x \in S$.
+Since this holds for all $S \in \mathcal F$, $x \in \bigcap \mathcal F$.
+Since this holds for all $ x \in \bigcap \mathcal G$:
+: $\bigcap \mathcal G \subseteq \bigcap \mathcal F$
+{{qed}}
+[[Category:Set Intersection]]
+\end{proof}<|endoftext|>
+\section{Open Ray is Open in GO-Space}
+Tags: Generalized Ordered Spaces
+
+\begin{theorem}
+Let $\left({S, \preceq, \tau}\right)$ be a [[Definition:Generalized Ordered Space|generalized ordered space]].
+Let $p \in S$.
+Then:
+: $p^\prec$ and $p^\succ$ are [[Definition:Open Set (Topology)|$\tau$-open]]
+where:
+: $p^\prec$ is the [[Definition:Strict Lower Closure of Element|strict lower closure]] of $p$
+: $p^\succ$ is the [[Definition:Strict Upper Closure of Element|strict upper closure]] of $p$.
+\end{theorem}<|endoftext|>
+\section{Open Ray is Open in GO-Space/Definition 2}
+Tags: Generalized Ordered Spaces
+
+\begin{theorem}
+Let $\struct {S, \preceq, \tau}$ be a [[Definition:Generalized Ordered Space/Definition 2|generalized ordered space by Definition 2]].
+That is:
+:Let $\struct {S, \preceq}$ be a [[Definition:Totally Ordered Set|totally ordered set]].
+:Let $\struct {S, \tau}$ be a [[Definition:Topological Space|topological space]].
+Let there be:
+:a [[Definition:Linearly Ordered Space|linearly ordered space]] $\struct {S', \preceq', \tau'}$
+and:
+:a [[Definition:Mapping|mapping]] $\phi: S \to S'$ which is both:
+::a $\preceq$-$\preceq'$ [[Definition:Order Embedding|order embedding]]
+:and:
+::a $\tau$-$\tau'$ [[Definition:Topological Embedding|topological embedding]].
+Let $p \in S$.
+Then:
+:$p^\prec$ and $p^\succ$ are [[Definition:Open Set (Topology)|$\tau$-open]]
+where:
+:$p^\prec$ is the [[Definition:Strict Lower Closure of Element|strict lower closure]] of $p$
+:$p^\succ$ is the [[Definition:Strict Upper Closure of Element|strict upper closure]] of $p$.
+\end{theorem}
+
+\begin{proof}
+We will prove that $p^\succ$ is [[Definition:Open Set (Topology)|open]].
+{{explain|follow by duality how?}}
+That $p^\prec$ is open will follow by duality.
+By [[Inverse Image under Order Embedding of Strict Upper Closure of Image of Point]]:
+:$\map {\phi^{-1} } {\map \phi p^\succ} = p^\succ$
+Also, $\map \phi p^\succ$ is an [[Definition:Open Ray|open ray]] in $S'$.
+Therefore $\map \phi p^\succ$ is [[Definition:Open Set (Topology)|$\tau'$-open]] by the definition of the [[Definition:Order Topology|order topology]].
+{{explain|What exactly is [[Definition:Open Set (Topology)|$\tau'$-open]] here?}}
+Since $\phi$ is a topological embedding, it is [[Definition:Continuous Mapping (Topology)|continuous]].
+Thus $p^\succ$ is [[Definition:Open Set (Topology)|$\tau$-open]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Union is Increasing}
+Tags: Set Union
+
+\begin{theorem}
+Let $U$ be a [[Definition:Set|set]].
+Let $\mathcal F$ and $\mathcal G$ be [[Definition:Set of Sets|sets]] of [[Definition:Subset|subsets]] of $U$.
+Then $\mathcal F \subseteq \mathcal G \implies \bigcup \mathcal F \subseteq \bigcup \mathcal G$.
+That is, $\bigcup$ is an [[Definition:Increasing Mapping|increasing mapping]] from $(\mathcal P(\mathcal P(U)), \subseteq)$ to $(\mathcal P(U), \subseteq)$, where $\mathcal P(U)$ is the [[Definition:Power Set|power set]] of $U$.
+\end{theorem}
+
+\begin{proof}
+Let $\mathcal F \subseteq \mathcal G$.
+Let $x \in \bigcup \mathcal F$.
+Then by the definition of [[Definition:Set Union|union]], for some $S \in \mathcal F$, $x \in S$.
+By the definition of [[Definition:subset|subset]], $S \in \mathcal G$.
+Thus by the definition of union, $x \in \bigcup \mathcal G$.
+Since this holds for all $x \in \bigcup \mathcal F$:
+: $\bigcup \mathcal F \subseteq \bigcup \mathcal G$
+{{qed}}
+[[Category:Set Union]]
+\end{proof}<|endoftext|>
+\section{Intersection is Idempotent/Indexed Family}
+Tags: Set Intersection, Indexed Families, Idempotence
+
+\begin{theorem}
+Let $\family {F_i}_{i \mathop \in I}$ be a non-empty [[Definition:Indexed Family of Sets|indexed family of sets]].
+Suppose that all the [[Definition:Set|sets]] in the $\family {F_i}_{i \mathop \in I}$ are the same.
+That is, suppose that for some [[Definition:Set|set]] $S$:
+:$\forall i \in I: F_i = S$
+Then:
+:$\displaystyle \bigcap_{i \mathop \in I} F_i = S$
+where $\displaystyle \bigcap_{i \mathop \in I} F_i$ is the [[Definition:Intersection of Family|intersection of $\family {F_i}_{i \mathop \in I}$]].
+\end{theorem}
+
+\begin{proof}
+First we show that:
+:$\displaystyle \bigcap_{i \mathop \in I} F_i \subseteq S$
+Let $x \in \displaystyle \bigcap_{i \mathop \in I} F_i$.
+Since $I$ is [[Definition:Non-Empty Set|non-empty]], it has an [[Definition:Element|element]] $k$.
+By the definition of [[Definition:Intersection of Family|intersection]], $x \in F_k$.
+By the premise, $F_k = S$, so $x \in S$.
+Since this holds for all $x \in \displaystyle \bigcap_{i \mathop \in I} F_i$:
+:$\displaystyle \bigcap_{i \mathop \in I} F_i \subseteq S$
+Next we show that:
+:$\displaystyle S \subseteq \bigcap_{i \mathop \in I} F_i$
+Let $x \in S$.
+Then for all $i \in I$, $F_i = S$, so $x \in F_i$.
+Thus by the definition of [[Definition:Intersection of Family|intersection]]:
+:$x \in \displaystyle \bigcap_{i \mathop \in I} F_i$
+Since this holds for all $x \in S$:
+:$S \subseteq \displaystyle \bigcap_{i \mathop \in I} F_i$
+By definition of [[Definition:Set Equality|set equality]]:
+:$\displaystyle \bigcap_{i \mathop \in I} F_i = S$
+{{qed}}
+[[Category:Set Intersection]]
+[[Category:Indexed Families]]
+[[Category:Idempotence]]
+\end{proof}<|endoftext|>
+\section{Factor Principles/Conjunction on Right/Formulation 1/Proof 1}
+Tags: Factor Principles
+
+\begin{theorem}
+: $p \implies q \vdash \left({p \land r}\right) \implies \left ({q \land r}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|p \implies q \vdash \left({p \land r}\right) \implies \left ({q \land r}\right)}}
+{{Premise|1|p \implies q}}
+{{IdentityLaw|2||r \implies r|(None)|This is a theorem so depends on nothing}}
+{{Conjunction|3|1|\left({p \implies q}\right) \land \left({r \implies r}\right)|1|2}}
+{{SequentIntro|4|1|\left({p \land r}\right) \implies \left ({q \land r}\right)|3|[[Praeclarum Theorema]]}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Factor Principles/Conjunction on Left/Formulation 1/Proof 2}
+Tags: Factor Principles
+
+\begin{theorem}
+:$p \implies q \vdash \paren {r \land p} \implies \paren {r \land q}$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|p \implies q \vdash \paren {r \land p} \implies \paren {r \land q}}}
+{{Premise|1|p \implies q}}
+{{Assumption|2|r \land p}}
+{{Simplification|3|2|p|2|2}}
+{{ModusPonens|4|1, 2|q|1|3}}
+{{Simplification|5|2|r|2|1}}
+{{Conjunction|6|1, 2|r \land q|5|4}}
+{{Implication|7|1|\paren {r \land p} \implies \paren {r \land q}|2|6}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Factor Principles/Conjunction on Left/Formulation 1/Proof 1}
+Tags: Factor Principles
+
+\begin{theorem}
+: $p \implies q \vdash \paren {r \land p} \implies \paren {r \land q}$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|p \implies q \vdash \paren {r \land p} \implies \paren {r \land q} }}
+{{Premise|1|p \implies q}}
+{{IdentityLaw|2||r \implies r|(None)|This is a theorem so depends on nothing}}
+{{Conjunction|3|1|\paren {r \implies r} \land \paren {p \implies q}|2|1}}
+{{SequentIntro|4|1|\paren {r \land p} \implies \paren {r \land q}|3|[[Praeclarum Theorema]]}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Constructive Dilemma for Join Semilattices}
+Tags: Lattice Theory
+
+\begin{theorem}
+Let $\struct {S, \vee, \preceq}$ be a [[Definition:Join Semilattice|join semilattice]].
+Let $a, b, c, d \in S$.
+Let $a \preceq b$.
+Let $c \preceq d$.
+Then $\paren {a \vee c} \preceq \paren {b \vee d}$.
+\end{theorem}
+
+\begin{proof}
+By [[Join Semilattice is Ordered Structure]], $\preceq$ is [[Definition:Relation Compatible with Operation|compatible]] with $\vee$.
+By the definition of [[Definition:ordering|ordering]], $\preceq$ is [[Definition:Transitive Relation|transitive]].
+Thus the theorem holds by [[Operating on Transitive Relationships Compatible with Operation]].
+{{qed}}
+[[Category:Lattice Theory]]
+\end{proof}<|endoftext|>
+\section{Praeclarum Theorema for Meet Semilattices}
+Tags: Lattice Theory
+
+\begin{theorem}
+Let $(S, \wedge, \preceq)$ be a [[Definition:Meet Semilattice|meet semilattice]].
+Let $a, b, c, d \in S$.
+Let $a \preceq b$.
+Let $c \preceq d$.
+Then $(a \wedge c) \preceq (b \wedge d)$.
+\end{theorem}
+
+\begin{proof}
+By [[Meet Semilattice is Ordered Structure]], $\preceq$ is [[Definition:Relation Compatible with Operation|compatible]] with $\wedge$.
+By the definition of [[Definition:ordering|ordering]], $\preceq$ is [[Definition:Transitive Relation|transitive]].
+Thus the theorem holds by [[Operating on Transitive Relationships Compatible with Operation]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Supremum is Increasing relative to Product Ordering}
+Tags: Order Theory, Increasing Mappings
+
+\begin{theorem}
+Let $(S, \preceq)$ be an [[Definition:Ordered Set|ordered set]].
+Let $I$ be a [[Definition:Set|set]].
+Let $f, g: I \to S$.
+Let $f \left[{I}\right]$ denote the [[Definition:Image of Subset under Mapping|image of $I$ under $f$]].
+Let:
+:$\forall i \in I: f \left({i}\right) \preceq g \left({i}\right)$
+That is, let $f \preceq g$ in the product ordering.
+Let $f \left[{I}\right]$ and $g \left[{I}\right]$ admit [[Definition:Supremum of Set|suprema]].
+Then:
+: $\sup f \left[{I}\right] \preceq \sup g \left[{I}\right]$
+\end{theorem}
+
+\begin{proof}
+Let $x \in f \left[{I}\right]$.
+Then:
+:$\exists j \in I: f \left({j}\right) = x$
+Then by the premise:
+:$f \left({j}\right) \preceq g \left({j}\right)$
+By the definition of [[Definition:Supremum of Set|supremum]]:
+:$\sup g \left[{I}\right]$ is an [[Definition:Upper Bound of Set|upper bound]] of $g \left[{I}\right]$
+Thus:
+:$g \left({j}\right) \preceq \sup g \left[{I}\right]$
+Since $\preceq$ is [[Definition:Transitive Relation|transitive]]:
+:$x = f \left({j}\right) \preceq \sup g \left[{I}\right]$
+Since this holds for all $x \in f \left[{I}\right]$, $\sup g \left[{I}\right]$ is an [[Definition:Upper Bound of Set|upper bound]] of $f \left[{I}\right]$.
+Thus by the definition of [[Definition:Supremum of Set|supremum]]:
+:$\sup f \left[{I}\right] \preceq \sup g \left[{I}\right]$
+{{qed}}
+[[Category:Order Theory]]
+[[Category:Increasing Mappings]]
+\end{proof}<|endoftext|>
+\section{Reflexive Closure of Strict Ordering is Ordering}
+Tags: Order Theory, Reflexive Closures
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]].
+Let $\prec$ be a [[Definition:Strict Ordering|strict ordering]] on $S$.
+Let $\preceq$ be the [[Definition:Reflexive Closure|reflexive closure]] of $\prec$.
+Then $\preceq$ is an [[Definition:ordering|ordering]].
+\end{theorem}
+
+\begin{proof}
+Since $\prec$ is a [[Definition:Strict Ordering|strict ordering]], it is by definition [[Definition:Transitive Relation|transitive]] and [[Definition:Asymmetric Relation|asymmetric]].
+By [[Asymmetric Relation is Antisymmetric]], $\prec$ is [[Definition:Antisymmetric Relation|antisymmetric]].
+Thus by [[Reflexive Closure of Transitive Antisymmetric Relation is Ordering]], $\preceq$ is an [[Definition:Ordering|ordering]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Reflexive Closure is Reflexive}
+Tags: Reflexive Closures
+
+\begin{theorem}
+Let $\mathcal R$ be a [[Definition:Binary Relation|relation]] on a [[Definition:Set|set]] $S$.
+Then $\mathcal R^=$, the [[Definition:Reflexive Closure|reflexive closure]] of $\mathcal R$, is [[Definition:Reflexive Relation|reflexive]].
+\end{theorem}
+
+\begin{proof}
+Recall the definition of [[Definition:Reflexive Closure/Union with Diagonal|reflexive closure]]:
+:$\mathcal R^= := \mathcal R \cup \Delta_S$
+From [[Set is Subset of Union]]:
+:$\Delta_S \subseteq \mathcal R^=$
+The result follows directly from [[Relation Contains Diagonal Relation iff Reflexive]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Reflexive Closure}
+Tags: Reflexive Closures
+
+\begin{theorem}
+{{TFAE|def = Reflexive Closure}}
+Let $\RR$ be a [[Definition:Binary Relation|relation]] on a [[Definition:Set|set]] $S$.
+\end{theorem}
+
+\begin{proof}
+Let $\RR$ be a [[Definition:Endorelation|relation]] on a [[Definition:set|set]] $S$.
+=== [[Definition:Reflexive Closure/Union with Diagonal|Union with Diagonal]] is [[Definition:Reflexive Closure/Smallest Reflexive Superset|Smallest Reflexive Superset]] ===
+Let $\Delta_S$ be the [[Definition:Diagonal Relation|diagonal relation]] on $S$.
+Let $\RR^= = \RR \cup \Delta_S$
+By [[Smallest Element is Unique]], at most one [[Definition:Endorelation|relation]] on $S$ can be the [[Definition:Smallest Set by Set Inclusion|smallest]] [[Definition:Reflexive Relation|reflexive]] [[Definition:Superset|superset]] of $\RR$.
+From [[Subset of Union]]:
+:$\RR \subseteq \RR^=$
+:$\Delta_S \subseteq \RR^=$
+By [[Relation Contains Diagonal Relation iff Reflexive]], $\RR^=$ is [[Definition:Reflexive Relation|reflexive]].
+Thus $\RR^=$ is a [[Definition:Reflexive Relation|reflexive relation]] containing $\RR$.
+Again by [[Relation Contains Diagonal Relation iff Reflexive]], ''every'' reflexive relation containing $\RR$ must also contain $\Delta_S$.
+From [[Union is Smallest Superset]], it follows that $\RR^=$ is the [[Definition:Smallest Set by Set Inclusion|smallest]] [[Definition:Reflexive Relation|reflexive relation]] on $S$ which [[Definition:Subset|contains]] $\RR$.
+{{qed|lemma}}
+=== [[Definition:Reflexive Closure/Intersection of Reflexive Supersets|Intersection of Reflexive Supersets]] is [[Definition:Reflexive Closure/Union with Diagonal|Union with Diagonal]] ===
+Let $\QQ$ be the [[Definition:set|set]] of all [[Definition:Reflexive Relation|reflexive relations]] containing $\RR$ as a [[Definition:Subset|subset]].
+Let $\RR^= = \bigcap \QQ$.
+By the above proof that $\RR \cup \Delta_S$ is a reflexive relation containing $\RR$:
+:$\RR \cup \Delta_S \in \QQ$
+By [[Intersection is Subset]]:
+:$\RR^= \subseteq \RR \cup \Delta_S$
+By the above proof that $\RR \cup \Delta_S$ is the smallest reflexive relation containing $\RR$:
+:$\forall \PP \in \QQ: \RR \cup \Delta_S \subseteq \PP$
+By [[Intersection is Largest Subset/Set of Sets|Intersection is Largest Subset]]:
+:$\RR \cup \Delta_S \subseteq \RR^=$
+Thus by definition of [[Definition:Set Equality/Definition 2|set equality]]:
+:$\RR^= = \RR \cup \Delta_S$
+{{qed}}
+[[Category:Reflexive Closures]]
+\end{proof}<|endoftext|>
+\section{Reflexive Closure is Inflationary}
+Tags: Reflexive Closures
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $R$ denote the set of all [[Definition:Endorelation|endorelations]] on $S$.
+Then the [[Definition:Reflexive Closure|reflexive closure]] operator is an [[Definition:Inflationary Mapping|inflationary mapping]] on $R$.
+\end{theorem}
+
+\begin{proof}
+Let $\mathcal R \in R$.
+The [[Definition:Reflexive Closure/Union with Diagonal|reflexive closure]] $\mathcal R^=$ of $\mathcal R$ is defined as:
+:$\mathcal R^= := \mathcal R \cup \Delta_S$
+From [[Set is Subset of Union]]:
+:$\mathcal R \subseteq \mathcal R^=$
+Hence the [[Definition:Reflexive Closure|reflexive closure]] operator is an [[Definition:Inflationary Mapping|inflationary mapping]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Reflexive Closure is Order Preserving}
+Tags: Reflexive Closures
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $R$ denote the set of all [[Definition:Endorelation|endorelations]] on $S$.
+Then the [[Definition:Reflexive Closure|reflexive closure]] operator is an [[Definition:Increasing Mapping|order preserving mapping]] on $R$.
+That is:
+:$\forall \RR, \SS \in R: \RR \subseteq \SS \implies \mathcal R^= \subseteq \SS^=$
+where $\RR^=$ and $\SS^=$ denote the reflexive closure of $\RR$ and $\SS$ respectively.
+\end{theorem}
+
+\begin{proof}
+Let $\RR, \SS \in R$.
+Suppose:
+:$\RR \subseteq \SS$
+Their respective [[Definition:Reflexive Closure/Union with Diagonal|reflexive closures]] $\RR^=$ and $\SS^=$ are defined as:
+:$\RR^= := \RR \cup \Delta_S$
+:$\SS^= := \SS \cup \Delta_S$
+Hence by [[Set Union Preserves Subsets/Corollary|Corollary to Set Union Preserves Subsets]]:
+:$\RR^= \subseteq \SS^=$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Reflexive Closure is Idempotent}
+Tags: Reflexive Closures
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $R$ denote the set of all [[Definition:Endorelation|endorelations]] on $S$.
+Then the [[Definition:Reflexive Closure|reflexive closure]] operator is an [[Definition:Idempotent Mapping|idempotent mapping]] on $R$.
+That is:
+:$\forall \RR \in R: \RR^= = \paren {\RR^=}^=$
+where $\RR^=$ denotes the reflexive closure of $\RR$.
+\end{theorem}
+
+\begin{proof}
+Let $\RR \in R$.
+By the definition of [[Definition:Reflexive Closure/Union with Diagonal|reflexive closure]]:
+:$\RR^= = \RR \cup \Delta_S$
+:$\paren {\RR^=}^= = \paren {\RR \cup \Delta_S} \cup \Delta_S$
+By [[Union is Associative]]:
+:$\paren {\RR^=}^= = \RR \cup \paren {\Delta_S \cup \Delta_S}$
+By [[Union is Idempotent]]:
+:$\paren {\RR^=}^= = \RR \cup \Delta_S$
+Hence:
+:$\forall \RR \in R: \RR^= = \paren {\RR^=}^=$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Transitive Closure Always Exists (Relation Theory)}
+Tags: Transitive Closures
+
+\begin{theorem}
+Let $\RR$ be a [[Definition:Relation|relation]] on a [[Definition:Set|set]] $S$.
+Then the [[Definition:Transitive Closure (Relation Theory)|transitive closure]] $\RR^+$ of $\RR$ always exists.
+\end{theorem}
+
+\begin{proof}
+First, note that there exists at least one [[Definition:Transitive Relation|transitive relation]] containing $\mathcal R$.
+That is, the [[Definition:Trivial Relation|trivial relation]] $S \times S$, which is [[Trivial Relation is Equivalence|an equivalence]] and therefore [[Definition:Transitive Relation|transitive]] by [[Definition:Equivalence Relation|definition]].
+Next, note that the [[Intersection of Transitive Relations is Transitive]].
+Hence the [[Definition:Transitive Closure (Relation Theory)|transitive closure]] of $\mathcal R$ is the [[Definition:Set Intersection|intersection]] of all [[Definition:Transitive Relation|transitive relations]] containing $\mathcal R$.
+\end{proof}
+
+\begin{proof}
+Note that the [[Definition:Trivial Relation|trivial relation]] $\mathcal T = S \times S$ on $S$ contains $\mathcal R$, by definition.
+Further, $\mathcal T$ is [[Definition:Transitive Relation|transitive]] by [[Trivial Relation is Equivalence]].
+Thus there is at least one [[Definition:Transitive Relation|transitive relation]] on $S$ that contains $\mathcal R$.
+Now define $\mathcal R^\cap$ as the [[Definition:Set Intersection|intersection]] of all [[Definition:Transitive Relation|transitive relations]] on $S$ that contain $\mathcal R$:
+:$\displaystyle \mathcal R^\cap := \bigcap \left\{{\mathcal R': \text{$\mathcal R'$ is transitive and $\mathcal R \subseteq \mathcal R'$}}\right\}$
+By [[Intersection of Transitive Relations is Transitive]], $\mathcal R^\cap$ is also a [[Definition:Transitive Relation|transitive relation]] on $S$.
+By [[Set Intersection Preserves Subsets]], it also holds that $\mathcal R \subseteq \mathcal R^\cap$.
+Lastly, by [[Intersection is Subset]], for any [[Definition:Transitive Relation|transitive relation]] $\mathcal R'$ containing $\mathcal R$, it must be that $\mathcal R^\cap \subseteq \mathcal R'$.
+Thus $\mathcal R^\cap$ is indeed the [[Definition:Minimal Element|minimal]] [[Definition:Transitive Relation|transitive relation]] on $S$ containing $\mathcal R$.
+That is, $\mathcal R^+ = \mathcal R^\cap$, and thence the [[Definition:Transitive Closure of Relation|transitive closure]] of $\mathcal R$ exists.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Transitive Closure Always Exists (Relation Theory)/Proof 1}
+Tags: Transitive Closures
+
+\begin{theorem}
+Let $\mathcal R$ be a [[Definition:Relation|relation]] on a [[Definition:Set|set]] $S$.
+Then the [[Definition:Transitive Closure (Relation Theory)|transitive closure]] $\mathcal R^+$ of $\mathcal R$ always exists.
+\end{theorem}
+
+\begin{proof}
+First, note that there exists at least one [[Definition:Transitive Relation|transitive relation]] containing $\mathcal R$.
+That is, the [[Definition:Trivial Relation|trivial relation]] $S \times S$, which is [[Trivial Relation is Equivalence|an equivalence]] and therefore [[Definition:Transitive Relation|transitive]] by [[Definition:Equivalence Relation|definition]].
+Next, note that the [[Intersection of Transitive Relations is Transitive]].
+Hence the [[Definition:Transitive Closure (Relation Theory)|transitive closure]] of $\mathcal R$ is the [[Definition:Set Intersection|intersection]] of all [[Definition:Transitive Relation|transitive relations]] containing $\mathcal R$.
+\end{proof}<|endoftext|>
+\section{Transitive Closure Always Exists (Relation Theory)/Proof 2}
+Tags: Transitive Closures
+
+\begin{theorem}
+Let $\mathcal R$ be a [[Definition:Relation|relation]] on a [[Definition:Set|set]] $S$.
+Then the [[Definition:Transitive Closure of Relation|transitive closure]] $\mathcal R^+$ of $\mathcal R$ always exists.
+\end{theorem}
+
+\begin{proof}
+Note that the [[Definition:Trivial Relation|trivial relation]] $\mathcal T = S \times S$ on $S$ contains $\mathcal R$, by definition.
+Further, $\mathcal T$ is [[Definition:Transitive Relation|transitive]] by [[Trivial Relation is Equivalence]].
+Thus there is at least one [[Definition:Transitive Relation|transitive relation]] on $S$ that contains $\mathcal R$.
+Now define $\mathcal R^\cap$ as the [[Definition:Set Intersection|intersection]] of all [[Definition:Transitive Relation|transitive relations]] on $S$ that contain $\mathcal R$:
+:$\displaystyle \mathcal R^\cap := \bigcap \left\{{\mathcal R': \text{$\mathcal R'$ is transitive and $\mathcal R \subseteq \mathcal R'$}}\right\}$
+By [[Intersection of Transitive Relations is Transitive]], $\mathcal R^\cap$ is also a [[Definition:Transitive Relation|transitive relation]] on $S$.
+By [[Set Intersection Preserves Subsets]], it also holds that $\mathcal R \subseteq \mathcal R^\cap$.
+Lastly, by [[Intersection is Subset]], for any [[Definition:Transitive Relation|transitive relation]] $\mathcal R'$ containing $\mathcal R$, it must be that $\mathcal R^\cap \subseteq \mathcal R'$.
+Thus $\mathcal R^\cap$ is indeed the [[Definition:Minimal Element|minimal]] [[Definition:Transitive Relation|transitive relation]] on $S$ containing $\mathcal R$.
+That is, $\mathcal R^+ = \mathcal R^\cap$, and thence the [[Definition:Transitive Closure of Relation|transitive closure]] of $\mathcal R$ exists.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Transitive Closure (Relation Theory)/Union of Compositions is Smallest}
+Tags: Equivalence of Definitions of Transitive Closure (Relation Theory)
+
+\begin{theorem}
+Let $\RR$ be a [[Definition:Endorelation|relation]] on a [[Definition:Set|set]] $S$.
+Let:
+:$\RR^n := \begin{cases}
+\RR & : n = 0 \\
+\RR^{n - 1} \circ \RR & : n > 0
+\end{cases}$
+where $\circ$ denotes [[Definition:Composition of Relations|composition of relations]].
+{{explain|Really? I would have thought $\RR^1 {{=}} \RR$, not $\RR^0 {{=}} \RR$. If anything, the [[Definition:Diagonal Relation|diagonal relation]] $\Delta_S$ should be $\RR^0$.}}
+Finally, let:
+:$\displaystyle \RR^+ = \bigcup_{i \mathop \in \N} \RR^i$
+Then $\RR^+$ is the [[Definition:Smallest Set by Set Inclusion|smallest]] [[Definition:Transitive Relation|transitive relation]] on $S$ that [[Definition:Subset|contains]] $\RR$.
+\end{theorem}
+
+\begin{proof}
+==== $\RR^+$ is Transitive ====
+By [[Relation contains Composite with Self iff Transitive]], we can prove that $\RR^+$ is [[Definition:Transitive Relation|transitive]] by proving the following:
+:$\RR^+ \circ \RR^+ \subseteq \RR^+$
+Let $\tuple {a, c} \in \RR^+ \circ \RR^+$.
+Then:
+:$\exists b \in S: \tuple {a, b} \in \RR^+, \tuple {b, c} \in \RR^+$
+Thus:
+:$\exists n \in \N: \tuple {a, b} \in \RR^n$
+:$\exists m \in \N: \tuple {b, c} \in \RR^m$
+From [[Composition of Relations is Associative]]:
+:$\RR^{n + m} = \RR^n \circ \RR^m$
+so:
+:$\tuple {a, c} \in \RR^{n + m} \subseteq \RR^+$
+Since this holds for all $\tuple {a, c} \in \RR^+ \circ \RR^+$:
+:$\RR^+ \circ \RR^+ \subseteq \RR^+$
+Thus $\RR^+$ is [[Definition:Transitive Relation|transitive]].
+{{qed|lemma}}
+==== $\RR^+$ contains $\RR$ ====
+$\RR \subseteq \RR^+$ by [[Set is Subset of Union/Family of Sets|Set is Subset of Union]].
+==== $\RR^+$ is Smallest ====
+Let $\RR'$ be a [[Definition:Transitive Relation|transitive relation]] on $S$ such that $\RR \subseteq \RR'$.
+We must show that $\RR^+ \subseteq \RR'$.
+Let $\tuple {a, b} \in \RR^+$.
+That is:
+:$a \mathrel {\RR^+} b$
+Then:
+:$\exists n \in \N: \tuple {a, b} \in \RR^n$
+Thus by the definition of [[Definition:Composition of Relations|composition of relations]], there exists $x_{n-1} \in S$ such that:
+:$a \mathrel {\RR^{n - 1} } x_{n - 1} \land x_{n - 1} \mathrel \RR b$
+Likewise there exists $x_{n-2} \in S$ such that:
+:$a \mathrel {\RR^{n - 2} } x_{n - 2} \land x_{n - 2} \mathrel \RR x_{n - 1}$
+
+And so forth there exist elements $x_0, \dots, x_n \in S$ such that:
+:$x_0 = a$
+:$x_n = b$
+:$\forall k \in \N_n: x_k \mathrel \RR x_{k + 1}$
+Since $\RR \subseteq \RR'$:
+:$\forall k \in \N_n: x_k \mathrel {\RR'} x_{k + 1}$
+Since $\RR'$ is [[Definition:Transitive Relation|transitive]]:
+:$a \mathrel {\RR'} b$
+That is:
+:$\tuple {a, b} \in \RR'$
+Since this holds for all $\tuple {a, b} \in \RR^+$:
+:$\RR^+ \subseteq \RR'$
+Since this holds for all transitive relations $\RR'$ that contain $\RR$:
+$\RR^+$ is the [[Definition:Smallest Set by Set Inclusion|smallest]] transitive relation containing $\RR$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Transitive Closure (Relation Theory)/Finite Chain Equivalent to Union of Compositions}
+Tags: Equivalence of Definitions of Transitive Closure (Relation Theory)
+
+\begin{theorem}
+The [[Definition:Transitive Closure (Relation Theory)/Finite Chain|finite chain]] and [[Definition:Transitive Closure (Relation Theory)/Union of Compositions|union of compositions]] definitions of '''transitive closure''' are equivalent.
+\end{theorem}
+
+\begin{proof}
+{{explain|more detail required}}
+Follows from the definition of [[Definition:Composition of Relations|composition of relations]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Chasles' Relation}
+Tags: Affine Geometry
+
+\begin{theorem}
+Let $\mathcal E$ be an [[Definition:Affine Space|affine space]].
+Let $p, q, r \in \mathcal E$ be points.
+Then:
+:$\vec{p q} = \vec{p r} + \vec{r q}$
+\end{theorem}
+
+\begin{proof}
+We have:
+{{begin-eqn}}
+{{eqn|l = \vec{p r} + \vec{r q}
+ |r = \left({r - p}\right) + \left({q - r}\right)
+ |c = Definition [[Definition:Vector (Affine Geometry)|Vector in Affine Space]]
+}}
+{{eqn|r = \left({r + \left({q - r}\right)}\right) - p
+ |c = Definition of [[Definition:Affine Space|Affine Space: axiom $(A3)$]]
+}}
+{{eqn|r = q - p
+ |c = Definition of [[Definition:Affine Space|Affine Space: axiom $(A1)$]]
+}}
+{{eqn|r = \vec{p q}
+ |c = Definition [[Definition:Vector (Affine Geometry)|Vector in Affine Space]]
+}}
+{{end-eqn}}
+{{Qed}}
+{{namedfor|Michel Chasles|cat=Chasles}}
+[[Category:Affine Geometry]]
+\end{proof}<|endoftext|>
+\section{Affine Coordinates are Well-Defined}
+Tags: Affine Geometry
+
+\begin{theorem}
+Let $\mathcal E$ be an [[Definition:Affine Space|affine space]] with [[Definition:Difference Space|difference space]] $V$ over a [[Definition:Field (Abstract Algebra)|field]] $k$.
+Let $\mathcal R = \left({p_0, e_1, \ldots, e_n}\right)$ be an [[Definition:Affine Frame|affine frame]] in $\mathcal E$.
+Define a mapping $\Theta_{\mathcal R} : k^n \to \mathcal E$ by:
+:$\displaystyle \Theta_\mathcal R \left({\lambda_1, \ldots, \lambda_n}\right) = p_0 + \sum_{i \mathop = 1}^n \lambda_i e_i$
+Then $\Theta_\mathcal R$ is a [[Definition:Bijection|bijection]].
+\end{theorem}
+
+\begin{proof}
+=== Proof of Surjection ===
+Let $p \in \mathcal E$.
+Let $v = p - p_0 \in V$.
+Let $\left({\lambda_1, \ldots, \lambda_n}\right)$ be [[Definition:Coordinate|coordinates]] of $v$ in the [[Definition:Ordered Basis|basis]] $\left({e_1, \ldots, e_n}\right)$.
+Then:
+{{begin-eqn}}
+{{eqn|l = p_0 + \sum_{i \mathop = 1}^n \lambda_ie_i
+ |r = p_0 + v
+}}
+{{eqn|r = p_0 + \left({p - p_0}\right)
+}}
+{{eqn|r = p
+}}
+{{end-eqn}}
+thus demonstrating that $\Theta_\mathcal R$ is a [[Definition:Surjection|surjection]].
+=== Proof of Injection ===
+Let:
+:$\Theta_\mathcal R \left({\lambda_1, \ldots, \lambda_n}\right) = \Theta_\mathcal R \left({\mu_1, \ldots, \mu_n}\right)$
+That is:
+:$\displaystyle p_0 + \sum_{i \mathop = 1}^n \lambda_i e_i = p_0 + \sum_{i \mathop = 1}^n \mu_i e_i$
+Then by $(3)$ of [[Properties of Affine Spaces]]:
+:$\displaystyle \sum_{i \mathop = 1}^n \lambda_i e_i = \sum_{i \mathop = 1}^n \mu_i e_i$
+By [[Expression of Vector as Linear Combination from Basis is Unique]]:
+: $\lambda_i = \mu_i$
+for $i = 1, \ldots, n$.
+Hence the result.
+{{Qed}}
+[[Category:Affine Geometry]]
+o23me65r2z1l0vvbrkmcidnuk3vnftm
+\end{proof}<|endoftext|>
+\section{Barycenter Exists and is Well Defined}
+Tags: Affine Geometry
+
+\begin{theorem}
+Let $\mathcal E$ be an [[Definition:Affine Space|affine space]] over a [[Definition:Field (Abstract Algebra)|field]] $k$.
+Let $p_1, \ldots, p_n \in \mathcal E$ be points.
+Let $\lambda_1, \ldots, \lambda_n \in k$ such that $\displaystyle \sum_{i \mathop = 1}^n \lambda_i = 1$.
+Then the [[Definition:Barycentre|barycentre]] of $p_1, \ldots, p_n$ with weights $\lambda_1, \ldots, \lambda_n$ exists and is unique.
+\end{theorem}
+
+\begin{proof}
+Let $r$ be any point in $\mathcal E$.
+Set:
+:$\displaystyle q = r + \sum_{i \mathop = 1}^n \lambda_i \vec{r p_i}$
+We are required to prove that for any other point $m \in \mathcal E$:
+:$\displaystyle q = m + \sum_{i \mathop = 1}^n \lambda_i \vec{m p_i}$
+So:
+{{begin-eqn}}
+{{eqn | l = m + \sum_{i \mathop = 1}^n \lambda_i \vec{m p_i}
+ | r = m + \sum_{i \mathop = 1}^n \lambda_i \left({\vec{m r} + \vec{r p_i} }\right)
+ | c = [[Chasles' Relation]]
+}}
+{{eqn | r = m + \left(\sum_{i \mathop = 1}^n \lambda_i\right) \vec{m r} + \sum_{i \mathop = 1}^n \lambda_i \vec{r p_i}
+}}
+{{eqn | r = m + \vec{m r} + \sum_{i \mathop = 1}^n \lambda_i \vec{r p_i}
+ | c = by the assumption $\displaystyle \sum_{i \mathop = 1}^n \lambda_i = 1$
+}}
+{{eqn | r = r + \sum_{i \mathop = 1}^n \lambda_i \vec{r p_i}
+ | c = Axiom $(1)$ for an [[Definition:Affine Space|affine space]]
+}}
+{{eqn | r = q
+ | c = Definition of $q$
+}}
+{{end-eqn}}
+Hence the result.
+{{Qed}}
+[[Category:Affine Geometry]]
+0pqxk9fc9gk6u8augbzooca1p170whd
+\end{proof}<|endoftext|>
+\section{Transitive Chaining}
+Tags: Transitive Relations
+
+\begin{theorem}
+Let $\mathcal R$ be a [[Definition:Transitive Relation|transitive relation]] on a [[Definition:Set|set]] $S$.
+Let $n \in \N$ be a [[Definition:Natural Number|natural number]].
+Let $n \ge 2$.
+Let $\langle x_k \rangle_{k \in \left\{ {1, 2, \dots, n}\right\} }$ be a [[Definition:Sequence of n Terms|sequence of $n$ terms]].
+For each $k \in \left\{ {1, 2, \dots, n-1}\right\}$, let $x_k \mathrel {\mathcal R} x_{k+1}$.
+That is, let $x_1 \mathrel {\mathcal R} x_2$, $x_2 \mathrel {\mathcal R} x_3, \dotsc, x_{n - 1} \mathrel {\mathcal R} x_n$.
+Then $x_1 \mathrel {\mathcal R} x_n$
+\end{theorem}
+
+\begin{proof}
+The proof proceeds by [[Principle of Mathematical Induction|induction]] on $n$, the number of terms in the [[Definition:Finite Sequence|sequence]].
+We first define a [[Definition:Propositional Function|propositional function]], $P$, as follows:
+For each $n \in \N$ such that $n \ge 2$, let $P(n)$ be the [[Definition:Proposition|proposition]] that if both of the following hold:
+: $\langle x_k \rangle_{k \in \left\{ {1, 2, \dots, n}\right\} }$ is a [[Definition:Sequence of n Terms|sequence of $n$ terms]]
+: $\forall k \in \left\{ {1, 2, \dots, n-1}\right\}: x_k \mathrel {\mathcal R} x_{k+1}$
+then $x_1 \mathrel {\mathcal R} x_n$.
+=== Basis for the Induction ===
+The case $n = 2$ is verified as follows:
+In this case, the [[Definition:Finite Sequence|sequence]] has only two elements, $x_1$ and $x_2$.
+Thus by the premise, $x_1 \mathrel {\mathcal R} x_2$, so $P(2)$ holds.
+This is the [[Principle of Mathematical Induction#Basis for the Induction|basis for the induction]].
+=== Induction Hypothesis ===
+Fix $n \in \N$ with $n \ge 2$.
+Suppose that $P(n)$ holds.
+This is our [[Principle of Mathematical Induction#Induction Hypothesis|induction hypothesis]].
+=== Induction Step ===
+This is our [[Principle of Mathematical Induction#Induction Step|induction step]]:
+Let $\langle x_k \rangle_{k \in \left\{ {1, 2, \dots, n, n+1}\right\} }$ be a [[Definition:Sequence of n Terms|sequence of $n+1$ terms]].
+For each $k \in \left\{ {1, 2, \dots, n-1, n}\right\}$, let $x_k \mathrel{\mathcal R} x_{k+1}$.
+In particular, $x_n \mathrel{\mathcal R} x_{n+1}$.
+By the [[#Induction Hypothesis|induction hypothesis]]:
+:$x_1 \mathrel{\mathcal R} x_n$.
+Thus since $\mathcal R$ is [[Definition:Transitive Relation|transitive]]:
+:$x_1 \mathrel {\mathcal R} x_{n+1}$
+We conclude that $P(n+1)$ holds.
+The result follows by the [[Principle of Mathematical Induction]].
+{{qed}}
+[[Category:Transitive Relations]]
+a0r2ufbrr8fyp3k1s5ddqbgb6lgb7ar
+\end{proof}<|endoftext|>
+\section{Szpilrajn Extension Theorem}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\struct {S, \prec}$ be a [[Definition:Strictly Ordered Set|strictly ordered set]].
+{{Disambiguate|Definition:Strictly Ordered Set}}
+Then there is a [[Definition:Strict Total Ordering|strict total ordering]] on $S$ of which $\prec$ is a [[Definition:Subset|subset]].
+\end{theorem}
+
+\begin{proof}
+{{proof wanted}}
+{{Namedfor|Edward Szpilrajn|cat = Marczewski}}
+[[Category:Order Theory]]
+lji5i6b8gp30a7jge1hz1i76m5xmudk
+\end{proof}<|endoftext|>
+\section{Strict Ordering can be Expanded to Compare Additional Pair}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\left({S, \prec}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $a$ and $b$ be distinct, [[Definition:Non-Comparable|$\prec$-incomparable]] elements of $S$.
+That is, let:
+:$a \not \prec b$ and $b \not \prec a$.
+Let ${\prec'} = {\prec} \cup \left\{ {\left({a, b}\right)} \right\}$.
+Define a [[Definition:Endorelation|relation]] $\prec'^+$ by letting $p \prec'^+ q$ {{iff}}:
+: $p \prec q$
+or:
+: $p \preceq a$ and $b \preceq q$
+where $\preceq$ is the [[Definition:Reflexive Closure|reflexive closure]] of $\prec$.
+Then:
+: $\prec'^+$ is a [[Definition:Strict Ordering|strict ordering]]
+: $\prec'^+$ is the [[Definition:Transitive Closure (Relation Theory)|transitive closure]] of $\prec'$.
+\end{theorem}
+
+\begin{proof}
+First, note that since $\prec$ is a [[Definition:Strict Ordering|strict ordering]], $\preceq$ is an [[Definition:Ordering|ordering]] by [[Reflexive Closure of Strict Ordering is Ordering]].
+=== $a$ and $b$ are $\preceq$-incomparable ===
+Suppose that $a \preceq b$.
+By the definition of [[Definition:Reflexive Closure|reflexive closure]], either $a \prec b$ or $a = b$.
+Each possibility contradicts one of the premises, so $a \not \preceq b$.
+For the same reasons, $b \not \preceq a$.
+{{qed|lemma}}
+=== $\prec'^+$ is antireflexive ===
+Let $p \in S$.
+Then by the definition of [[Definition:Strict Ordering|strict ordering]]:
+$p \not\prec p$.
+Suppose that $p \preceq a$ and $b \preceq p$.
+Then since $\preceq$ is [[Definition:Transitive Relation|transitive]], $b \preceq a$, contradicting the fact that $a$ and $b$ are [[Definition:Non-Comparable|$\preceq$-incomparable]].
+Since neither $p \prec p$ nor $\paren {p \preceq a} \land \paren {b \preceq p}$ holds:
+:$p \not \prec'^+ p$.
+Since this is the case for all $p \in S$, $\prec'^+$ is [[Definition:Antireflexive Relation|antireflexive]].
+{{qed|lemma}}
+=== $\prec'^+$ is transitive ===
+Let $p \prec'^+ q$ and $q \prec'^+ r$.
+Then there are three possibilities:
+$(1): \quad p \prec q$ and $q \prec r$
+Because $\prec$ is [[Definition:Transitive Relation|transitive]]:
+:$p \prec r$
+Thus:
+:$p \prec'^+ r$
+$(2): \quad p \prec q$, $q \preceq a$, and $b \preceq r$
+By [[Extended Transitivity]], $p \prec a$.
+Then $p \preceq a$, and since $p \preceq a$ and $b \preceq r$:
+:$p \prec'^+ r$
+$(3): \quad p \preceq a$, $b \preceq q$, and $q \prec r$
+By [[Extended Transitivity]]:
+:$b \prec r$
+Therefore:
+:$b \preceq r$
+Since $p \preceq a$ and $b \preceq r$:
+: $p \prec'^+ r$
+Note that it is impossible to have $p \preceq a$, $b \preceq q$, $q \preceq a$ and $b \preceq r$.
+If that were so, $b \preceq q$ and $q \preceq a$ together would imply by transitivity that $b \preceq a$.
+But this contradicts the fact that $a$ and $b$ are [[Definition:Non-Comparable|$\preceq$-incomparable]].
+Thus in all cases, $p \prec'^+ q$ and $q \prec'^+ r$ imply $p \prec'^+ r$, so $\prec'^+$ is [[Definition:Transitive Relation|transitive]].
+{{qed|lemma}}
+Since $\prec'^+$ is [[Definition:Transitive Relation|transitive]] and [[Definition:Antireflexive Relation|antireflexive]], it is by definition a [[Definition:Strict Ordering|strict ordering]].
+=== $\prec'^+$ is the transitive closure of $\prec'$ ===
+First note that $\prec'$ is a [[Definition:Subset|subset]] of $\prec'^+$:
+If $p \prec' q$ then either
+:$p \prec q$ or
+:$p = a$ and $q = b$
+If $p \prec q$ then $p \prec'^+ q$ by definition.
+If $p = a$ and $q = b$, then $p \preceq a$ and $b \preceq q$ by the definition of $\preceq$.
+Thus $p \prec'^+ q$ by the definition of $\prec'^+$.
+Let $\mathcal R$ be a [[Definition:Transitive Relation|transitive relation]] [[Definition:Subset|containing]] $\prec'$.
+Let $p, q \in S$ and let $p \prec'^+ q$.
+Then either:
+:$p \prec q$ or
+:$p \preceq a$ and $b \preceq q$
+If $p \prec q$, then by the definition of [[Definition:Subset|subset]], $p \mathrel {\mathcal R} q$.
+Suppose instead that $p \preceq a$ and $b \preceq q$. By the definition of $\prec'$, $a \prec' b$.
+Since [[Reflexive Closure is Closure Operator]], it is [[Definition:Increasing Mapping|increasing]], so $\preceq \subseteq \mathcal R^=$.
+Thus $p \mathrel {\mathcal R}^= a$, $a \mathrel {\mathcal R}^= b$, and $b \mathrel {\mathcal R}^= q$.
+Since $\mathcal R$ is transitive, $\mathcal R^=$ is as well, by [[Reflexive Closure of Transitive Relation is Transitive]].
+Thus $p \mathrel {\mathcal R}^= q$.
+Since $\prec'^+$ is [[Definition:Antireflexive Relation|antireflexive]] and we assumed $p \prec'^+ q$, we conclude that $p \ne q$.
+Thus by the definition of [[Definition:Reflexive Closure|reflexive closure]]:
+:$p \mathrel {\mathcal R} q$
+Thus we have shown that $\prec'^+$ is a subset of $\mathcal R$.
+Since this holds when $\mathcal R$ is any transitive superset of $\prec'$, $\prec'^+$ is the [[Definition:Transitive Closure (Relation Theory)/Smallest Transitive Superset|transitive closure]] of $\prec'$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Characterization of Interior of Triangle}
+Tags: Topology
+
+\begin{theorem}
+Let $\triangle$ be a [[Definition:Triangle (Geometry)|triangle]] embedded in $\R^2$.
+Denote the [[Definition:Vertex (Geometry)|vertices]] of $\triangle$ as $A_1, A_2, A_3$.
+For $i \in \set {1, 2, 3}$, put $j = i \bmod 3 + 1$, $k = \paren {i + 1} \bmod 3 + 1$, and:
+:$U_i = \set {A_i + s t \paren {A_j - A_i} + \paren {1 - s} t \paren {A_k - A_i} : s \in \openint 0 1, t \in \R_{>0} }$
+Then:
+:$\displaystyle \Int \triangle = \bigcap_{i \mathop = 1}^3 U_i$
+where $\Int \triangle$ denotes the [[Definition:Interior of Jordan Curve|interior]] of the [[Definition:Boundary (Geometry)|boundary]] of $\triangle$.
+\end{theorem}
+
+\begin{proof}
+From [[Boundary of Polygon is Jordan Curve]], it follows that the [[Definition:Boundary (Geometry)|boundary]] of $\triangle$ is equal to the [[Definition:Image of Mapping|image]] of a [[Definition:Jordan Curve|Jordan curve]], so $\Int \triangle$ is well-defined.
+=== Interior is Subset ===
+Let $q \in \Int \triangle$.
+Let $i \in \set {1, 2, 3}$, and put $j = i \bmod 3 + 1$, $k = \paren {i + 1} \bmod 3 + 1$.
+Let $S_i$ be the [[Definition:Side of Polygon|side]] of $\triangle$ that is [[Definition:Adjacent (in Triangle)|adjacent]] to $A_i$ and $A_j$, let $S_j$ be the side adjacent to $A_j$ and $A_k$, and let $S_k$ be the side adjacent to $A_k$ and $A_i$.
+Define $\mathbf v = A_j - A_i \in \R^2$, $\mathbf w = A_k - A_i \in \R^2$, and define $\mathbf u = \mathbf v - \mathbf w$.
+Define two [[Definition:Ray (Geometry)|rays]] $\LL = \set {q + s \mathbf u: s \in \R_{\ge 0} }$, and $\LL' = \set {q + s' \paren {-\mathbf u} : s' \in \R_{\ge 0} }$.
+As both rays are [[Definition:Parallel Lines|parallel]] to $S_j$, $\LL$ or $\LL'$ can only [[Definition:Crossing (Jordan Curve)|cross]] $S_j$ if $S_i$ and $S_k$ lie on opposite sides of $S_j$.
+This would imply that there is a [[Definition:Convex Angle|non-convex angle]] in $\triangle$.
+As [[Sum of Angles of Triangle Equals Two Right Angles]] shows, this is impossible, as a non-convex angle is larger than two [[Definition:Right Angle|right angles]].
+Then, neither $\LL$ nor $\LL'$ crosses $S_j$.
+As $\LL \cup \LL'$ is a [[Definition:Straight Line|straight line]], and $\LL \cap \LL' = \set q$, it follows that $\LL$ and $\LL'$ cannot both intersect the same side.
+Then [[Jordan Polygon Interior and Exterior Criterion]] shows that $\LL$ and $\LL'$ each crosses one of the sides $S_i$ and $S_k$.
+When we denote the intersection points $p_1, p_2 \in \R^2$, we have:
+:$p_1 = A_i + r_1 \mathbf v = q + r \paren {\mathbf v - \mathbf w}$
+:$p_2 = A_i + r_2 \mathbf w = q + r' \paren {\mathbf v - \mathbf w}$
+for some $r_1, r_2 \in \openint 0 1$, where either $r, -r' \in \R_{>0}$ or $-r, r' \in \R_{>0}$.
+Subtracting the two equations gives:
+:$r_1 \mathbf v - r_2 \mathbf w = \paren {r - r'} \paren {\mathbf v - \mathbf w}$
+which can be rearranged as:
+:$\paren {r_1 + r' - r} \mathbf v - \paren {r_2 + r' - r} \mathbf w = \mathbf 0$
+As $\mathbf v$ and $\mathbf w$ are [[Definition:Direction|direction]] [[Definition:Vector|vectors]] for the adjacent sides $S_i$ and $S_k$, they cannot be parallel, so $\mathbf v$ and $\mathbf w$ are [[Definition:Linearly Independent/Sequence/Real Vector Space|linearly independent]].
+It follows that $0 = r_1 + r' - r = r_2 + r' - r$, so $r_1 = r_2$.
+Adding the two equations gives:
+:$2 A_i + r_1 \mathbf v + r_2 \mathbf w = 2 q + \paren {r + r'} \paren {\mathbf v - \mathbf w}$
+which can be rearranged to give an expression for $q$:
+{{begin-eqn}}
+{{eqn | l = q
+ | r = A_i + \paren {\dfrac 1 2 r_1 - \dfrac 1 2 r - \dfrac 1 2 r'} \mathbf v + \paren {\dfrac 1 2 r_1 + \dfrac 1 2 r + \dfrac 1 2 r'} \mathbf w
+ | c = as $r_1 = r_2$
+}}
+{{eqn | r = A_i + \paren {\dfrac 1 2 - \dfrac{r + r'} {2 r_1} } r_1 \mathbf v + \paren {1 - \paren {\dfrac 1 2 - \dfrac {r + r'} {2 r_1} } } r_1 \mathbf w
+}}
+{{end-eqn}}
+This shows that $q \in U_i$.
+As $i \in \set {1, 2, 3}$ was arbitrary, it follows that $\displaystyle q \in \bigcap_{i \mathop = 1}^3 U_i$.
+{{qed|lemma}}
+=== Interior is Superset ===
+Let $\displaystyle q \in \bigcap_{i \mathop = 1}^3 U_i$.
+For $i \in \set {1, 2, 3}$, define $j, k \in \set {1, 2, 3}$, the [[Definition:Side of Polygon|sides]] $S_i, S_j, S_k$ of $\triangle$, and their [[Definition:Direction|direction]] [[Definition:Vector|vectors]] $\mathbf v, \mathbf u, \mathbf w$ as in the section above.
+As $q \in U_i$, it follows that $q = A_i + s t \mathbf v + \paren {1 - s} t \mathbf w$ for some $s \in \openint 0 1$ and $t \in \R_{>0}$.
+Let $\LL = \set {q + r \paren {-\mathbf v}: r \in \R_{\ge 0} }$ be a [[Definition:Ray (Geometry)|ray]] with start point $q$.
+If $\LL$ [[Definition:Crossing (Jordan Curve)|crosses]] the side $S_i$ that $\LL$ is [[Definition:Parallel Lines|parallel]] to, then the intersection point is:
+:$A_i + r_1 \mathbf v = q - r \mathbf v$
+for some $r_1 \in \closedint 0 1, r \in \R_{\ge 0}$, which can be rearranged as:
+:$q = A_i + \paren {r_1 - r} \mathbf v + 0 \mathbf w$
+As $\mathbf v$ and $\mathbf w$ are [[Definition:Linearly Independent/Sequence/Real Vector Space|linearly independent]], this implies $\paren {1 - s} t = 0$.
+Then either $t = 0$ or $s = 1$, which is impossible, so $\LL$ does not intersect $S_i$.
+However, we will show that $\LL$ crosses $S_k$, as the intersection point is:
+{{begin-eqn}}
+{{eqn |l= A_i + r_2 \mathbf w
+ |r= q - r \mathbf v
+}}
+{{eqn |r= A_i + s t \mathbf v + \paren {1 - s} t \mathbf w - r \mathbf v
+}}
+{{end-eqn}}
+for some $r_2 \in \closedint 0 1$, $r \in \R_{>0}$, which implies:
+:$\mathbf 0 = \paren {s t - r} \mathbf v + \paren {t \paren {1 - s} - r_2} \mathbf w$
+As $\mathbf v$ and $\mathbf w$ are linearly independent, we have $0 = s t - r$ and $0 = t \paren {1 - s} - r_2$.
+Then $r = s t \in \R_{>0}$, and $r_2 = t \paren {1 - s} \in \R_{>0}$.
+We must show that $r_2 < 1$.
+As $q \in U_k$, we have $q = A_k + s't' \paren {-\mathbf w} + t' \paren {1 - s'} \mathbf u$ for some $s' \in \openint 0 1$, $t' \in \R_{>0}$.
+As $A_k = A_i + \mathbf w$, we have:
+:$q - r \mathbf v = A_i + \mathbf w - s' t' \mathbf w + t' \paren {1 - s'} \mathbf v - t' \paren {1 - s'} \mathbf w - r \mathbf v$
+As $q - r \mathbf v = A_i + r_2 \mathbf w$, we rearrange the equality to obtain:
+:$\mathbf 0 = \paren {t' \paren {1 - s'} - r} \mathbf v + \paren {1 - r_2 - s't' - t' \paren {1 - s'} } \mathbf w$
+As $\mathbf v$ and $\mathbf w$ are linearly independent, this gives two equations:
+:$0 = t' \paren {1 - s'} - r$
+:$0 = 1 - r_2 - s't' - t' \paren {1 - s'}$
+Adding these equations gives:
+:$s't' + r = 1 - r_2$
+As $s't' > 0$ and $r > 0$, it follows that $r_2 < 1$.
+Then, we have shown that $\LL$ crosses $S_k$.
+Let $\LL' = \set {q + r \mathbf v : r \in \R_{\ge 0} }$ be a ray with start point $q$.
+An argument similar to the one above shows that $\LL'$ crosses the side $S_j$.
+As $\LL \cup \LL'$ is a [[Definition:Straight Line|straight line]], $\LL$ and $\LL'$ cannot both intersect the same side.
+It follows that $\LL$ has one crossing of the [[Definition:Boundary (Geometry)|boundary]] of $\triangle$, so the [[Definition:Crossing (Jordan Curve)/Parity|parity]] of $q$ is $\map {\mathrm {par} } q = 1$.
+From [[Jordan Polygon Interior and Exterior Criterion]], it follows that $q \in \Int \triangle$.
+{{qed|lemma}}
+The result now follows by definition of [[Definition:Set Equality/Definition 2|set equality]].
+{{qed}}
+[[Category:Topology]]
+4iuiu6w17z89omsjuyr48ykh9htwp6p
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Reflexive Transitive Closure}
+Tags: Reflexive Closures, Transitive Closures
+
+\begin{theorem}
+Let $\mathcal R$ be a [[Definition:Endorelation|relation]] on a [[Definition:set|set]] $S$.
+{{TFAE|def = Reflexive Transitive Closure}}
+\end{theorem}
+
+\begin{proof}
+The result follows from:
+:[[Transitive Closure of Reflexive Relation is Reflexive]]
+:[[Reflexive Closure of Transitive Relation is Transitive]]
+:[[Composition of Compatible Closure Operators]]
+{{qed}}
+[[Category:Reflexive Closures]]
+[[Category:Transitive Closures]]
+bw3oi7eog31sgwy7klra98nk1gd0sr8
+\end{proof}<|endoftext|>
+\section{Intersection of Relation with Inverse is Symmetric Relation}
+Tags: Set Intersection, Inverse Relations, Symmetric Relations
+
+\begin{theorem}
+Let $\mathcal R$ be a [[Definition:Binary Relation|relation]] on a [[Definition:Set|set]] $S$.
+Then $\mathcal R \cap \mathcal R^{-1}$, the [[Definition:Set Intersection|intersection]] of $\mathcal R$ with its [[Definition:Inverse Relation|inverse]], is [[Definition:Symmetric Relation|symmetric]].
+\end{theorem}
+
+\begin{proof}
+Let $\left({x, y}\right) \in \mathcal R \cap \mathcal R^{-1}$
+By definition of [[Definition:Set Intersection|intersection]]:
+:$\left({x, y}\right) \in \mathcal R$
+:$\left({x, y}\right) \in \mathcal R^{-1}$
+By definition of [[Definition:Inverse Relation|inverse relation]]:
+:$\left({x, y}\right) \in \mathcal R \implies \left({y, x}\right) \in \mathcal R^{-1}$
+:$\displaystyle \left({x, y}\right) \in \mathcal R^{-1} \implies \left({y, x}\right) \in \left ({\mathcal R^{-1}} \right )^{-1}$
+By [[Inverse of Inverse Relation]] the second statement may be rewritten:
+:$\left({x, y}\right) \in \mathcal R \implies \left({y, x}\right) \in \mathcal R^{-1}$
+:$\left({x, y}\right) \in \mathcal R^{-1} \implies \left({y, x}\right) \in \mathcal R$
+Then by definition of [[Definition:Set Intersection|intersection]]:
+:$\left({y, x}\right) \in \mathcal R \cap \mathcal R^{-1}$
+Hence $\mathcal R \cap \mathcal R^{-1}$ is [[Definition:Symmetric Relation|symmetric]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Intersection of Closed Sets is Closed/Closure Operator}
+Tags: Closure Operators, Set Intersection
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $f: \powerset S \to \powerset S$ be a [[Definition:Closure Operator|closure operator]] on $S$.
+Let $\CC$ be the [[Definition:Set|set]] of all [[Definition:Subset|subsets]] of $S$ that are [[Definition:Closed Set under Closure Operator|closed]] with respect to $f$.
+Let $\AA \subseteq \CC$.
+Then $\bigcap \AA \in \CC$.
+\end{theorem}
+
+\begin{proof}
+Let $Q = \bigcap \AA$.
+By the definition of [[Definition:Closure Operator|closure operator]], $f$ is [[Definition:Inflationary Mapping|inflationary]], [[Definition:Order-Preserving Mapping|order-preserving]], and [[Definition:Idempotent Mapping|idempotent]].
+Let $A \in \AA$.
+By [[Intersection is Largest Subset/Set of Sets|Intersection is Largest Subset]], $Q \subseteq A$.
+Since $f$ is [[Definition:Order-Preserving Mapping|order-preserving]], $\map f Q \subseteq \map f A$.
+By the definition of [[Definition:Closed Set under Closure Operator|closed set]], $\map f A = A$
+Thus $\map f Q \subseteq A$.
+This holds for all $A \in \AA$.
+Thus by [[Intersection is Largest Subset/Set of Sets|Intersection is Largest Subset]]:
+:$\map f Q \subseteq \bigcap \AA$
+Since $\bigcap \AA = Q$:
+:$\map f Q \subseteq Q$
+Since $f$ is [[Definition:Inflationary Mapping|inflationary]]:
+:$Q \subseteq \map f Q$
+Thus by definition of [[Definition:Set Equality/Definition 2|set equality]]:
+:$Q = \map f Q$
+Therefore $Q$ is [[Definition:Closed Set under Closure Operator|closed]] with respect to $f$.
+{{qed}}
+[[Category:Closure Operators]]
+[[Category:Set Intersection]]
+0wx60nxfhxnwbqfzj1mo6nccgfu2jok
+\end{proof}<|endoftext|>
+\section{Closure is Closed/Power Set}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $\powerset S$ be the [[Definition:Power Set|power set]] of $S$.
+Let $\cl: \powerset S \to \powerset S$ be a [[Definition:Closure Operator|closure operator]].
+Let $T \subseteq S$.
+Then $\map \cl T$ is a [[Definition:Closed Set under Closure Operator|closed set]] with respect to $\cl$.
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Closure Operator/Power Set|closure operator]], $\cl$ is [[Definition:Idempotent Mapping|idempotent]].
+Therefore $\map \cl {\map \cl T} = \map \cl T$, so $\map \cl T$ is [[Definition:Closed Set under Closure Operator|closed]].
+{{qed}}
+[[Category:Closure Operators]]
+eclqthg2av1s8vwraog48pwy1xzukub
+\end{proof}<|endoftext|>
+\section{Relation Intersection Inverse is Greatest Symmetric Subset of Relation}
+Tags: Relation Theory, Symmetric Relations
+
+\begin{theorem}
+Let $\RR$ be a [[Definition:Binary Relation|relation]] on a [[Definition:Set|set]] $S$.
+Let $\powerset \RR$ be the [[Definition:Power Set|power set]] of $\RR$.
+By definition, $\powerset \RR$ is the [[Definition:Set|set]] of all [[Definition:Binary Relation|relation]]s on $S$ that are [[Definition:Subset|subsets]] of $\RR$.
+Then the [[Definition:Greatest Element|greatest element]] of $\powerset \RR$ that is [[Definition:Symmetric Relation|symmetric]] is:
+:$\RR \cap \RR^{-1}$
+\end{theorem}
+
+\begin{proof}
+By [[Intersection of Relation with Inverse is Symmetric Relation]]:
+:$\RR \cap \RR^{-1}$ is a [[Definition:Symmetric Relation|symmetric relation]].
+Suppose for some $\SS \in \powerset \RR$ that $\SS$ is [[Definition:Symmetric Relation|symmetric]] and not equal to $\RR \cap \RR^{-1}$.
+We will show that it is a [[Definition:Proper Subset|proper subset]] of $\RR \cap \RR^{-1}$.
+Suppose $\tuple {x, y} \in \SS$.
+Then as $\SS \subseteq \RR$:
+:$\tuple {x, y} \in \RR$
+As $\SS$ is [[Definition:Symmetric Relation|symmetric]]:
+:$\tuple {y, x} \in \SS$
+So as $\SS \subseteq \RR$:
+:$\tuple {y, x} \in \RR$
+By definition of [[Definition:Inverse Relation|inverse relation]]:
+:$\tuple {x, y} \in \RR^{-1}$
+By definition of [[Definition:Set Intersection|intersection]]:
+:$\tuple {x, y} \in \RR \cap \RR^{-1}$
+Thus:
+:$\tuple {x, y} \in \SS \implies \tuple {x, y} \in \RR \cap \RR^{-1}$
+By definition of [[Definition:Subset|subset]]:
+:$\SS \subseteq \RR \cap \RR^{-1}$
+Finally, as we assumed $\SS \ne \RR \cap \RR^{-1}$:
+:$\SS \subset \RR \cap \RR^{-1}$
+Hence the result.
+{{qed}}
+[[Category:Relation Theory]]
+[[Category:Symmetric Relations]]
+6k95y20lcui80oc5hcca1henqlhj01p
+\end{proof}<|endoftext|>
+\section{Composition of Compatible Closure Operators}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]].
+Let $f, g: \mathcal P \left({S}\right) \to \mathcal P \left({S}\right)$ be [[Definition:Closure Operator|closure operators]] on $S$.
+Let $\mathcal C_f$ and $\mathcal C_g$ be the sets of [[Definition:Closed Set under Closure Operator|closed sets]] of $S$ with respect to $f$ and $g$ respectively.
+For each [[Definition:subset|subset]] $T$ of $S$, let the following hold:
+: $(1): \quad$ If $T$ is [[Definition:Closed Set/Closure Operator|closed]] with respect to $g$, then $f \left({T}\right)$ is closed with respect to $g$.
+:: That is, if $T \in \mathcal C_g$ then $f \left({T}\right) \in \mathcal C_g$.
+: $(2): \quad$ If $T$ is closed with respect to $f$, then $g \left({T}\right)$ is closed with respect to $f$.
+:: That is, if $T \in \mathcal C_f$ then $g \left({T}\right) \in \mathcal C_f$.
+Let $\mathcal C_h = \mathcal C_f \cap \mathcal C_g$.
+Then:
+: $\mathcal C_h$ [[Closure Operator from Closed Sets|induces a closure operator]] $h$ on $S$
+: $f \circ g = g \circ f = h$, where $\circ$ represents [[Definition:Composition of Mappings|composition of mappings]].
+\end{theorem}
+
+\begin{proof}
+First we show that $\mathcal C_h$ [[Closure Operator from Closed Sets|induces a closure operator]] on $S$.
+Let $\mathcal A \subseteq \mathcal C_h$.
+By [[Intersection is Largest Subset]]:
+: $\mathcal A \subseteq \mathcal C_f$
+and:
+: $\mathcal A \subseteq \mathcal C_g$
+Thus by [[Intersection of Closed Sets is Closed/Closure Operator]]:
+:$\bigcap \mathcal A \in \mathcal C_f$
+and
+:$\bigcap \mathcal A \in \mathcal C_g$
+Thus by the definition of [[Definition:Set Intersection|set intersection]]:
+: $\bigcap \mathcal A \in \mathcal C_h$
+Thus by [[Closure Operator from Closed Sets]], $\mathcal C_h$ [[Closure Operator from Closed Sets|induces a closure operator]] $h$ on $S$.
+Now we will show that $f \circ g = h$.
+Having established that, it can be seen that $g \circ f = h$ will hold by reversing the variable names.
+Let $T \subseteq S$.
+By definition of [[Definition:Closed Set under Closure Operator|closed set]]:
+:$f \left({g \left({T}\right)}\right) \in \mathcal C_f$
+:$g \left({T}\right) \in \mathcal C_g$
+By the premise:
+: $f \left({g \left({T}\right)}\right) \in \mathcal C_g$
+Thus by definition of [[Definition:Set Intersection|set intersection]]:
+: $f \left({g \left({T}\right)}\right) \in \mathcal C_f \cap \mathcal C_g = \mathcal C_h$
+So $f \left({g \left({T}\right)}\right)$ is [[Definition:Closed Set/Closure Operator|closed]] with respect to $h$.
+By [[Set Closure is Smallest Closed Set/Closure Operator]]:
+: $h \left({T}\right) \subseteq f \left({g \left({T}\right)}\right)$
+By definition of [[Definition:Closed Set under Closure Operator|closed set]]:
+: $h \left({T}\right) \in \mathcal C_h$
+Thus by the definition of [[Definition:Set Intersection|set intersection]]:
+: $h \left({T}\right) \in \mathcal C_f$
+and
+: $h \left({T}\right) \in \mathcal C_g$
+By [[Set Closure is Smallest Closed Set/Closure Operator]]:
+: $g \left({T}\right) \subseteq h \left({T}\right)$
+By [[Definition:Closure Operator/Power Set|Closure Operator: Axiom $(2)$]] $f$ is [[Definition:Order-Preserving Mapping|order-preserving]]:
+: $f \left({g \left({T}\right)}\right) \subseteq f \left({h \left({T}\right)}\right)$
+Recall that $h \left({T}\right) \in \mathcal C_f$.
+By definition of [[Definition:Closed Set under Closure Operator|closed set]]:
+: $f \left({h \left({T}\right)}\right) = h \left({T}\right)$
+Thus:
+: $f \left({g \left({T}\right)}\right) \subseteq h \left({T}\right)$
+We have that:
+:$h \left({T}\right) \subseteq f \left({g \left({T}\right)}\right)$
+So by definition of [[Definition:Set Equality/Definition 2|set equality]]:
+:$h \left({T}\right) = f \left({g \left({T}\right)}\right)$
+{{qed}}
+[[Category:Closure Operators]]
+fhkw1vv19pbm9uivq9sbv61y6amq2cu
+\end{proof}<|endoftext|>
+\section{Closure is Closed}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $\cl: S \to S$ be a [[Definition:Closure Operator (Order Theory)|closure operator]].
+Let $x \in S$.
+Then $\map \cl x$ is a [[Definition:Closed Element|closed element]] of $S$ with respect to $\cl$.
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Closure Operator (Order Theory)|closure operator]], $\cl$ is [[Definition:Idempotent Mapping|idempotent]].
+Therefore:
+:$\map \cl {\map \cl x} = \map \cl x$
+It follows by definition that $\map \cl x$ is a [[Definition:Closed Element|closed element]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Closure Operator from Closed Elements}
+Tags: Closure Operators, Order Theory
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $C \subseteq S$.
+Suppose that $C$ is a [[Definition:Subset|subset]] of $S$ with the property that every [[Definition:Element|element]] of $S$ has a [[Definition:Smallest Element|smallest]] [[Definition:Successor Element|successor]] in $C$.
+Let $\cl: S \to S$ be defined as follows:
+For $x \in S$:
+:$\map \cl x = \map \min {C \cap x^\succeq}$
+where $x^\succeq$ is the [[Definition:Upper Closure|upper closure]] of $x$.
+That is, let $\map \cl x$ be the [[Definition:Smallest Element|smallest]] [[Definition:Successor Element|successor]] of $x$ in $C$.
+Then:
+:$\cl$ is a [[Definition:Closure Operator (Order Theory)|closure operator]] on $S$
+:The [[Definition:Closed Element|closed elements]] of $\cl$ are precisely the elements of $C$.
+\end{theorem}
+
+\begin{proof}
+=== Inflationary ===
+$x$ is a [[Definition:Lower Bound of Set|lower bound]] of $x^\succeq$.
+Hence by [[Lower Bound for Subset]], $x$ is also a [[Definition:Lower Bound of Set|lower bound]] of $C \cap x^\succeq$.
+By the definition of [[Definition:Smallest Element|smallest element]], $x \preceq \map \cl x$.
+{{qed|lemma}}
+=== Order-Preserving ===
+Suppose that $x \preceq y$.
+Then:
+:$C \cap y^\succeq \subseteq C \cap x^\succeq$
+By [[Smallest Element of Subset]]:
+:$\map \cl x \preceq \map \cl y$
+=== Idempotent ===
+Let $x \in S$.
+For each $x \in S$:
+:$\map \cl x = \map \min {C \cap x^\succeq}$
+Thus:
+:$\map \cl x \in \paren {C \cap x^\succeq} \subseteq C$
+That is to say, $\map \cl x$ is its own [[Definition:Smallest Element|smallest]] [[Definition:Successor Element|successor]] in $C$.
+Thus:
+:$\map \cl x = \map \cl {\map \cl x}$
+{{qed|lemma}}
+When $x \in C$, $x$ is the [[Definition:Smallest Element|minimum]] of $C \cap x^\succeq$
+Hence, elements of $C$ are [[Definition:Closed Element|closed elements]] with respect to $\cl$.
+Suppose that $x$ is [[Definition:Closed Element|closed]] with respect to $\cl$.
+Then:
+:$x = \map \min {C \cap x^\succeq}$
+so in particular:
+:$x \in C$
+{{qed}}
+[[Category:Closure Operators]]
+[[Category:Order Theory]]
+\end{proof}<|endoftext|>
+\section{Product of Affine Spaces is Affine Space}
+Tags: Affine Geometry
+
+\begin{theorem}
+Let $\mathcal E, \mathcal F$ be [[Definition:Affine Space|affine spaces]].
+Let $\mathcal G = \mathcal E \times \mathcal F$ be the [[Definition:Product of Affine Spaces|product]] of $\mathcal E$ and $\mathcal F$.
+Then $\mathcal G$ is an affine space.
+\end{theorem}
+
+\begin{proof}
+Let $G = \vec{\mathcal G}$ be the [[Definition:Difference Space|difference space]] of $\mathcal G$.
+We are required to show that the following axioms are satisfied:
+{{begin-axiom}}
+{{axiom | n = 1
+ | q = \forall p, q \in \mathcal G
+ | m = p + \left({q - p}\right) = q
+}}
+{{axiom | n = 2
+ | q = \forall p \in \mathcal G: \forall u, v \in G
+ | m = \left({p + u}\right) + v = p + \left({u + v}\right)
+}}
+{{axiom | n = 3
+ | q = \forall p, q \in \mathcal G: \forall u \in G
+ | m = \left({p - q}\right) + u = \left({p + u}\right) - q
+}}
+{{end-axiom}}
+Proof of $(1)$:
+Let $p = \left({p', p''}\right), q = \left({q', q''}\right) \in \mathcal G$.
+We have:
+{{begin-eqn}}
+{{eqn | l = p + \left({q - p}\right)
+ | r = \left({p', p''}\right) + \left({\left({q', q''}\right) - \left({p', p''}\right)}\right)
+}}
+{{eqn | r = \left({p', p''}\right) + \left({q' - p', q'' - p''}\right)
+ | c = Definition of $-$ in [[Definition:Product of Affine Spaces|Product Space]]
+}}
+{{eqn | r = \left({p' + \left({q' - p'}\right), p'' + \left({q'' - p''}\right)}\right)
+ | c = Definition of $+$ in [[Definition:Product of Affine Spaces|Product Space]]
+}}
+{{eqn | r = \left({q' , q''}\right)
+ | c = Axiom $(1)$ in [[Definition:Affine Space|Affine Spaces]] $\mathcal E$, $\mathcal F$
+}}
+{{eqn | r = q
+}}
+{{end-eqn}}
+{{qed|lemma}}
+Proof of $(2)$:
+Let $p = \left({p', p''}\right) \in \mathcal G$.
+Let $u = \left({u', u''}\right), v = \left({v', v''}\right) \in G$.
+We have:
+{{begin-eqn}}
+{{eqn | l = \left({p + u}\right) + v
+ | r = \left({\left({p', p''}\right) + \left({u', u''}\right)}\right) + \left({v', v''}\right)
+}}
+{{eqn | r = \left({p' + u', p'' + u''}\right) + \left({v', v''}\right)
+ | c = Definition of $+$ in [[Definition:Product of Affine Spaces|Product Space]]
+}}
+{{eqn | r = \left({\left({p' + u'}\right) + v', \left({p'' + u''}\right) + v''}\right)
+ | c = Definition of $+$ in [[Definition:Product of Affine Spaces|Product Space]]
+}}
+{{eqn | r = \left({p' + \left({u' + v'}\right), p'' + \left({u'' + v''}\right)}\right)
+ | c = Axiom $(2)$ in [[Definition:Affine Space|Affine Spaces]] $\mathcal E$, $\mathcal F$
+}}
+{{eqn | r = \left({p', p''}\right) + \left({\left({u', u''}\right) + \left({v', v''}\right)}\right)
+ | c = Definition of $+$ in [[Definition:Product of Affine Spaces|Product Space]]
+}}
+{{eqn | r = p + \left({u + v}\right)
+}}
+{{end-eqn}}
+{{qed|lemma}}
+Proof of $(3)$:
+Let $p = \left({p', p''}\right), q = \left({q', q''}\right) \in \mathcal G$.
+Let $u = \left({u', u''}\right) \in G$.
+We have:
+{{begin-eqn}}
+{{eqn | l = \left({p - q}\right) + u
+ | r = \left({\left({p', p''}\right) - \left({q', q''}\right)}\right) + \left({u', u''}\right)
+}}
+{{eqn | r = \left({\left({p' - q'}\right) + u', \left({p'' - q''}\right) + u''}\right)
+ | c = Definition of $+,-$ in [[Definition:Product of Affine Spaces|Product Space]]
+}}
+{{eqn | r = \left({\left({p' + u'}\right) - q', \left({p'' + u''}\right) - q''}\right)
+ | c = Axiom $(3)$ in [[Definition:Affine Space|Affine Spaces]] $\mathcal E$, $\mathcal F$
+}}
+{{eqn | r = \left({\left({p', p''}\right) + \left({u', u''}\right)}\right) - \left({q', q''}\right)
+ | c = Definition of $+,-$ in [[Definition:Product of Affine Spaces|Product Space]]
+}}
+{{eqn | r = \left({p + u}\right) - q
+}}
+{{end-eqn}}
+{{Qed}}
+[[Category:Affine Geometry]]
+\end{proof}<|endoftext|>
+\section{Intersection of Complete Meet Subsemilattices invokes Closure Operator}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $\struct {S, \preccurlyeq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $f_i$ be a [[Definition:Closure Operator (Order Theory)|closure operator]] on $S$ for each $i \in I$.
+Let $C_i = \map {f_i} S$ be the [[Definition:Set|set]] of [[Definition:Closed Element|closed elements]] with respect to $f_i$ for each $i \in I$.
+Suppose that for each $i \in I$, $C_i$ is a '''complete meet subsemilattice''' of $S$ in the following sense:
+:For each $D \subseteq C_i$, $D$ has an [[Definition:Infimum of Set|infimum]] in $S$ such that $\inf D \in C_i$.
+Then $C = \displaystyle \bigcap_{i \mathop \in I} C_i$ [[Closure Operator from Closed Elements|induces]] a [[Definition:Closure Operator (Order Theory)|closure operator]] on $S$.
+\end{theorem}
+
+\begin{proof}
+=== Lemma ===
+Let $\struct {S, \preccurlyeq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $C_i$ be a '''complete meet subsemilattice''' of $S$.
+Then $C = \displaystyle \bigcap_{i \mathop \in I} C_i$ is also a '''complete meet subsemilattice'''.
+=== Proof ===
+Let $D \subseteq C$.
+By [[Intersection is Largest Subset]], $D \subseteq C_i$ for each $i \in I$.
+Thus $D$ has an [[Definition:Infimum of Set|infimum]] in $S$ and $\inf D \in C_i$ for each $i \in I$.
+By the definition of [[Definition:Set Intersection|intersection]], $\inf D \in C$.
+{{qed|lemma}}
+By the [[Intersection of Complete Meet Subsemilattices invokes Closure Operator#Lemma|lemma]], $C$ is a '''complete meet subsemilattice'''.
+Let $x \in S$.
+Then $C \cap x^\succcurlyeq$ has an [[Definition:Infimum of Set|infimum]] in $S$ which lies in $C$, where $x^\succcurlyeq$ is the [[Definition:Upper Closure of Element|upper closure]] of $x$.
+By the definition of [[Definition:Infimum of Set|infimum]]:
+:$x \preccurlyeq \inf \paren {C \cap x^\succcurlyeq}$
+so this [[Definition:Infimum of Set|infimum]] is in fact the [[Definition:Smallest Element|smallest element]] of $C \cap x^\succcurlyeq$.
+Thus $C$ induces a [[Definition:Closure Operator|closure operator]] on $S$ by [[Closure Operator from Closed Elements]].
+{{qed}}
+[[Category:Closure Operators]]
+\end{proof}<|endoftext|>
+\section{Vector Space with Standard Affine Structure is Affine Space}
+Tags: Affine Geometry
+
+\begin{theorem}
+Let $E$ be a [[Definition:Vector Space|vector space]].
+Let $\left({\mathcal E, E, +, -}\right)$ be the [[Definition:Standard Affine Structure on Vector Space|standard affine structure]] on $E$.
+Then with this structure, $\mathcal E$ is an [[Definition:Affine Space|affine space]].
+\end{theorem}
+
+\begin{proof}
+We are required to show that:
+{{begin-axiom}}
+{{axiom|n = 1
+ |q = \forall p, q \in \mathcal E
+ |m = p + \left({q - p}\right) = q
+}}
+{{axiom|n = 2
+ |q = \forall p \in \mathcal E: \forall u, v \in E
+ |m = \left({p + u}\right) + v = p + \left({u \mathop{+} v}\right)
+}}
+{{axiom|n = 3
+ |q = \forall p, q \in \mathcal E: \forall u \in E
+ |m = \left({p - q}\right) \mathop{+} u = \left({p + u}\right) - q
+}}
+{{end-axiom}}
+By the definition of the [[Definition:Standard Affine Structure on Vector Space|standard affine structure]], the addition and subtraction operations are simply those in the [[Definition:Vector Space|vector space]] $E$.
+That is, we want to show that:
+{{begin-axiom}}
+{{axiom|n = 1
+ |q = \forall u,v \in E
+ |m = u + \left({v - u}\right) = v
+}}
+{{axiom|n = 2
+ |q = \forall u,v,w \in E
+ |m = \left({u + v}\right) + w = u + \left({v \mathop{+} w}\right)
+}}
+{{axiom|n = 3
+ |q = \forall u,v,w \in E
+ |m = \left({v - u}\right) \mathop{+} w = \left({v + w}\right) - u
+}}
+{{end-axiom}}
+By definition the addition operation on a [[Definition:Vector Space|vector space]] is [[Definition:Commutative Operation|commutative]] and [[Definition:Associative|associative]].
+But all three axioms are immediate consequences of [[Definition:Commutative Operation|commutativity]] and [[Definition:Associative|associativity]].
+This concludes the proof.
+{{Qed}}
+{{MissingLinks|particularly to addition, subtraction etc.}}
+[[Category:Affine Geometry]]
+\end{proof}<|endoftext|>
+\section{Reflexive Reduction of Ordering is Strict Ordering}
+Tags: Order Theory, Reflexive Reductions, Reflexive Reduction of Ordering is Strict Ordering
+
+\begin{theorem}
+Let $\mathcal R$ be an [[Definition:Ordering|ordering]] on a [[Definition:Set|set]] $S$.
+Let $\mathcal R^\ne$ be the [[Definition:Reflexive Reduction|reflexive reduction]] of $\mathcal R$.
+Then $\mathcal R^\ne$ is a [[Definition:Strict Ordering|strict ordering]] on $S$.
+\end{theorem}
+
+\begin{proof}
+=== Antireflexivity ===
+Follows from [[Reflexive Reduction is Antireflexive]].
+{{qed|lemma}}
+=== Transitivity ===
+Suppose $\tuple {x, y}, \tuple {y, z} \in \RR^\ne$.
+By [[Definition:Antireflexive Relation|antireflexivity]] $x \ne y$ and $y \ne z$.
+We consider the two remaining cases.
+==== Case 1: $x = z$ ====
+If $x = z$ then:
+:$\tuple {x, y}, \tuple {y, x} \in \RR^\ne$
+and so:
+:$\tuple {x, y}, \tuple {y, x} \in \RR$
+Then by the [[Definition:Antisymmetric Relation|antisymmetry]] of $\RR$:
+:$x = y$
+and:
+:$\tuple {x, x} \in \RR^\ne$
+which contradicts that $\RR^\ne$ is [[Definition:Antireflexive Relation|antireflexive]].
+==== Case 2: $x \ne z$ ====
+By the [[Definition:Transitive Relation|transitivity]] of $\RR$:
+:$\tuple {x, z} \in \RR$
+and by $x$ and $z$ being distinct:
+:$\tuple {x, z} \notin \Delta_S$
+It follows by the definition of [[Definition:Reflexive Reduction|reflexive reduction]]:
+:$\tuple {x, z} \in \RR^\ne$
+Hence $\RR^\ne$ is [[Definition:Transitive Relation|transitive]].
+{{qed}}
+\end{proof}
+
+\begin{proof}
+By definition, an [[Definition:Ordering|ordering]] is both [[Definition:Reflexive Relation|reflexive]] and [[Definition:Transitive Relation|transitive]].
+The result then follows from [[Reflexive Reduction of Transitive Antisymmetric Relation is Strict Ordering]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Vectorialization of Affine Space is Vector Space}
+Tags: Affine Geometry
+
+\begin{theorem}
+Let $\mathcal E$ be an [[Definition:Affine Space|affine space]] over a [[Definition:Field (Abstract Algebra)|field]] $k$ with [[Definition:Difference Space|difference space]] $E$.
+Let $\mathcal R = \tuple {p_0, e_1, \ldots, e_n}$ be an [[Definition:Affine Frame|affine frame]] in $\mathcal E$.
+Let $\struct {\mathcal E, +, \cdot}$ be the [[Definition:Vectorialization of Affine Space|vectorialization]] of $\mathcal E$.
+Then $\struct {\mathcal E, +, \cdot}$ is a [[Definition:Vector Space|vector space]].
+\end{theorem}
+
+\begin{proof}
+By the definition of the [[Definition:Vectorialization of Affine Space|vectorialization of an affine space]], the [[Definition:Mapping|mapping]] $\Theta_\mathcal R : k^n \to \mathcal E$ defined by:
+:$\displaystyle \map {\Theta_\mathcal R} {\lambda_1, \ldots, \lambda_n} = p_0 + \sum_{i \mathop = 1}^n \lambda_i e_i$
+is a [[Definition:Bijection|bijection]] from $k^n$ to $\mathcal E$.
+Therefore, by [[Homomorphic Image of Vector Space]], it suffices to prove that $\Theta_\mathcal R$ is a [[Definition:Linear Transformation on Vector Space|linear transformation]].
+By [[General Linear Group is Group]]:
+:$\Theta_\mathcal R$ is a [[Definition:Linear Transformation on Vector Space|linear transformation]] {{iff}} its [[Definition:Inverse Mapping|inverse]] ${\Theta_\mathcal R}^{-1}$ is a [[Definition:Linear Transformation on Vector Space|linear transformation]].
+Therefore, it suffices to show that:
+:$\forall p, q \in \mathcal E, \mu \in k: \map { {\Theta_\mathcal R}^{-1} } {\mu \cdot p + q} = \mu \cdot \map { {\Theta_\mathcal R}^{-1} } p + \map { {\Theta_\mathcal R}^{-1} } q$
+Thus:
+{{begin-eqn}}
+{{eqn | l = \map { {\Theta_\mathcal R}^{-1} } {\mu \cdot p + q}
+ | r = \map { {\Theta_\mathcal R}^{-1} } {\map {\Theta_\mathcal R} {\mu \cdot \map { {\Theta_\mathcal R}^{-1} } p} + q}
+ | c = Definition of $\mu \cdot p$
+}}
+{{eqn | r = {\Theta_\mathcal R}^{-1} \map {\Theta_\mathcal R} {\mu \cdot \map { {\Theta_\mathcal R}^{-1} } p + \map { {\Theta_\mathcal R}^{-1} } q}
+ | c = Definition of $+$ in $\mathcal E$
+}}
+{{eqn | r = \mu \cdot \map { {\Theta_\mathcal R}^{-1} } p + \map { {\Theta_\mathcal R}^{-1} } q
+ | c = because $\map { {\Theta_\mathcal R}^{-1} } {\Theta_\mathcal R}$ is the [[Definition:Identity Mapping|identity mapping]]
+}}
+{{end-eqn}}
+This is the required identity.
+{{Qed}}
+[[Category:Affine Geometry]]
+\end{proof}<|endoftext|>
+\section{Reflexive Reduction is Antireflexive}
+Tags: Reflexive Reductions
+
+\begin{theorem}
+Let $\mathcal R$ be a [[Definition:Endorelation|relation]] on a [[Definition:Set|set]] $S$.
+Let $\mathcal R^\ne$ be the [[Definition:Reflexive Reduction|reflexive reduction]] of $\mathcal R$.
+Then $\mathcal R^\ne$ is [[Definition:Antireflexive Relation|antireflexive]].
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Reflexive Reduction|reflexive reduction]]:
+:$\mathcal R^\ne = \mathcal R \setminus \Delta_S$
+where $\Delta_S$ denotes the [[Definition:Diagonal Relation|diagonal relation]] on $S$.
+By [[Set Difference Intersection with Second Set is Empty Set]]:
+:$\left({\mathcal R \setminus \Delta_S}\right) \cap \Delta_S = \varnothing$
+Hence by [[Relation is Antireflexive iff Disjoint from Diagonal Relation]], $\mathcal R^\ne$ is [[Definition:Antireflexive Relation|antireflexive]].
+{{qed}}
+[[Category:Reflexive Reductions]]
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Strict Ordering}
+Tags: Order Theory
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $\RR$ be a [[Definition:Endorelation|relation]] on $S$.
+{{TFAE|def = Strict Ordering}}
+\end{theorem}
+
+\begin{proof}
+Let $\RR$ be [[Definition:Transitive|transitive]].
+Then by [[Transitive Relation is Antireflexive iff Asymmetric]] it follows directly that:
+:$(1): \quad$ If $\RR$ is [[Definition:Antireflexive Relation|antireflexive]] then it is [[Definition:Asymmetric Relation|asymmetric]]
+:$(2): \quad$ If $\RR$ is [[Definition:Asymmetric Relation|asymmetric]] then it is [[Definition:Antireflexive Relation|antireflexive]].
+{{qed}}
+[[Category:Order Theory]]
+\end{proof}<|endoftext|>
+\section{Subband iff Idempotent under Induced Operation}
+Tags: Abstract Algebra, Idempotence
+
+\begin{theorem}
+Let $\left({S, \circ}\right)$ be a [[Definition:Band|band]].
+Let $\left({\mathcal P \left({S}\right), \circ_\mathcal P}\right)$ be the [[Definition:Algebraic Structure|algebraic structure]] consisting of the [[Definition:Power Set|power set]] of $S$ and the [[Definition:Subset Product|operation induced on $\mathcal P \left({S}\right)$ by $\circ$]].
+Let $X \in \mathcal P \left({S}\right)$.
+Then $X$ is [[Definition:Idempotent Element|idempotent]] {{iff}} $\left({X, \circ}\right)$ is a [[Definition:Subband|subband]] of $\left({S, \circ}\right)$.
+\end{theorem}
+
+\begin{proof}
+=== Subbandhood implies Idempotency ===
+==== Proving $\left({X \circ_{\mathcal P} X}\right) \subseteq X$ ====
+Let $c \in X \circ_{\mathcal P} X$.
+By the definition of [[Definition:Subset Product|subset product]], there exist $a, b \in X$ such that:
+:$a \circ b = c$
+Suppose $c \notin X$.
+Then:
+:$a \circ b \notin X$
+This contradicts that $\left({X, \circ}\right)$ is a [[Definition:Subband|subband]] of $\left({S, \circ}\right)$.
+==== Proving $X \subseteq \left({X \circ_{\mathcal P} X}\right)$ ====
+Let $a \in X$.
+By the definition of [[Definition:Subset Product|subset product]]:
+:$X \circ_{\mathcal P} X = \{ a \circ b: a, b \in X \}$
+As $\circ$ is [[Definition:Idempotent Operation|idempotent]]:
+:$a \circ a = a$.
+Thus:
+:$a \in \left({X \circ_{\mathcal P} X}\right)$
+Hence by the definition of [[Definition:Subset|subset]]:
+:$X \subseteq \left({X \circ_{\mathcal P} X}\right)$
+{{qed|lemma}}
+=== Idempotency implies Subbandhood ===
+Let $X \in \mathcal P \left({S}\right)$.
+Suppose $X$ is [[Definition:Idempotent Element|idempotent]]:
+That is, suppose:
+:$X \circ_{\mathcal P} X = X$
+Let $a, b \in X$.
+By the definition of [[Definition:Subset Product|subset product]]:
+:$X \circ_{\mathcal P} X = \{ a \circ b: a, b \in X \}$
+Then $a \circ b \in X$.
+Hence $\left({X, \circ}\right)$ is a [[Definition:Magma|magma]].
+By [[Restriction of Associative Operation is Associative]] it is a [[Definition:Semigroup|semigroup]].
+Finally by [[Restriction of Idempotent Operation is Idempotent]] it is a [[Definition:Band|band]].
+{{qed}}
+[[Category:Abstract Algebra]]
+[[Category:Idempotence]]
+\end{proof}<|endoftext|>
+\section{Restriction of Idempotent Operation is Idempotent}
+Tags: Abstract Algebra, Idempotence
+
+\begin{theorem}
+Let $\left({S, \circ}\right)$ be an [[Definition:Algebraic Structure|algebraic structure]].
+Let $T \subseteq S$.
+Let the [[Definition:Binary Operation|operation]] $\circ$ be [[Definition:Idempotent Operation|idempotent]].
+Then $\circ$ is also [[Definition:Idempotent Operation|idempotent]] upon [[Definition:Restriction of Operation|restriction]] to $\left({T, \circ \restriction_T}\right)$.
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = T
+ | o = \subseteq
+ | r = S
+ | c =
+}}
+{{eqn | ll= \implies
+ | lo= \forall a \in T:
+ | l = a
+ | o = \in
+ | r = S
+ | c = {{Defof|Subset}}
+}}
+{{eqn | ll= \implies
+ | l = a \mathop{\circ_T} a
+ | r = a \circ a
+ | c =
+}}
+{{eqn | r = a
+ | c =
+}}
+{{end-eqn}}
+{{qed}}
+[[Category:Abstract Algebra]]
+[[Category:Idempotence]]
+\end{proof}<|endoftext|>
+\section{Subband of Induced Operation is Set of Subbands}
+Tags: Abstract Algebra
+
+\begin{theorem}
+Let $\left({S, \circ}\right)$ be a [[Definition:Band|band]].
+Let $\left({\mathcal P \left({S}\right), \circ_\mathcal P}\right)$ be the [[Definition:Algebraic Structure|algebraic structure]] consisting of:
+: the [[Definition:Power Set|power set]] $\mathcal P \left({S}\right)$ of $S$
+and
+: the [[Definition:Operation Induced on Power Set|operation $\circ_\mathcal P$ induced on $\mathcal P \left({S}\right)$ by $\circ$]].
+Let $T \subseteq \mathcal P \left({S}\right)$.
+Let $\left({T, \circ_\mathcal P}\right)$ be a [[Definition:Subband|subband]] of $\left({\mathcal P \left({S}\right), \circ_\mathcal P}\right)$.
+Then every [[Definition:Element|element]] of $T$ is a subband of $\left({S, \circ}\right)$.
+\end{theorem}
+
+\begin{proof}
+{{improve|See talk page}}
+=== Case 1: $T$ is the Empty Set ===
+By:
+: [[Empty Set is Submagma of Magma]]
+: [[Restriction of Associative Operation is Associative]]
+: [[Restriction of Idempotent Operation is Idempotent]]
+it follows that $\left({\varnothing, \circ_\mathcal P}\right)$ is a [[Definition:Subband|subband]] of $\left({T, \circ_\mathcal P}\right)$.
+Let $X \in \varnothing$.
+Then by the definition of the [[Definition:Empty Set|empty set]] it follows that $\left({X, \circ}\right)$ is a [[Definition:Subband|subband]] of $\left({S, \circ}\right)$ [[Definition:Vacuous Truth|vacuously]].
+{{qed|lemma}}
+=== Case 2: $T$ is Non-Empty ===
+Let $X \in T$.
+Then by definition of a [[Definition:Subband|subband]] $X$ is [[Definition:Idempotent Element|idempotent]] under $\circ_\mathcal P$.
+That is:
+:$X \circ_\mathcal P X = X$
+By [[Subband iff Idempotent under Induced Operation]] we have that $\left({X, \circ}\right)$ is a [[Definition:Subband|subband]] of $\left({S, \circ}\right)$.
+Hence the result.
+{{qed}}
+[[Category:Abstract Algebra]]
+\end{proof}<|endoftext|>
+\section{Composition of Commuting Idempotent Mappings is Idempotent}
+Tags: Idempotence, Mapping Theory, Composite Mappings, Composition of Commuting Idempotent Mappings is Idempotent
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $f, g: S \to S$ be [[Definition:Idempotent Mapping|idempotent mappings]] from $S$ to $S$.
+Let:
+:$f \circ g = g \circ f$
+where $\circ$ denotes [[Definition:Composition of Mappings|composition]].
+Then $f \circ g$ is [[Definition:Idempotent Mapping|idempotent]].
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = \paren {f \circ g} \circ \paren {f \circ g}
+ | r = f \circ \paren {g \circ f} \circ g
+ | c = [[Composition of Mappings is Associative]]
+}}
+{{eqn | r = f \circ \paren {f \circ g} \circ g
+ | c = [[Definition:By Hypothesis|by hypothesis]]
+}}
+{{eqn | r = \paren {f \circ f} \circ \paren {g \circ g}
+ | c = [[Composition of Mappings is Associative]]
+}}
+{{eqn | r = f \circ g
+ | c = $f$ and $g$ are [[Definition:Idempotent Mapping|idempotent]] [[Definition:By Hypothesis|by hypothesis]]
+}}
+{{end-eqn}}
+{{qed}}
+\end{proof}
+
+\begin{proof}
+By [[Set of All Self-Maps is Semigroup]], the [[Definition:Set|set]] of all [[Definition:Self-Map|self-maps]] on $S$ forms a [[Definition:Semigroup|semigroup]] under [[Definition:Composition of Mappings|composition]].
+The result follows from [[Product of Commuting Idempotent Elements is Idempotent]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Compositions of Closure Operators are both Closure Operators iff Operators Commute}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $f$ and $g$ be [[Definition:Closure Operator (Order Theory)|closure operators]] on $S$.
+Then the following are equivalent:
+:$(1): \quad f \circ g$ and $g \circ f$ are both [[Definition:Closure Operator (Order Theory)|closure operators]].
+:$(2): \quad f$ and $g$ [[Definition:Commute|commute]] (that is, $f \circ g = g \circ f$).
+:$(3): \quad \operatorname{img}\left({f \circ g}\right) = \operatorname{img}\left({g \circ f}\right)$
+where $\operatorname{img}$ represents the [[Definition:Image of Mapping|image of a mapping]].
+\end{theorem}
+
+\begin{proof}
+By [[Composition of Inflationary Mappings is Inflationary]]:
+: $f \circ g$ and $g \circ f$ are [[Definition:Inflationary Mapping|inflationary]].
+By [[Composition of Increasing Mappings is Increasing]]:
+: $f \circ g$ and $g \circ f$ are [[Definition:Increasing Mapping|increasing]].
+Thus each of the two [[Definition:Composition of Mappings|composite mappings]] will be a [[Definition:Closure Operator (Order Theory)|closure operator]] {{iff}} it is [[Definition:Idempotent Mapping|idempotent]].
+Therefore the equivalences follow from [[Composition of Inflationary and Idempotent Mappings]].
+{{qed}}
+[[Category:Closure Operators]]
+\end{proof}<|endoftext|>
+\section{Composition of Increasing Mappings is Increasing}
+Tags: Increasing Mappings
+
+\begin{theorem}
+Let $\struct {S, \preceq_S}$, $\struct {T, \preceq_T}$ and $\struct {U, \preceq_U}$ be [[Definition:Ordered Set|ordered sets]].
+Let $g: S \to T$ and $f: T \to U$ be [[Definition:Increasing Mapping|increasing mappings]].
+Then their [[Definition:Composition of Mappings|composition]] $f \circ g: S \to U$ is also [[Definition:Increasing Mapping|increasing]].
+\end{theorem}
+
+\begin{proof}
+Let $x, y \in S$ with $x \preceq_S y$.
+Since $g$ is [[Definition:Increasing Mapping|increasing]]:
+:$\map g x \preceq_T \map g y$
+Since $f$ is increasing:
+:$\map f {\map g x} \preceq_U \map f {\map g y}$
+By the definition of [[Definition:Composition of Mappings|composition]]:
+:$\map {\paren {f \circ g} } x \preceq_U \map {\paren {f \circ g} } y$
+Since this holds for all such $x$ and $y$, $f \circ g$ is [[Definition:Increasing Mapping|increasing]].
+{{qed}}
+[[Category:Increasing Mappings]]
+\end{proof}<|endoftext|>
+\section{Composition of Inflationary Mappings is Inflationary}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $f, g: S \to S$ be [[Definition:Inflationary Mapping|inflationary mappings]].
+Then $f \circ g$, the [[Definition:Composition of Mappings|composition]] of $f$ and $g$, is also [[Definition:Inflationary Mapping|inflationary]].
+\end{theorem}
+
+\begin{proof}
+Let $x \in S$.
+{{begin-eqn}}
+{{eqn |n = 1
+ |l = x
+ |o = \preceq
+ |r = g \left({x}\right)
+ |c = $g$ is [[Definition:Inflationary Mapping|inflationary]]
+}}
+{{eqn |n = 2
+ |l = g \left({x}\right)
+ |o = \preceq
+ |r = f \left({g \left({x}\right)}\right)
+ |c = $f$ is [[Definition:Inflationary Mapping|inflationary]]
+}}
+{{eqn |l = x
+ |o = \preceq
+ |r = f \left({g \left({x}\right)}\right)
+ |c = $(1)$ and $(2)$ and $\preceq$ is an [[Definition:Ordering|ordering]] and hence [[Definition:Transitive Relation|transitive]]
+}}
+{{eqn |ll = \implies
+ |l = x
+ |o = \preceq
+ |r = \left({f \circ g}\right) \left({x}\right)
+ |c = Definition of [[Definition:Composition of Mappings|composition]]
+}}
+{{end-eqn}}
+Since this holds for all $x \in S$, $f \circ g$ is [[Definition:Inflationary Mapping|inflationary]].
+{{qed}}
+[[Category:Order Theory]]
+\end{proof}<|endoftext|>
+\section{Fixed Point of Idempotent Mapping}
+Tags: Idempotent Mappings
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $f: S \to S$ be an [[Definition:Idempotent Mapping|idempotent mapping]].
+Let $f \left[{S}\right]$ be the [[Definition:Image of Mapping|image]] of $S$ under $f$.
+Let $x \in S$.
+Then $x$ is a [[Definition:Fixed Point|fixed point]] of $f$ {{iff}} $x \in f \left[{S}\right]$.
+\end{theorem}
+
+\begin{proof}
+=== Necessary Condition ===
+Let $x$ be a [[Definition:Fixed Point|fixed point]] of $f$.
+Then:
+: $f \left({x}\right) = x$
+and so by the definition of [[Definition:Image of Mapping|image of mapping]]:
+: $x \in f \left[{S}\right]$
+{{qed|lemma}}
+=== Sufficient Condition ===
+Let $x \in f \left[{S}\right]$.
+Then by the definition of [[Definition:Image of Mapping|image]]:
+:$\exists y \in S: f \left({y}\right) = x$
+Then:
+{{begin-eqn}}
+{{eqn | l = x
+ | r = f \left({y}\right)
+ | c =
+}}
+{{eqn | ll= \implies
+ | l = f \left({x}\right)
+ | r = f \left({f \left({y}\right)}\right)
+ | c = Definition of [[Definition:Mapping|Mapping]]
+}}
+{{eqn | r = f \left({y}\right)
+ | c = Definition of [[Definition:Idempotent Mapping|Idempotent Mapping]]
+}}
+{{eqn | r = x
+ | c =
+}}
+{{end-eqn}}
+Thus by definition $x$ is a [[Definition:Fixed Point|fixed point]] of $f$.
+{{qed}}
+[[Category:Idempotent Mappings]]
+\end{proof}<|endoftext|>
+\section{Symmetric Closure of Ordering may not be Transitive}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $\preceq^\leftrightarrow$ be the [[Definition:Symmetric Closure|symmetric closure]] of $\preceq$.
+Then it is not necessarily the case that $\preceq^\leftrightarrow$ is [[Definition:Transitive Relation|transitive]].
+\end{theorem}
+
+\begin{proof}
+[[Proof by Counterexample]]:
+Let $S = \left\{{a, b, c}\right\}$ where $a$, $b$, and $c$ are [[Definition:Distinct|distinct]].
+Let:
+: ${\preceq} = \left\{{\left({a, a}\right), \left({b, b}\right), \left({c, c}\right), \left({a, c}\right), \left({b, c}\right)}\right\}$
+Then $\preceq$ is an [[Definition:Ordering|ordering]], but $\preceq^\leftrightarrow$ is not [[Definition:Transitive Relation|transitive]], as follows:
+$\preceq$ is [[Definition:Reflexive Relation|reflexive]] because it [[Definition:Subset|contains]] the [[Definition:Diagonal Relation|diagonal relation]] on $S$.
+That $\preceq$ is [[Definition:Transitive Relation|transitive]] and [[Definition:Antisymmetric Relation|antisymmetric]] can be verified by inspecting all [[Definition:Ordered Pair|ordered pairs]] of its [[Definition:Element|elements]].
+Thus $\preceq$ is an [[Definition:Ordering|ordering]].
+Now consider $\preceq^\leftrightarrow$, the [[Definition:Symmetric Closure|symmetric closure]] of $\preceq$:
+: ${\preceq^\leftrightarrow} = {\preceq} \cup {\preceq}^{-1} = \left\{{\left({a, a}\right), \left({b, b}\right), \left({c, c}\right), \left({a, c}\right), \left({c, a}\right), \left({b, c}\right), \left({c, b}\right)}\right\}$
+by inspection.
+Now $\left({a, c}\right) \in {\preceq^\leftrightarrow}$ and $\left({c, b}\right) \in {\preceq^\leftrightarrow}$, but $\left({a, b}\right) \notin {\preceq^\leftrightarrow}$.
+Thus $\preceq^\leftrightarrow$ is not [[Definition:Transitive Relation|transitive]].
+{{qed}}
+[[Category:Closure Operators]]
+\end{proof}<|endoftext|>
+\section{Composition of Idempotent Mappings}
+Tags: Idempotent Mappings, Composite Mappings
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $f, g: S \to S$ be [[Definition:Idempotent Mapping|idempotent mappings]].
+Suppose that $f \circ g$ and $g \circ f$ have the same [[Definition:Image of Mapping|images]].
+That is, suppose that $f \sqbrk {g \sqbrk S} = g \sqbrk {f \sqbrk S}$.
+Then $f \circ g$ and $g \circ f$ are [[Definition:Idempotent Mapping|idempotent]].
+\end{theorem}
+
+\begin{proof}
+Let $x \in S$.
+By the premise:
+:$\map f {\map g x} \in g \sqbrk {f \sqbrk S}$
+Since $f \sqbrk S \subseteq S$:
+:$\map f {\map g x} \in g \sqbrk S$
+Thus for some $y \in S$:
+:$\map f {\map g x} = \map g y$
+Since $g$ is [[Definition:Idempotent Mapping|idempotent]]:
+:$\map g {\map g y} = \map g y$
+By the choice of $y$:
+:$\map g {\map f {\map g x} } = \map g {\map g y} = \map g y = \map f {\map g x}$
+Thus:
+:$\map f {\map g {\map f {\map g x} } } = \map f {\map f {\map g x} } = \map f {\map g x}$
+That is, $f \circ g$ is [[Definition:Idempotent Mapping|idempotent]].
+By the same argument with the roles of $f$ and $g$ interchanged, $g \circ f$ is also [[Definition:Idempotent Mapping|idempotent]].
+{{qed}}
+[[Category:Idempotent Mappings]]
+[[Category:Composite Mappings]]
+\end{proof}<|endoftext|>
+\section{Composition of Inflationary and Idempotent Mappings}
+Tags: Mapping Theory
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $f$ and $g$ be [[Definition:Inflationary Mapping|inflationary]] and [[Definition:Idempotent Mapping|idempotent]] [[Definition:Mapping|mappings]] on $S$.
+Then the following are equivalent:
+:$(1): \quad f \circ g$ and $g \circ f$ are both [[Definition:Idempotent Mapping|idempotent]]
+:$(2): \quad f$ and $g$ [[Definition:Commute|commute]] (that is, $f \circ g = g \circ f$)
+:$(3): \quad \Img {f \circ g} = \Img {g \circ f}$
+where:
+:$\circ$ represents [[Definition:Composition of Mappings|composition]]
+:$\Img f$ represents the [[Definition:Image of Mapping|image]] of a [[Definition:Mapping|mapping]] $f$.
+\end{theorem}
+
+\begin{proof}
+=== $(2)$ implies $(1)$ ===
+Follows from [[Composition of Commuting Idempotent Mappings is Idempotent]].
+{{qed|lemma}}
+=== $(1)$ implies $(2)$ ===
+Suppose that $f \circ g$ and $g \circ f$ are [[Definition:Idempotent Mapping|idempotent]].
+Then $\paren {f \circ g} \circ \paren {f \circ g} = f \circ g$.
+By [[Composition of Mappings is Associative]] and the definition of [[Definition:Composition of Mappings|composition]], we have for each $x \in S$:
+:$\map f {\map g {\map f {\map g x} } } = \map f {\map g x}$
+Because $\preceq$ is an [[Definition:Ordering|ordering]] and hence [[Definition:Reflexive Relation|reflexive]]:
+:$\map f {\map g {\map f {\map g x} } } \preceq \map f {\map g x}$
+Since $f$ is [[Definition:Inflationary Mapping|inflationary]]:
+:$\map g {\map f {\map g x} } \preceq \map f {\map g {\map f {\map g x} } }$
+Thus since $\preceq$ is an [[Definition:Ordering|ordering]] and hence [[Definition:Transitive Relation|transitive]]:
+:$\map g {\map f {\map g x} } \preceq \map f {\map g x}$
+Since $g$ is [[Definition:Inflationary Mapping|inflationary]]:
+:$\map f {\map g x} \preceq \map g {\map f {\map g x} }$
+Thus since $\preceq$ is an [[Definition:Ordering|ordering]] and hence [[Definition:Antisymmetric Relation|antisymmetric]]:
+:$\map g {\map f {\map g x} } = \map f {\map g x}$
+Since this holds for all $x \in S$, [[Equality of Mappings]] shows that:
+:$g \circ f \circ g = f \circ g$
+
+The same argument, with the roles of $f$ and $g$ reversed, shows that:
+:$f \circ g \circ f = g \circ f$
+Combining everything, we obtain:
+:$f \circ g = f \circ \paren {g \circ f \circ g} = f \circ \paren {g \circ f} = g \circ f$
+Thus $f \circ g = g \circ f$, so $f$ and $g$ [[Definition:Commute|commute]].
+{{qed|lemma}}
+=== $(2)$ implies $(3)$ ===
+This follows from the substitutivity of equality: if $f \circ g = g \circ f$, then $\Img {f \circ g} = \Img {g \circ f}$.
+{{qed|lemma}}
+=== $(3)$ implies $(1)$ ===
+Follows from [[Composition of Idempotent Mappings]].
+{{qed}}
+[[Category:Mapping Theory]]
+jsjv4nlgl8koimfkb9zq9tqoq98tor4
+\end{proof}<|endoftext|>
+\section{Closure is Smallest Closed Successor}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $f: S \to S$ be a [[Definition:Closure Operator (Order Theory)|closure operator]] on $S$.
+Let $x \in S$.
+Then $\map f x$ is the [[Definition:Smallest Element|smallest]] [[Definition:Closed Element|closed element]] that [[Definition:Succeed|succeeds]] $x$.
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Closure Operator (Order Theory)|closure operator]], $f$ is [[Definition:Inflationary Mapping|inflationary]].
+Thus $x \preceq \map f x$.
+By definition, $\map f x$ is [[Definition:Closed Element|closed]].
+So $\map f x$ is a closed element that [[Definition:Succeed|succeeds]] $x$.
+We will now show that it is the [[Definition:Smallest Element|smallest]] such.
+Let $k$ be a closed element of $S$ such that $x \preceq k$.
+Since $f$ is a [[Definition:Closure Operator (Order Theory)|closure operator]], it is [[Definition:Increasing Mapping|increasing]].
+Therefore $\map f x \preceq \map f k$.
+Since $k$ is [[Definition:Closed Element|closed]], $\map f k = k$.
+Thus $\map f x \preceq k$.
+{{qed}}
+[[Category:Closure Operators]]
+1955mm4t98ditsvzrbvqb6rtpq7y9zu
+\end{proof}<|endoftext|>
+\section{Closed Elements Uniquely Determine Closure Operator}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $f, g: S \to S$ be [[Definition:Closure Operator (Order Theory)|closure operators]] on $S$.
+Suppose that $f$ and $g$ have the same [[Definition:Closed Element|closed elements]].
+Then $f = g$.
+\end{theorem}
+
+\begin{proof}
+Let $x \in S$.
+Let $C$ be the [[Definition:Set|set]] of [[Definition:Closed Element|closed elements]] of $S$ (with respect to either $f$ or $g$) that [[Definition:Succeed|succeed]] $x$.
+By [[Closure is Smallest Closed Successor]], $f \left({x}\right)$ and $g \left({x}\right)$ are [[Definition:Smallest/Ordered Set|smallest]] closed [[Definition:Successor|successors]] of $x$.
+That is, $f \left({x}\right)$ and $g \left({x}\right)$ are smallest elements of $C \cap \bar\uparrow x$, where $\bar\uparrow x$ is the [[Definition:Upper Closure of Element|upper closure]] of $x$.
+By [[Smallest Element is Unique]], $f \left({x}\right) = g \left({x}\right)$.
+Since this holds for all $x \in S$, $f = g$ by [[Equality of Mappings]].
+{{qed}}
+[[Category:Closure Operators]]
+2kotny5ma94x5wbj6yj7udjqagueqdr
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Closed Element}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $\cl$ be a [[Definition:Closure Operator (Order Theory)|closure operator]] on $S$.
+Let $x \in S$.
+{{TFAE|def = Closed Element}}
+\end{theorem}
+
+\begin{proof}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Let $\cl: S \to S$ be a [[Definition:Closure Operator|closure operator]] on $S$.
+Let $x \in S$.
+By the definition of [[Definition:Closure Operator|closure operator]], $\cl$ is [[Definition:Idempotent Mapping|idempotent]].
+Thus by [[Fixed Point of Idempotent Mapping]]:
+:An element of $S$ is a [[Definition:Fixed Point|fixed point]] of $\cl$ {{iff}} it is in the [[Definition:Image of Mapping|image]] of $\cl$.
+Thus the above definitions are equivalent.
+{{qed}}
+[[Category:Closure Operators]]
+51loub76vimcrpl17rqj45v5kghjuk3
+\end{proof}<|endoftext|>
+\section{Square of Number Always Exists}
+Tags: Numbers
+
+\begin{theorem}
+Let $x$ be a [[Definition:Number|number]].
+Then its [[Definition:Square (Algebra)|square]] $x^2$ is guaranteed to exist.
+\end{theorem}
+
+\begin{proof}
+Whatever flavour of [[Definition:Number|number]] under discussion, the [[Definition:Algebraic Structure|algebraic structure]] $\struct {\mathbb K, +, \times}$ in which this number sits is at least a [[Definition:Semiring|semiring]].
+The [[Definition:Binary Operation|binary operation]] that is [[Definition:Multiplication|multiplication]] is therefore [[Definition:Closed Algebraic Structure|closed]] on that [[Definition:Algebraic Structure|algebraic structure]].
+Therefore:
+: $\forall x \in \mathbb K: x \times x \in \mathbb K$
+{{qed}}
+[[Category:Numbers]]
+h9o91tir35n4ut6k92frbx01j9dqge4
+\end{proof}<|endoftext|>
+\section{Schröder Rule}
+Tags: Relation Theory
+
+\begin{theorem}
+Let $A$, $B$ and $C$ be [[Definition:Endorelation|relations]] on a [[Definition:Set|set]] $S$.
+Then the following are [[Definition:Logical Equivalence|equivalent]] statements:
+:$(1): \quad A \circ B \subseteq C$
+:$(2): \quad A^{-1} \circ \overline C \subseteq \overline B$
+:$(3): \quad \overline C \circ B^{-1} \subseteq \overline A$
+where:
+: $\circ$ denotes [[Definition:Composition of Relations|relation composition]]
+: $A^{-1}$ denotes the [[Definition:Inverse Relation|inverse]] of $A$
+: $\overline A$ denotes the [[Definition:Complement of Relation|complement]] of $A$.
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Composition of Relations|relation composition]] and [[Definition:Subset|subset]] we have that statement $(1)$ may be written as:
+:$(1')\quad \forall x, y, z \in S: \left({ (y, z) \in A \land (x, y) \in B \implies (x, z) \in C }\right)$
+{{explain|Actually, that only gets us to $\forall x, z \in S: ((\exists y: (y, z) \in A \land (x, y) \in B) \implies (x,z) \in C)$.}}
+Using a different arrangement of variable names, statement $(2)$ can be written:
+:$(2')\quad \forall x, y, z \in S: \left({ (z, y) \in A^{-1} \land (x, z) \in \overline C \implies (x, y) \in \overline B }\right)$
+By the definition of the [[Definition:Inverse Relation|inverse]] and the [[Definition:Complement of Relation|complement]] of a relation we can rewrite this as:
+:$(2'')\quad \forall x, y, z \in S: \left({ (y, z) \in A \land (x, z) \notin C \implies (x, y) \notin B }\right)$
+Similarly, statement $(3)$ can be written:
+:$(3')\quad \forall x, y, z \in S: \left({ (x, z) \in \overline C \land (y, x) \in B^{-1} \implies (y, z) \in \overline A }\right)$
+By the definition of the [[Definition:Inverse Relation|inverse]] and the [[Definition:Complement of Relation|complement]] of a relation we can rewrite this as:
+:$(3'')\quad \forall x, y, z \in S: \left({ (x, z) \notin C \land (x, y) \in B \implies (y, z) \notin A }\right)$
+So in all we have:
+:$(1')\quad \forall x, y, z \in S: \left({ (y, z) \in A \land (x, y) \in B \implies (x, z) \in C }\right)$
+:$(2'')\quad \forall x, y, z \in S: \left({ (y, z) \in A \land (x, z) \notin C \implies (x, y) \notin B }\right)$
+:$(3'')\quad \forall x, y, z \in S: \left({ (x, z) \notin C \land (x, y) \in B \implies (y, z) \notin A }\right)$
+{{finish}}
+\end{proof}
+
+\begin{proof}
+=== $(1)$ iff $(2)$ ===
+By the definition of [[Definition:Composition of Relations|relation composition]] and [[Definition:Subset|subset]] we have that statement $(1)$ may be written as:
+:$(1'):\quad \forall x, y, z \in S: \left({ (y, z) \in A \land (x, y) \in B \implies (x, z) \in C }\right)$
+Using a different arrangement of variable names, statement $(2)$ can be written:
+:$(2'):\quad \forall x, y, z \in S: \left({ (z, y) \in A^{-1} \land (x, z) \in \overline C \implies (x, y) \in \overline B }\right)$
+By the definition of [[Definition:Inverse Relation|inverse relation]] and the [[Definition:Complement of Relation|complement]] of a relation we can rewrite this as:
+:$(2''):\quad \forall x, y, z \in S: \left({ (y, z) \in A \land (x, z) \notin C \implies (x, y) \notin B }\right)$
+We shall use the [[Method of Truth Tables/Proof of Interderivability|method of truth tables]].
+The two statements will be [[Definition:Logical Equivalence|equivalent]] [[Definition:Iff|iff]] the columns under the [[Definition:Main Connective|main connectives]], which is $\implies$ in each case, are identical.
+'''Statement 1:'''
+$\begin{array}{ccccc}
+((y, z) \in A & \land & (x, y) \in B) & \implies & (x, z) \in C \\
+\hline
+T & T & T & T & T \\
+T & T & T & F & F \\
+T & F & F & T & T \\
+T & F & F & T & F \\
+F & F & T & T & T \\
+F & F & T & T & F \\
+F & F & F & T & T \\
+F & F & F & T & F \\
+\end{array}$
+'''Statement 2:'''
+$\begin{array}{ccccc}
+((y, z) \in A & \land & (x, z) \notin C) & \implies & (x, y) \notin B \\
+\hline
+T & F & F & T & F \\
+T & T & T & F & F \\
+T & F & F & T & T \\
+T & T & T & T & T \\
+F & F & F & T & F \\
+F & F & T & T & F \\
+F & F & F & T & T \\
+F & F & T & T & T \\
+\end{array}$
+{{qed|lemma}}
+=== $(2)$ iff $(3)$ ===
+{{finish}}
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Dual Relation}
+Tags: Relation Theory
+
+\begin{theorem}
+{{TFAE|def = Dual Relation}}
+Let $\RR \subseteq S \times T$ be a [[Definition:Binary Relation|relation]].
+\end{theorem}
+
+\begin{proof}
+Let $\tuple {x, y} \in \paren {\overline \RR}^{-1}$.
+Then:
+{{begin-eqn}}
+{{eqn | l = \tuple {x, y}
+ | o = \in
+ | r = \overline \RR
+ | c =
+}}
+{{eqn | ll= \leadstoandfrom
+ | l = \tuple {y, x}
+ | o = \in
+ | r = \overline \RR
+ | c = {{Defof|Inverse Relation}}
+}}
+{{eqn | ll= \leadstoandfrom
+ | l = \tuple {y, x}
+ | o = \notin
+ | r = \RR
+ | c = {{Defof|Complement of Relation}}
+}}
+{{eqn | ll= \leadstoandfrom
+ | l = \tuple {x, y}
+ | o = \notin
+ | r = \RR^{-1}
+ | c = {{Defof|Inverse Relation}}
+}}
+{{eqn | ll= \leadstoandfrom
+ | l = \tuple {x, y}
+ | o = \in
+ | r = \overline {\paren {\RR^{-1} } }
+ | c = {{Defof|Complement of Relation}}
+}}
+{{end-eqn}}
+{{qed}}
+[[Category:Relation Theory]]
+ensi99c6zqi2h5m2vwmz30jqqt18exa
+\end{proof}<|endoftext|>
+\section{Trivial Gradation is Gradation}
+Tags: Ring Theory
+
+\begin{theorem}
+Let $\left({R, +, \circ}\right)$ be a [[Definition:Ring (Abstract Algebra)|ring]].
+Let $\left({M, e,\cdot}\right)$ be a [[Definition:Monoid|monoid]].
+Let
+:$\displaystyle R = \bigoplus_{m \mathop \in M} R_m$
+be the [[Definition:Trivial Gradation|trivial $M$-gradation]] on $R$.
+This is a [[Definition:Gradation Compatible with Ring Structure|gradation]] on $R$.
+\end{theorem}
+
+\begin{proof}
+We are required to show that:
+:$\forall x \in R_m, y \in R_n: x \circ y \in R_{m \cdot n}$
+First suppose that $m = n = e$ are both the [[Definition:Identity Element|identity]].
+In this case, $R_m = R_n = R$.
+Since by definition, $R$ is [[Definition:Closed Algebraic Structure|closed]] under $\circ$, it follows that
+:$\forall x \in R, y \in R: x \circ y \in R$
+as required.
+Now suppose that either $m \neq e$ or $n \neq e$.
+After possibly exchanging $m$ and $n$, we may as well assume that $n \neq e$.
+In particular, $R_n = \mathbf 0$ is the [[Definition:Null Ring|zero ring]].
+So if $y \in R_n$, then $y = 0$.
+Therefore, for every $x \in R_m$, by [[Ring Product with Zero]], we must have
+:$x \circ y = x \circ 0 = 0$
+Since $R_{m \cdot n}$ is an [[Definition:Abelian Group|abelian group]] it must by definition contain $0$.
+Therefore $x \circ y \in R_{m \cdot n}$ as required.
+{{Qed}}
+[[Category:Ring Theory]]
+l0jo8rrsxtsep3cb7ip1u31nhyifjba
+\end{proof}<|endoftext|>
+\section{Fixed Point of Mappings is Fixed Point of Composition}
+Tags: Mapping Theory
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $f, g: S \to S$ be [[Definition:Mapping|mappings]].
+Let $x \in S$ be a [[Definition:Fixed Point|fixed point]] of both $f$ and $g$.
+Then $x$ is also a [[Definition:Fixed Point|fixed point]] of $f \circ g$, the [[Definition:Composition of Mappings|composition]] of $f$ and $g$.
+\end{theorem}
+
+\begin{proof}
+Since $x$ is a [[Definition:Fixed Point|fixed point]] of $g$:
+: $g \left({x}\right) = x$
+Thus:
+: $f \left({g \left({x}\right)}\right) = f \left({x}\right)$
+Since $x$ is a [[Definition:Fixed Point|fixed point]] of $f$:
+: $f \left({x}\right) = x$
+It follows that:
+: $\left({f \circ g}\right) \left({x}\right) = f \left({g \left({x}\right)}\right) = x$
+Thus $x$ is a [[Definition:Fixed Point|fixed point]] of $f \circ g$.
+{{qed}}
+[[Category:Mapping Theory]]
+5xe9ki10ex12x7axmxtisa1dllpha31
+\end{proof}<|endoftext|>
+\section{Fixed Point of Mappings is Fixed Point of Composition/General Result}
+Tags: Mapping Theory
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $n \in \N$ be a [[Definition:Strictly Positive Integer|strictly positive integer]].
+Let $\N_n$ be the [[Definition:Initial Segment of Natural Numbers|initial segment]] of $n$ in $\N$.
+That is, let $\N_n = \left\{{0, 1, \dots, n-1}\right\}$.
+For each $i \in \N_n$, let $f_i: S \to S$ be a [[Definition:Mapping|mapping]].
+Let $x \in S$ be a [[Definition:Fixed Point|fixed point]] of $f_i$ for each $i \in \N_n$.
+Let $g = f_0 \circ f_1 \circ \dots \circ f_{n-1}$ be the [[Definition:Composition of Mappings|composition]] of all the $f_i$s.
+Then $x$ is a [[Definition:Fixed Point|fixed point]] of $g$.
+\end{theorem}
+
+\begin{proof}
+The proof proceeds by [[Principle of Mathematical Induction|mathematical induction]] on $n$, the number of [[Definition:Mapping|mappings]].
+=== Base Case ===
+{{questionable|Flawed. The base case needs to be the two-element case for obvious reasons.}}
+If $n = 1$, then $g = f_0$.
+Since $x$ is a [[Definition:Fixed Point|fixed point]] of $f_0$, it is also a fixed point of $g$.
+{{qed|lemma}}
+=== Inductive Step ===
+Suppose that the theorem holds for $n$. We will show that it holds for $n+1$.
+Let $x \in S$ be a [[Definition:Fixed Point|fixed point]] of $f_i$ for each $i \in \N_{n+1}$.
+Let $g = f_0 \circ f_1 \circ \dots \circ f_{n-1} \circ f_n$ be the [[Definition:Composition of Mappings|composition]] of all the $f_i$s.
+Since the theorem holds for $n$, $x$ is a [[Definition:Fixed Point|fixed point]] of $f_0 \circ f_1 \circ \dots \circ f_{n-1}$.
+By [[Composition of Mappings is Associative]]:
+: $g = \left({ f_0 \circ f_1 \circ \dots \circ f_{n-1} }\right) \circ f_n$
+Thus by [[Fixed Point of Mappings is Fixed Point of Composition]] (for two mappings), $x$ is a [[Definition:Fixed Point|fixed point]] of $g$.
+{{qed}}
+[[Category:Mapping Theory]]
+h1jv5hvio53fngdgbwydje3btthk4dg
+\end{proof}<|endoftext|>
+\section{Polynomial has Integer Coefficients iff Content is Integer}
+Tags: Content of Polynomial
+
+\begin{theorem}
+$f$ has [[Definition:Integer|integer]] [[Definition:Polynomial Coefficient|coefficients]] {{iff}} $\cont f$ is an [[Definition:Integer|integer]].
+\end{theorem}
+
+\begin{proof}
+If $f \in \Z \sqbrk X$ then $\cont f \in \Z$ by definition of [[Definition:Content of Rational Polynomial|content]].
+Conversely, suppose that:
+:$f = a_d X^d + \cdots + a_1 X + a_0 \notin \Z \sqbrk X$
+Let $m = \min \set {n \in \N : n f \in \Z \sqbrk X}$.
+Then, by definition of [[Definition:Content of Rational Polynomial|content]]:
+:$\cont f = \dfrac 1 m \gcd \set {m a_d, \ldots, m a_0}$
+So $\cont f \in \Z$ would mean that this [[Definition:Greatest Common Divisor of Integers|GCD]] is a [[Definition:Multiple|multiple]] of $m$.
+This, however, means that for each $i$, $\dfrac {m a_i} m = a_i$ is an [[Definition:Integer|integer]], which contradicts our assumption that $f \notin \Z \sqbrk X$.
+{{Qed}}
+[[Category:Content of Polynomial]]
+tle7se59a7q909dvaxtit66n56tgio1
+\end{proof}<|endoftext|>
+\section{Content of Monic Polynomial}
+Tags: Content of Polynomial, Monic Polynomials
+
+\begin{theorem}
+If $f$ is [[Definition:Monic Polynomial|monic]], then $\cont f = \dfrac 1 n$ for some [[Definition:Integer|integer]] $n$.
+\end{theorem}
+
+\begin{proof}
+Since $f$ is [[Definition:Monic Polynomial|monic]], it can be written as:
+:$f = X^r + \cdots + a_1 X + a_0$
+Let $n = \min \set {n \in \N : n f \in \Z \sqbrk X}$.
+Let $d = \cont {n f}$.
+Then by definition of [[Definition:Content of Rational Polynomial|content]]:
+:$d = \gcd \set {n, n a_{r - 1}, \ldots, n a_1, n a_0}$
+Therefore, by definition of [[Definition:Greatest Common Divisor of Integers|GCD]], $d$ [[Definition:Divisor of Integer|divides]] $n$.
+So say $n = k d$ with $k \in \Z$.
+Then:
+:$\cont f = \dfrac d {k d} = \dfrac 1 k$
+as required.
+{{Qed}}
+[[Category:Content of Polynomial]]
+[[Category:Monic Polynomials]]
+kblu9qebsdbv86z0a5zd2jhnb86ubfh
+\end{proof}<|endoftext|>
+\section{Content of Scalar Multiple}
+Tags: Polynomial Theory, Content of Polynomial
+
+\begin{theorem}
+:$\cont {q f} = q \cont f$
+\end{theorem}
+
+\begin{proof}
+Let $q = \dfrac a b$ with $a, b \in \Z$.
+Let $\Z \sqbrk X$ denote the [[Definition:Ring of Polynomials|ring of polynomials]] over $\Z$.
+Let $n \in \Z$ such that $n f \in \Z \sqbrk X$.
+Then we have:
+:$b n \paren {q f} = a n f \in \Z \sqbrk X$
+By the definition of [[Definition:Content of Rational Polynomial|content]], and using that $a \in \Z$:
+:$\cont {b n q f} = a \cont {n f}$
+{{handwaving|why is $b n$ the infimum mentioned in the definition?}}
+By definition of [[Definition:Content of Rational Polynomial|content]]:
+:$\cont {q f} = \dfrac 1 {b n} \cont {b n q f}$
+Combining the above with the definition of $\cont f$:
+:$\cont {q f} = \dfrac a b \dfrac 1 n \cont {n f} = q \cont f$
+{{Qed}}
+[[Category:Polynomial Theory]]
+[[Category:Content of Polynomial]]
+jydm79bz6zwzzp09mabap9kjb8p52c8
+\end{proof}<|endoftext|>
+\section{Fixed Point of Composition of Inflationary Mappings}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $f, g: S \to S$ be [[Definition:Inflationary Mapping|inflationary mappings]].
+Let $x \in S$.
+Then:
+: $x$ is a [[Definition:Fixed Point|fixed point]] of $f \circ g$
+{{iff}}:
+: $x$ is a [[Definition:Fixed Point|fixed point]] of both $f$ and $g$.
+\end{theorem}
+
+\begin{proof}
+=== Necessary Condition ===
+Follows from [[Fixed Point of Mappings is Fixed Point of Composition]].
+{{qed|lemma}}
+=== Sufficient Condition ===
+Let $h = f \circ g$.
+Let $x$ be a [[Definition:Fixed Point|fixed point]] of $h$.
+Then by the definition of [[Definition:Composition of Mappings|composition]]:
+: $f \left({g \left({x}\right)}\right) = x$
+Since $g$ is [[Definition:Inflationary Mapping|inflationary]]:
+: $x \preceq g \left({x}\right)$
+Suppose [[Proof by Contradiction|for the sake of contradiction]] that $x \ne g \left({x}\right)$.
+Then $x \prec g \left({x}\right)$.
+Since $f$ is also [[Definition:Inflationary Mapping|inflationary]]:
+: $g \left({x}\right) \preceq f \left({g \left({x}\right)}\right)$
+Thus by [[Extended Transitivity]]:
+: $x \prec f \left({g \left({x}\right)}\right)$
+But this contradicts the assumption that $x$ is a [[Definition:Fixed Point|fixed point]] of $f \circ g$.
+Therefore, $x = g \left({x}\right)$, and $x$ is a [[Definition:Fixed Point|fixed point]] of $g$.
+Suppose [[Proof by Contradiction|for the sake of contradiction]] that $f \left({x}\right) \ne x$.
+Then $x \prec f \left({x}\right)$.
+As we have shown that $x = g \left({x}\right)$, it follows that:
+: $x \prec f \left({g \left({x}\right)}\right)$
+But this contradicts the assumption that $x$ is a [[Definition:Fixed Point|fixed point]] of $f \circ g$.
+Hence, $x$ is also a [[Definition:Fixed Point|fixed point]] of $f$.
+{{qed}}
+[[Category:Order Theory]]
+fgaz4yr9gvacrktsbz58kviss2ovnsl
+\end{proof}<|endoftext|>
+\section{Group of Units is Group}
+Tags: Rings with Unity
+
+\begin{theorem}
+Let $\struct {R, +, \circ}$ be a [[Definition:Ring with Unity|ring with unity]].
+Then the [[Definition:Set|set]] of [[Definition:Unit of Ring|units]] of $\struct {R, +, \circ}$ forms a [[Definition:Group|group]] under $\circ$.
+Hence the justification for referring to the [[Definition:Group of Units of Ring|group of units]] of $\struct {R, +, \circ}$.
+\end{theorem}
+
+\begin{proof}
+Follows directly from [[Invertible Elements of Monoid form Subgroup of Cancellable Elements]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Principal Ideal of Principal Ideal Domain is of Irreducible Element iff Maximal/Forward Implication}
+Tags: Principal Ideal of Principal Ideal Domain is of Irreducible Element iff Maximal
+
+\begin{theorem}
+Let $\struct {D, +, \circ}$ be a [[Definition:Principal Ideal Domain|principal ideal domain]].
+Let $p$ be an [[Definition:Irreducible Element of Ring|irreducible element]] of $D$.
+Let $\ideal p$ be the [[Definition:Principal Ideal of Ring|principal ideal of $D$ generated by $p$]].
+Then $\ideal p$ is a [[Definition:Maximal Ideal of Ring|maximal ideal]] of $D$.
+\end{theorem}
+
+\begin{proof}
+Let $p$ be [[Definition:Irreducible Element of Ring|irreducible]] in $D$.
+Let $U_D$ be the [[Definition:Group of Units of Ring|group of units]] of $D$.
+By definition, an [[Definition:Irreducible Element of Ring|irreducible element]] is not a [[Definition:Unit of Ring|unit]].
+So from [[Principal Ideals in Integral Domain]]:
+:$\ideal p \subset D$
+Suppose the [[Definition:Principal Ideal of Ring|principal ideal]] $\ideal p$ is not [[Definition:Maximal Ideal of Ring|maximal]].
+Then there exists an [[Definition:Ideal of Ring|ideal]] $K$ of $D$ such that:
+:$\ideal p \subset K \subset D$
+Because $D$ is a [[Definition:Principal Ideal Domain|principal ideal domain]]:
+:$\exists x \in D: K = \ideal x$
+Thus:
+:$\ideal p \subset \ideal x \subset D$
+Because $\ideal p \subset \ideal x$:
+:$x \divides p$
+by [[Principal Ideals in Integral Domain]].
+That is:
+:$\exists t \in D: p = t \circ x$
+But $p$ is [[Definition:Irreducible Element of Ring|irreducible]] in $D$, so $x \in U_D$ or $t \in U_D$.
+That is, either $x$ is a [[Definition:Unit of Ring|unit]] or $x$ is an [[Definition:Associate in Integral Domain|associate]] of $p$.
+But since $K \subset D$:
+:$\ideal x \ne D$ so $x \notin U_D$
+by [[Principal Ideals in Integral Domain]].
+Also, since $\ideal p \subset \ideal x$:
+:$\ideal p \ne \ideal x$
+so $x$ is not an [[Definition:Associate in Integral Domain|associate]] of $p$, by [[Principal Ideals in Integral Domain]].
+This contradiction shows that $\ideal p$ is a [[Definition:Maximal Ideal of Ring|maximal ideal]] of $D$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Principal Ideal of Principal Ideal Domain is of Irreducible Element iff Maximal/Reverse Implication}
+Tags: Principal Ideal of Principal Ideal Domain is of Irreducible Element iff Maximal
+
+\begin{theorem}
+Let $\left({D, +, \circ}\right)$ be a [[Definition:Principal Ideal Domain|principal ideal domain]].
+Let $\left({p}\right)$ be the [[Definition:Principal Ideal of Ring|principal ideal of $D$ generated by $p$]].
+Let $\left({p}\right)$ be a [[Definition:Maximal Ideal of Ring|maximal ideal]] of $D$.
+Then $p$ is [[Definition:Irreducible Element of Ring|irreducible]].
+\end{theorem}
+
+\begin{proof}
+Let $\left({p}\right)$ be a [[Definition:Maximal Ideal of Ring|maximal ideal]] of $D$.
+Let $p = f g$ be any [[Definition:Factorization|factorization]] of $p$.
+We must show that one of $f, g$ is a [[Definition:Unit of Ring|unit]].
+Suppose that neither of $f, g$ is a [[Definition:Unit of Ring|unit]].
+First it will be shown that:
+:$\left({p}\right) \subsetneqq \left({f}\right)$
+Let $x \in \left({p}\right)$.
+That is:
+:$\exists q \in D: x = p q$
+Then:
+:$x = f g q \in \left({f}\right)$
+so:
+:$\left({p}\right) \subseteq \left({f}\right)$
+Now suppose $f \in \left({p}\right)$.
+Then:
+:$\exists r \in D: f = r p$
+and so from $p = f g$ above:
+:$f = r g f$
+Therefore:
+:$r g = 1$
+and so $g$ is a [[Definition:Unit of Ring|unit]].
+This is a contradiction.
+Thus:
+:$f \notin \left({p}\right)$
+and clearly:
+:$f \in \left({f}\right)$
+so:
+:$\left({p}\right) \subsetneqq \left({f}\right)$
+as claimed.
+{{Handwaving|"Clearly" needs to be replaced by a link to the definition which specifies this fact.}}
+Therefore, since $\left({p}\right)$ is [[Definition:Maximal Ideal of Ring|maximal]], we must have:
+:$\left({f}\right) = D$
+But we assumed that $f$ is not a [[Definition:Unit of Ring|unit]].
+So there is no $h \in D$ such that $f h = 1$.
+Therefore:
+:$1 \notin \left({f}\right) = \left\{{f h: h \in D}\right\}$
+and:
+:$\left({f}\right) \subsetneqq D$
+This is a contradiction.
+Therefore at least one of $f, g$ must be a [[Definition:Unit of Ring|unit]].
+This completes the proof.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Subring of Polynomials over Integral Domain Contains that Domain}
+Tags: Polynomial Theory, Subrings
+
+\begin{theorem}
+Let $\struct {R, +, \circ}$ be a [[Definition:Commutative Ring|commutative ring]].
+Let $\struct {D, +, \circ}$ be an [[Definition:Subdomain|integral subdomain]] of $R$.
+Let $x \in R$.
+Let $D \sqbrk x$ denote the [[Definition:Ring of Polynomials in Ring Element|ring of polynomials]] in $x$ over $D$.
+Then $D \sqbrk x$ contains $D$ as a [[Definition:Subring|subring]] and $x$ as an [[Definition:Element|element]].
+\end{theorem}
+
+\begin{proof}
+We have that $\displaystyle \sum_{k \mathop = 0}^m a_k \circ x^k$ is a [[Definition:Polynomial over Integral Domain|polynomial]] for all $m \in \Z_{\ge 0}$.
+Set $m = 0$:
+: $\displaystyle \sum_{k \mathop = 0}^0 a_k \circ x^k = a_0 \circ x^0 = a_0 \circ 1_D = a_0$
+Thus:
+:$\displaystyle \forall a_0 \in D: a_0 = \sum_{k \mathop = 0}^0 a_k \circ x^k \in D \sqbrk x$
+It follows directly that $D$ is a subring of $D \sqbrk x$ by applying the [[Subring Test]] on elements of $D$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Subring of Polynomials over Integral Domain is Smallest Subring containing Element and Domain}
+Tags: Polynomial Theory, Subrings
+
+\begin{theorem}
+Let $\struct {R, +, \circ}$ be a [[Definition:Commutative Ring|commutative ring]].
+Let $\struct {D, +, \circ}$ be an [[Definition:Subdomain|integral subdomain]] of $R$.
+Let $x \in R$.
+Let $D \sqbrk x$ denote the [[Definition:Ring of Polynomials in Ring Element|ring of polynomials]] in $x$ over $D$.
+Then $D \sqbrk x$ is the [[Definition:Smallest Set by Set Inclusion|smallest]] [[Definition:Subring|subring]] of $R$ which contains $D$ as a [[Definition:Subring|subring]] and $x$ as an [[Definition:Element|element]].
+\end{theorem}
+
+\begin{proof}
+{{proof wanted|Whitelaw says "fairly obviously", so should be more or less straightforward.}}
+\end{proof}<|endoftext|>
+\section{Cantor-Bernstein-Schröder Theorem/Lemma}
+Tags: Cantor-Bernstein-Schröder Theorem
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $T \subseteq S$.
+Suppose that $f: S \to T$ is an [[Definition:Injection|injection]].
+Then there is a [[Definition:Bijection|bijection]] $g: S \to T$.
+\end{theorem}<|endoftext|>
+\section{Cantor-Bernstein-Schröder Theorem/Proof 5}
+Tags: Cantor-Bernstein-Schröder Theorem
+
+\begin{theorem}
+Let $S$ and $T$ be [[Definition:Set|sets]].
+Let $f: S \to T$ and $g: T \to S$ be [[Definition:Injection|injections]].
+Then there exists a [[Definition:Bijection|bijection]] $\phi: S \to T$.
+\end{theorem}
+
+\begin{proof}
+By [[Injection to Image is Bijection]]:
+:$g: T \to g \sqbrk T$ is a [[Definition:Bijection|bijection]].
+Thus $T$ is [[Definition:Set Equivalence|equivalent]] to $g \sqbrk T$.
+By [[Composite of Injections is Injection]] $g \circ f: S \to g \sqbrk T \subset S$ is also an [[Definition:Injection|injection]] (to a [[Definition:Subset|subset]] of the [[Definition:Domain of Mapping|domain]] of $g \circ f$).
+Then by [[Cantor-Bernstein-Schröder Theorem/Lemma|Cantor-Bernstein-Schröder Theorem: Lemma]]:
+:There exists a [[Definition:Bijection|bijection]] $h: S \to g \sqbrk T$.
+Thus $S$ is [[Definition:Set Equivalence|equivalent]] to $g \sqbrk T$.
+We already know that $T$ is equivalent to $g \sqbrk T$.
+Thus by [[Set Equivalence is Equivalence Relation]], $S$ is [[Definition:Set Equivalence|equivalent]] to $T$.
+By the definition of [[Definition:Set Equivalence|set equivalence]]:
+:There is a [[Definition:Bijection|bijection]] $\phi: S \to T$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Quotient Space of Real Line may be Indiscrete}
+Tags: Quotient Spaces, Real Number Line with Euclidean Topology
+
+\begin{theorem}
+Let $\struct {\R, \tau}$ denote the [[Definition:Real Number Line with Euclidean Topology|real number line with the usual (Euclidean) topology]].
+Let $\Q$ be the [[Definition:Rational Number|rational numbers]].
+Let $\mathbb I$ be the [[Definition:Irrational Number|irrational numbers]].
+Then $\set {\Q, \mathbb I}$ is a [[Definition:Partition (Set Theory)|partition]] of $\R$.
+Let $\sim$ be the [[Definition:Equivalence Relation|equivalence relation]] [[Definition:Relation Induced by Partition|induced]] on $\R$ by $\set {\Q, \mathbb I}$.
+Let $T_\sim := \struct {\R / {\sim}, \tau_\sim}$ be the [[Definition:Quotient Space (Topology)|quotient space]] of $\R$ by $\sim$.
+Then $T_\sim$ is an [[Definition:Indiscrete Space|indiscrete space]].
+\end{theorem}
+
+\begin{proof}
+Let $\phi: \R \to \R/{\sim}$ be the [[Definition:Quotient Mapping|quotient mapping]].
+Then:
+:$\forall x \in \Q: \map \phi x = \Q$
+:$\forall x \in \mathbb I: \map \phi x = \mathbb I$
+{{AimForCont}} $\set {\mathbb I} \in \tau_\sim$.
+Then by the definition of the [[Definition:Quotient Topology|quotient topology]]:
+:$\O \subsetneqq \mathbb I = \phi^{-1} \sqbrk {\set {\mathbb I} } \in \tau$
+Thus by [[Rationals are Everywhere Dense in Topological Space of Reals]], $\mathbb I$ contains a [[Definition:Rational Number|rational number]].
+This is a [[Definition:Contradiction|contradiction]].
+{{AimForCont}} $\set \Q \in \tau_\sim$.
+Then:
+:$\O \subsetneqq \Q = \phi^{-1} \sqbrk {\set \Q} \in \tau$
+Thus by [[Irrationals are Everywhere Dense in Topological Space of Reals]], $\Q$ contains an [[Definition:Irrational Number|irrational number]].
+This is a [[Definition:Contradiction|contradiction]].
+As $\R / {\sim}$ has exactly two elements, its only [[Definition:Non-Empty Set|non-empty]] [[Definition:Proper Subset|proper subsets]] are $\set \Q$ and $\set {\mathbb I}$.
+As neither of these sets is $\tau_\sim$-open, $\struct {\R / {\sim}, \tau_\sim}$ is [[Definition:Indiscrete Space|indiscrete]].
+{{qed}}
+[[Category:Quotient Spaces]]
+[[Category:Real Number Line with Euclidean Topology]]
+7xylwfsreu0d6btn0gh71xdhpx4qwml
+\end{proof}<|endoftext|>
+\section{Quotient Space of Real Line may not be Kolmogorov}
+Tags: Quotient Spaces, Real Number Line with Euclidean Topology, T0 Spaces
+
+\begin{theorem}
+Let $\struct {\R, \tau}$ denote the [[Definition:Real Number Line with Euclidean Topology|real number line with the usual (Euclidean) topology]].
+Then there exists an [[Definition:Equivalence Relation|equivalence relation]] $\sim$ on $\R$ such that the [[Definition:Quotient Space (Topology)|quotient space]] $\struct {\R / {\sim}, \tau_\sim}$ is not [[Definition:Kolmogorov Space|Kolmogorov]].
+\end{theorem}
+
+\begin{proof}
+By [[Quotient Space of Real Line may be Indiscrete]], there is an [[Definition:Equivalence Relation|equivalence relation]] $\sim$ on $\R$ such that the [[Definition:Quotient Space (Topology)|quotient space]] $\struct {\R / {\sim}, \tau_\sim}$ has two points and is [[Definition:Indiscrete Space|indiscrete]].
+It follows directly from the definition of [[Definition:Kolmogorov Space|Kolmogorov space]] that $\struct {\R / {\sim}, \tau_\sim}$ is not a [[Definition:Kolmogorov Space|Kolmogorov space]].
+{{qed}}
+[[Category:Quotient Spaces]]
+[[Category:Real Number Line with Euclidean Topology]]
+[[Category:T0 Spaces]]
+1k2eb023ogto02shmzysst4j3e45btq
+\end{proof}<|endoftext|>
+\section{Quotient Space of Real Line may be Kolmogorov but not Fréchet}
+Tags: Quotient Spaces, Real Number Line with Euclidean Topology, T0 Spaces, T1 Spaces
+
+\begin{theorem}
+Let $\struct {\R, \tau}$ denote the [[Definition:Real Number Line with Euclidean Topology|real number line with the usual (Euclidean) topology]].
+Define an [[Definition:Equivalence Relation|equivalence relation]] $\sim$ by letting $x \sim y$ {{iff}} either:
+:$x = y$
+or:
+:$x, y \in \Q$
+Let $\struct {\R / {\sim}, \tau_\sim}$ be the [[Definition:Quotient Space (Topology)|quotient space]] of $\R$ by $\sim$.
+Then $\struct {\R / {\sim}, \tau_\sim}$ is a [[Definition:Kolmogorov Space|Kolmogorov space]] but not a [[Definition:Fréchet Space (Topology)|Fréchet space]].
+\end{theorem}
+
+\begin{proof}
+Let $Y = \R / {\sim}$.
+Let $\phi: \R \to Y$ be the [[Definition:Quotient Mapping|quotient mapping]].
+Note that:
+:$\map \phi x = \set x$ if $x$ is [[Definition:Irrational Number|irrational]].
+:$\map \phi x = \Q$ if $x$ is [[Definition:Rational Number|rational]].
+=== Kolmogorov ===
+If $x$ is irrational, then $\phi^{-1} \sqbrk {Y \setminus \set x} = \R \setminus \set x$.
+Thus $Y \setminus \set x$ is [[Definition:Open Set (Topology)|open]] in $Y$.
+Let $p, q \in Y$ such that $p \ne q$.
+Then $p$ or $q$ must be a [[Definition:Singleton|singleton]] containing an [[Definition:Irrational Number|irrational number]].
+{{WLOG}}, suppose that $p$ is a [[Definition:Singleton|singleton]] containing an [[Definition:Irrational Number|irrational number]].
+Then as shown above, $Y \setminus \set p$ is [[Definition:Open Set (Topology)|open]] in $Y$.
+This [[Definition:Open Set (Topology)|open set]] contains $q$ but not $p$, so $p$ and $q$ are [[Definition:Topologically Distinguishable|distinguishable]].
+Since this holds for any two points in $Y$, the space is [[Definition:Kolmogorov Space|Kolmogorov]].
+{{qed|lemma}}
+=== Not Fréchet ===
+{{AimForCont}} $\set \Q$ is [[Definition:Closed Set (Topology)|closed]] in $Y$.
+By [[Identification Mapping is Continuous]], $\phi$ is [[Definition:Everywhere Continuous Mapping (Topology)|continuous]].
+Thus $\phi^{-1} \sqbrk {\set \Q} = \Q$ is [[Definition:Closed Set (Topology)|closed]] in $\R$.
+But this [[Definition:Contradiction|contradicts]] the fact that $\Q \subsetneqq \R$ and [[Rationals are Everywhere Dense in Topological Space of Reals]].
+Thus the singleton $\set \Q$ is not [[Definition:Closed Set (Topology)|closed]] in $Y$.
+Hence $\struct {Y, \tau_\sim}$ is not a [[Definition:Fréchet Space (Topology)|Fréchet space]].
+{{qed}}
+[[Category:Quotient Spaces]]
+[[Category:Real Number Line with Euclidean Topology]]
+[[Category:T0 Spaces]]
+[[Category:T1 Spaces]]
+axy3wekfsjuhd6sbkmjrs6p3rxqg1bj
+\end{proof}<|endoftext|>
+\section{Unique Representation in Polynomial Forms/General Result}
+Tags: Polynomial Theory
+
+\begin{theorem}
+Let $f$ be a [[Definition:Polynomial Form|polynomial form]] in the [[Definition:Indeterminate (Polynomial Theory)|indeterminates]] $\set {X_j: j \in J}$ such that $f: \mathbf X^k \mapsto a_k$.
+For $r \in \R$, $\mathbf X^k \in M$, let $r \mathbf X^k$ denote the [[Definition:Polynomial Form|polynomial form]] that takes the value $r$ on $\mathbf X^k$ and zero on all other [[Definition:Monomial|monomials]].
+Let $Z$ denote the set of all [[Definition:Multiindex|multiindices]] indexed by $J$.
+Then the sum representation:
+:$\ds \hat f = \sum_{k \mathop \in Z} a_k \mathbf X^k$
+has only [[Definition:Finite|finitely many]] non-zero [[Definition:Term of Polynomial|terms]].
+Moreover it is everywhere equal to $f$, and is the unique such sum.
+\end{theorem}
+
+\begin{proof}
+Suppose that the sum has infinitely many non-zero terms.
+Then infinitely many $a_k$ are non-zero, which contradicts the definition of a polynomial.
+Therefore the sum consists of finitely many non-zero terms.
+Let $\mathbf X^m \in M$ be arbitrary.
+Then:
+{{begin-eqn}}
+{{eqn | l = \map {\hat f} {\mathbf X^m}
+ | r = \paren {\sum_{k \mathop \in Z} a_k \mathbf X^k} \paren {\mathbf X^m}
+}}
+{{eqn | r = \paren {a_m \mathbf X^m} \paren {\mathbf X^m} + \sum_{k \mathop \ne m \mathop \in Z} \paren {a_k \mathbf X^k} \paren {\mathbf X^m}
+}}
+{{eqn | r = a_m
+}}
+{{end-eqn}}
+So $\hat f = f$.
+Finally suppose that:
+:$\ds \tilde f = \sum_{k \mathop \in Z} b_k \mathbf X^k$
+is another such representation with $b_m \ne a_m$ for some $m \in Z$.
+Then:
+:$\map {\tilde f} {\mathbf X^m} = b_m \ne a_m = \map f {\mathbf X^m}$
+Therefore $\hat f$ as defined above is the only such representation.
+{{qed}}
+[[Category:Polynomial Theory]]
+7o8pvns4dhu5hef6lmp7exgbdq66ayi
+\end{proof}<|endoftext|>
+\section{Closed Element of Composite Closure Operator}
+Tags: Closure Operators
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $f, g: S \to S$ be [[Definition:Closure Operator|closure operators]].
+Let $h = f \circ g$, where $\circ$ represents [[Definition:Composition of Mappings|composition]].
+Suppose that $h$ is also a closure operator.
+Then an element $x \in S$ is [[Definition:Closed Element|closed]] with respect to $h$ [[Definition:Iff|iff]] it is closed with respect to $f$ and with respect to $g$.
+\end{theorem}
+
+\begin{proof}
+An element is [[Definition:Closed Element|closed]] with respect to a [[Definition:Closure Operator|closure operator]] [[Definition:Iff|iff]] it is a [[Definition:Fixed Point|fixed point]] of that operator.
+Since $f$ and $g$ are [[Definition:Closure Operator|closure operators]], they are [[Definition:Inflationary Mapping|inflationary]].
+Thus the result follows from [[Fixed Point of Composition of Inflationary Mappings]].
+{{qed}}
+[[Category:Closure Operators]]
+hwt0nny0li0kzk0oud7lzfroivzhwel
+\end{proof}<|endoftext|>
+\section{Law of Excluded Middle for Two Variables}
+Tags: Propositional Logic
+
+\begin{theorem}
+:$\vdash (p \land q) \lor (\lnot p \land q) \lor (p \land \lnot q) \lor (\lnot p \land \lnot q)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau| \vdash ((p \land q) \lor (\lnot p \land q)) \lor ( (p \land \lnot q) \lor (\lnot p \land \lnot q)) }}
+{{ExcludedMiddle|1|p \lor \lnot p}}
+{{ExcludedMiddle|2|q \lor \lnot q}}
+{{Conjunction|3||(p \lor \lnot p) \land (q \lor \lnot q)|1|2}}
+{{SequentIntro|4||((p \lor \lnot p) \land q) \lor ((p \lor \lnot p) \land \lnot q)|3|[[Rule of Distribution/Conjunction Distributes over Disjunction/Left Distributive/Formulation 1|Conjunction Distributes over Disjunction]]}}
+{{Assumption|5|(p \lor \lnot p) \land q}}
+{{SequentIntro|6|5|(p \land q) \lor (\lnot p \land q)|5|[[Rule of Distribution/Conjunction Distributes over Disjunction/Right Distributive/Formulation 1|Conjunction Distributes over Disjunction]]}}
+{{Implication|7||(p \lor \lnot p) \land q \implies (p \land q) \lor (\lnot p \land q)|5|6 }}
+{{Assumption|8|(p \lor \lnot p) \land \lnot q}}
+{{SequentIntro|9|8| (p \land \lnot q) \lor (\lnot p \land \lnot q)|8| [[Rule of Distribution/Conjunction Distributes over Disjunction/Right Distributive/Formulation 1|Conjunction Distributes over Disjunction]] }}
+{{Implication|10|| (p \lor \lnot p) \land \lnot q \implies (p \land \lnot q) \lor (\lnot p \land \lnot q)|8|9}}
+{{SequentIntro|11|| ((p \lor \lnot p) \land q) \lor ((p \lor \lnot p) \land \lnot q) \implies ((p \land q) \lor (\lnot p \land q)) \lor ( (p \land \lnot q) \lor (\lnot p \land \lnot q))|7,10|[[Constructive Dilemma/Formulation 1|Constructive Dilemma]]}}
+{{ModusPonens|12|| ((p \land q) \lor (\lnot p \land q)) \lor ( (p \land \lnot q) \lor (\lnot p \land \lnot q))|11|4 }}
+{{EndTableau}}
+{{qed}}
+{{LEM}}
+[[Category:Propositional Logic]]
+48jjx3jhedwa77osqylwqo68vmo427a
+\end{proof}<|endoftext|>
+\section{Cardinality of Finite Set is Well-Defined}
+Tags: Set Theory, Cardinality
+
+\begin{theorem}
+Let $S$ be a [[Definition:Finite Set|finite set]].
+Then there is a unique [[Definition:Natural Number|natural number]] $n$ such that $S \sim \N_n$, where:
+:$\sim$ represents [[Definition:Set Equivalence|set equivalence]]
+and:
+:$\N_n = \set {0, 1, \dotsc, n - 1}$ is the [[Definition:Initial Segment of Natural Numbers|initial segment of $\N$]] determined by $n$.
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Finite Set|finite set]], there is an $n \in \N$ such that $S \sim \N_n$.
+Suppose $m \in \N$ and $S \sim \N_m$.
+It follows from [[Set Equivalence is Equivalence Relation]] that $\N_n \sim \N_m$.
+Thus by [[Equality of Natural Numbers]], $n = m$.
+Therefore the [[Definition:Cardinality|cardinality]] of a [[Definition:Finite|finite set]] is [[Definition:Well-Defined Mapping|well-defined]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Ring of Polynomial Forms is Commutative Ring with Unity}
+Tags: Polynomial Rings
+
+\begin{theorem}
+Let $\struct {R, +, \circ}$ be a [[Definition:Commutative and Unitary Ring|commutative ring with unity]].
+Let $A = R \sqbrk {\set {X_j: j \in J} }$ be the set of all [[Definition:Polynomial Form|polynomial forms]] over $R$ in the indeterminates $\set {X_j: j \in J}$.
+Then $\struct {A, +, \circ}$ is a [[Definition:Commutative and Unitary Ring|commutative ring with unity]].
+\end{theorem}
+
+\begin{proof}
+We must show that the [[Definition:Commutative and Unitary Ring Axioms|commutative and unitary ring axioms]] are satisfied:
+{{:Definition:Commutative and Unitary Ring Axioms}}
+=== Proof of the additive axioms ===
+'''A1:'''
+This is shown by [[Polynomials Closed under Addition]].
+'''A2-A5:'''
+According to the formal definition, a polynomial is a map from the [[Definition:Free Commutative Monoid|free commutative monoid]] to $R$.
+Now observe that addition of [[Definition:Polynomial Form|polynomial forms]] is [[Definition:Induced Structure|induced]] by addition in $R$.
+Therefore:
+:'''A2''' is shown by [[Structure Induced by Associative Operation is Associative]]
+:'''A3''' is shown by [[Induced Structure Identity]]
+:'''A4''' is shown by [[Induced Structure Inverse]]
+:'''A5''' is shown by [[Structure Induced by Commutative Operation is Commutative]]
+=== Proof of the multiplicative axioms ===
+'''M1:'''
+This is shown by [[Polynomials Closed under Ring Product]].
+Multiplication of polynomial forms is not [[Definition:Induced Structure|induced]] by multiplication in $R$, so we must show the multiplicative axioms by hand.
+'''M2:'''
+This is shown by [[Multiplication of Polynomials is Associative]].
+'''M3:'''
+This is shown by [[Polynomials Contain Multiplicative Identity]].
+'''M4:'''
+This is shown by [[Multiplication of Polynomials is Commutative]].
+'''D:'''
+This is shown by [[Multiplication of Polynomials Distributes over Addition]].
+Therefore, all of the axioms of a [[Definition:Commutative and Unitary Ring|commutative ring with unity]] are satisfied.
+{{qed}}
+[[Category:Polynomial Rings]]
+ld9w9xriw4vtobh8neg5cpkgnx0kh3k
+\end{proof}<|endoftext|>
+\section{Idempotent Elements form Subsemigroup of Commutative Semigroup}
+Tags: Subsemigroups, Idempotence, Idempotent Elements form Subsemigroup of Commutative Semigroup
+
+\begin{theorem}
+Let $\struct {S, \circ}$ be a [[Definition:Semigroup|semigroup]] such that $\circ$ is [[Definition:Commutative Operation|commutative]].
+Let $I$ be the [[Definition:Set|set]] of all [[Definition:Element|elements]] of $S$ that are [[Definition:Idempotent Element|idempotent]] under $\circ$.
+That is:
+:$I = \set {x \in S: x \circ x = x}$
+Then $\struct {I, \circ}$ is a [[Definition:Subsemigroup|subsemigroup]] of $\struct {S, \circ}$.
+\end{theorem}
+
+\begin{proof}
+By [[Subsemigroup Closure Test]] we need only show that:
+:For all $x, y \in I$: $x \circ y \in I$.
+That is:
+:$\paren {x \circ y} \circ \paren {x \circ y} = x \circ y$
+We reason as follows:
+{{begin-eqn}}
+{{eqn | l = \paren {x \circ y} \circ \paren {x \circ y}
+ | r = \paren {x \circ y} \circ \paren {y \circ x}
+ | c = $\circ$ is [[Definition:Commutative Operation|commutative]]
+}}
+{{eqn | r = \paren {x \circ \paren {y \circ x} } \circ y
+ | c = $\circ$ is [[Definition:Associative Operation|associative]]
+}}
+{{eqn | r = \paren {x \circ \paren {x \circ y} } \circ y
+ | c = $\circ$ is [[Definition:Commutative Operation|commutative]]
+}}
+{{eqn | r = \paren {x \circ x} \circ \paren {y \circ y}
+ | c = $\circ$ is [[Definition:Associative Operation|associative]]
+}}
+{{eqn | r = x \circ y
+ | c = $x$ and $y$ are [[Definition:Idempotent Element|idempotent]]
+}}
+{{end-eqn}}
+Hence the result.
+{{qed}}
+\end{proof}
+
+\begin{proof}
+By [[Subsemigroup Closure Test]] we need only show that:
+: For all $x, y \in I$: $x \circ y \in I$.
+As $x, y \in I$, they are [[Definition:Idempotent Element|idempotent]].
+We have that $\circ$ is [[Definition:Commutative Operation|commutative]].
+Thus, by definition, $x$ and $y$ [[Definition:Commute|commute]].
+From [[Product of Commuting Idempotent Elements is Idempotent]], $\left({x \circ y}\right)$ is [[Definition:Idempotent Element|idempotent]].
+That is:
+:$x \circ y \in I$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Product of Commuting Idempotent Elements is Idempotent}
+Tags: Semigroups, Idempotence
+
+\begin{theorem}
+Let $\struct {S, \circ}$ be a [[Definition:Semigroup|semigroup]].
+Let $a, b \in S$ be [[Definition:Idempotent Element|idempotent elements]] of $S$.
+Let $a$ and $b$ [[Definition:Commute|commute]]:
+:$a \circ b = b \circ a$
+Then $a \circ b$ is [[Definition:Idempotent Element|idempotent]].
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = \paren {a \circ b} \circ \paren {a \circ b}
+ | r = \paren {a \circ \paren {b \circ a} } \circ b
+ | c = $\circ$ is [[Definition:Associative Operation|associative]] by definition of [[Definition:Semigroup|semigroup]]
+}}
+{{eqn | r = \paren {a \circ \paren {a \circ b} } \circ b
+ | c = $a$ and $b$ [[Definition:Commute|commute]]
+}}
+{{eqn | r = \paren {a \circ a} \circ \paren {b \circ b}
+ | c = $\circ$ is [[Definition:Associative Operation|associative]]
+}}
+{{eqn | r = a \circ b
+ | c = $a$ and $b$ are [[Definition:Idempotent Element|idempotent]] by the premise
+}}
+{{end-eqn}}
+Thus $a \circ b$ is [[Definition:Idempotent Element|idempotent]].
+{{qed}}
+[[Category:Semigroups]]
+[[Category:Idempotence]]
+5hcn0hrqp0efy8tk9dzdvy04zje69lm
+\end{proof}<|endoftext|>
+\section{Set of All Self-Maps is Semigroup}
+Tags: Mapping Theory, Examples of Semigroups
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $S^S$ be the [[Definition:Set of All Mappings|set of all mappings]] from $S$ to itself.
+Let the [[Definition:Binary Operation|operation]] $\circ$ represent [[Definition:Composition of Mappings|composition of mappings]].
+Then the [[Definition:Algebraic Structure|algebraic structure]] $\struct {S^S, \circ}$ is a [[Definition:Semigroup|semigroup]].
+\end{theorem}
+
+\begin{proof}
+Let $f, g \in S^S$.
+As the [[Definition:Domain of Mapping|domain]] of $g$ and [[Definition:Codomain of Mapping|codomain]] of $f$ are the same, the [[Definition:Composition of Mappings|composition]] $f \circ g$ is defined.
+By the definition of composition, $f \circ g$ is a [[Definition:Mapping|mapping]] from the domain of $g$ to the codomain of $f$.
+Thus $f \circ g: S \to S$, so $f \circ g \in S^S$.
+Since this holds for all $f, g \in S^S$, $\struct {S^S, \circ}$ is [[Definition:Closed Algebraic Structure|closed]].
+By [[Composition of Mappings is Associative]], $\circ$ is [[Definition:Associative|associative]].
+Since $\struct {S^S, \circ}$ is [[Definition:Closed Algebraic Structure|closed]] and $\circ$ is associative:
+:$\struct {S^S, \circ}$ is a [[Definition:Semigroup|semigroup]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Integers form Subdomain of Reals}
+Tags: Integers, Real Numbers
+
+\begin{theorem}
+The [[Integers form Integral Domain|integral domain of integers]] $\struct {\Z, +, \times}$ forms a [[Definition:Subdomain|subdomain]] of the [[Definition:Field of Real Numbers|field of real numbers]].
+\end{theorem}
+
+\begin{proof}
+We have that [[Integers form Subdomain of Rationals]].
+We have that [[Rational Numbers form Subfield of Real Numbers]].
+Hence the result, from the definition of [[Definition:Subdomain|subdomain]].
+{{qed}}
+[[Category:Integers]]
+[[Category:Real Numbers]]
+k7dkktib40yaper07azh5367u9i60v8
+\end{proof}<|endoftext|>
+\section{Identity Element is Idempotent}
+Tags: Identity Elements, Idempotence
+
+\begin{theorem}
+Let $\struct {S, \circ}$ be an [[Definition:Algebraic Structure|algebraic structure]].
+Let $e \in S$ be an [[Definition:Identity Element|identity]] with respect to $\circ$.
+Then $e$ is [[Definition:Idempotent Element|idempotent]] under $\circ$.
+\end{theorem}
+
+\begin{proof}
+By the definition of an [[Definition:Identity Element|identity element]]:
+:$\forall x \in S: e \circ x = x$
+Thus in particular:
+:$e \circ e = e$
+Therefore $e$ is [[Definition:Idempotent Element|idempotent]] under $\circ$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Identity of Algebraic Structure is Preserved in Substructure}
+Tags: Identity Elements
+
+\begin{theorem}
+Let $\struct {S, \circ}$ be an [[Definition:Algebraic Structure|algebraic structure]] with [[Definition:Identity Element|identity]] $e$.
+Let $\struct {T, \circ}$ be a [[Definition:Algebraic Substructure|algebraic substructure]] of $\struct {S, \circ}$.
+That is, let $T \subseteq S$.
+Let $e \in T$.
+Then $e$ is an [[Definition:Identity Element|identity]] of $\struct {T, \circ}$.
+\end{theorem}
+
+\begin{proof}
+Let $x \in T$.
+By the definition of [[Definition:Subset|subset]], $x \in S$.
+Since $e$ is an [[Definition:Identity Element|identity]] of $\struct {S, \circ}$:
+:$e \circ x = x \circ e = x$
+Since this holds for all $x \in T$, $e$ is an [[Definition:Identity Element|identity]] of $\struct {T, \circ}$.
+{{qed}}
+[[Category:Identity Elements]]
+n2ie5wgzy61jxufp2t0pdsatrdnts8o
+\end{proof}<|endoftext|>
+\section{Idempotent Elements form Submonoid of Commutative Monoid}
+Tags: Monoids
+
+\begin{theorem}
+Let $\left({S, \circ}\right)$ be a [[Definition:Commutative Monoid|commutative monoid]].
+Let $e \in S$ be the [[Definition:Identity Element|identity element]] of $\left({S, \circ}\right)$.
+Let $I$ be the [[Definition:Set|set]] of all [[Definition:Element|elements]] of $S$ that are [[Definition:Idempotent Element|idempotent]] under $\circ$.
+That is:
+:$I = \left\{{x \in S: x \circ x = x}\right\}$
+Then $\left({I, \circ}\right)$ is a [[Definition:Submonoid|submonoid]] of $\left({S, \circ}\right)$ with identity $e$.
+\end{theorem}
+
+\begin{proof}
+By [[Idempotent Elements form Subsemigroup of Commutative Semigroup]], $\left({I, \circ}\right)$ is a [[Definition:Subsemigroup|subsemigroup]] of $\left({S, \circ}\right)$.
+By [[Identity Element is Idempotent]], $e \in I$.
+By [[Identity of Algebraic Structure is Preserved in Substructure]], $e$ is an [[Definition:Identity Element|identity]] of $\left({I, \circ}\right)$.
+Since $\left({I, \circ}\right)$ is a [[Definition:Semigroup|semigroup]] and has an [[Definition:Identity Element|identity]], $\left({I, \circ}\right)$ is a [[Definition:Monoid|monoid]].
+Since $I \subseteq S$, $\left({I, \circ}\right)$ is a [[Definition:Submonoid|submonoid]] of $\left({S, \circ}\right)$ with [[Definition:Identity Element|identity]] $e$.
+{{qed}}
+[[Category:Monoids]]
+hf714gnpsngl4li3spzhqmiu6kvh0p7
+\end{proof}<|endoftext|>
+\section{Inverse Image under Order Embedding of Strict Upper Closure of Image of Point}
+Tags: Order Embeddings, Upper Closures
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ and $\struct {T, \preceq'}$ be [[Definition:Ordered Set|ordered sets]].
+Let $\phi: S \to T$ be an [[Definition:Order Embedding|order embedding]] of $\struct {S, \preceq}$ into $\struct {T, \preceq'}$
+Let $p \in S$.
+Then:
+:$\map {\phi^{-1} } {\map \phi p^{\succ'} } = p^\succ$
+where $\cdot^\succ$ and $\cdot^{\succ'}$ represent [[Definition:Strict Upper Closure of Element|strict upper closure]] with respect to $\preceq$ and $\preceq'$, respectively.
+\end{theorem}
+
+\begin{proof}
+Let $x \in \map {\phi^{-1} } {\map \phi p^{\succ'} }$.
+By the definition of [[Definition:Inverse Image|inverse image]]:
+:$\map \phi x \in \map \phi p^{\succ'}$
+By the definition of [[Definition:Strict Upper Closure of Element|strict upper closure]]:
+:$\map \phi p \prec' \map \phi x$
+Since $\phi$ is an [[Definition:Order Embedding|order embedding]]:
+:$p \prec x$
+Thus by the definition of [[Definition:Strict Upper Closure of Element|strict upper closure]]:
+:$x \in p^\succ$
+and so:
+:$\map {\phi^{-1} } {\map \phi p^{\succ'} } \subseteq p^\succ$
+Let $x \in p^\succ$.
+By the definition of [[Definition:Strict Upper Closure of Element|strict upper closure]]:
+:$p \prec x$
+Since $\phi$ is an [[Definition:Order Embedding|order embedding]]:
+:$\map \phi p \prec' \map \phi x$
+Thus by the definition of [[Definition:Strict Upper Closure of Element|strict upper closure]]:
+:$\map \phi x \in \map \phi p^{\succ'}$
+Thus by the definition of [[Definition:Inverse Image|inverse image]]:
+:$x \in \map {\phi^{-1} } {\map \phi p^{\succ'} }$
+and so:
+:$p^\succ \subseteq \map {\phi^{-1} } {\map \phi p^{\succ'} }$
+Thus by definition of [[Definition:Set Equality|set equality]]:
+:$\map {\phi^{-1} } {\map \phi p^{\succ'} } = p^\succ$
+{{qed}}
+[[Category:Order Embeddings]]
+[[Category:Upper Closures]]
+io0oltfff5r2qj4nelhv9cohyuvf1sj
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Order Embedding/Definition 1 implies Definition 3}
+Tags: Order Embeddings
+
+\begin{theorem}
+Let $\struct {S, \preceq_1}$ and $\struct {T, \preceq_2}$ be [[Definition:Ordered Set|ordered sets]].
+Let $\phi: S \to T$ be a [[Definition:Mapping|mapping]].
+Let $\phi: S \to T$ be an [[Definition:Order Embedding|order embedding]] by [[Definition:Order Embedding/Definition 1|Definition 1]]:
+{{:Definition:Order Embedding/Definition 1}}
+Then $\phi: S \to T$ is an [[Definition:Order Embedding|order embedding]] by [[Definition:Order Embedding/Definition 3|Definition 3]]:
+{{:Definition:Order Embedding/Definition 3}}
+\end{theorem}
+
+\begin{proof}
+Let $\phi$ be an [[Definition:Order Embedding/Definition 1|order embedding by definition 1]].
+Then by definition:
+:$\forall x, y \in S: x \preceq_1 y \iff \map \phi x \preceq_2 \map \phi y$
+$\phi$ is [[Definition:Injection|injective]] by [[Order Embedding is Injection]].
+It remains to be shown that:
+: $x \prec_1 y \iff \map \phi x \prec_2 \map \phi y$
+Suppose first that $x \prec_1 y$.
+Then $x \preceq_1 y$ and $x \ne y$.
+Thus by the premise:
+:$\map \phi x \preceq_2 \map \phi y$
+Since $\phi$ is [[Definition:Injection|injective]]:
+:$\map \phi x \ne \map \phi y$
+Therefore:
+:$\map \phi x \prec_2 \map \phi y$
+Suppose instead that $\map \phi x \prec_2 \map \phi y$
+Then:
+:$\map \phi x \preceq_2 \map \phi y$
+and:
+:$\map \phi x \ne \map \phi y$
+By the premise:
+:$x \preceq_1 y$
+By the substitutive property of equality:
+:$x \ne y$
+Thus:
+:$x \prec_1 y$
+Thus $\phi$ is an [[Definition:Order Embedding/Definition 3|order embedding by definition 3]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Order Embedding/Definition 3 implies Definition 1}
+Tags: Order Embeddings
+
+\begin{theorem}
+Let $\struct {S, \preceq_1}$ and $\struct {T, \preceq_2}$ be [[Definition:Ordered Set|ordered sets]].
+Let $\phi: S \to T$ be a [[Definition:Mapping|mapping]].
+Let $\phi: S \to T$ be an [[Definition:Order Embedding|order embedding]] by [[Definition:Order Embedding/Definition 3|Definition 3]]:
+{{:Definition:Order Embedding/Definition 3}}
+Then $\phi: S \to T$ is an [[Definition:Order Embedding|order embedding]] by [[Definition:Order Embedding/Definition 1|Definition 1]]:
+{{:Definition:Order Embedding/Definition 1}}
+\end{theorem}
+
+\begin{proof}
+Let $\phi$ be an [[Definition:Order Embedding/Definition 3|order embedding by definition 3]].
+Then by definition:
+:$(1): \quad \phi$ is [[Definition:Injection|injective]]
+:$(2): \quad \forall x, y \in S: x \prec_1 y \iff \map \phi x \prec_2 \map \phi y$
+Let $x \preceq_1 y$.
+Then $x \prec_1 y$ or $x = y$.
+If $x \prec_1 y$, then by hypothesis:
+:$\map \phi x \prec_2 \map \phi y$
+Thus:
+:$\map \phi x \preceq_2 \map \phi y$
+If $x = y$, then:
+:$\map \phi x = \map \phi y$
+Thus:
+:$\map \phi x \preceq_2 \map \phi y$
+Thus it has been shown that:
+:$x \preceq_1 y \implies \map \phi x \preceq_2 \map \phi y$
+{{qed|lemma}}
+Let $\map \phi x \preceq_2 \map \phi y$.
+Then:
+:$\map \phi x \prec_2 \map \phi y$
+or:
+:$\map \phi x = \map \phi y$
+Suppose $\map \phi x \prec_2 \map \phi y$.
+Then by hypothesis:
+:$x \prec_1 y$
+and so:
+:$x \preceq_1 y$
+Suppose $\map \phi x = \map \phi y$.
+Then since $\phi$ is [[Definition:Injection|injective]]:
+:$x = y$
+and so:
+:$x \preceq_1 y$
+Thus [[Proof by Cases|in both cases]]:
+:$x \preceq_1 y$
+and so:
+:$\map \phi x \preceq_2 \map \phi y \implies x \preceq_1 y$
+{{qed|lemma}}
+Hence the result:
+:$x \preceq_1 y \iff \map \phi x \preceq_2 \map \phi y$
+and so $\phi$ is an [[Definition:Order Embedding/Definition 1|order embedding by definition 1]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Inverse Image under Embedding of Image under Relation of Image of Point}
+Tags: Relation Theory
+
+\begin{theorem}
+Let $S$ and $T$ be [[Definition:Set|sets]].
+Let $\mathcal R_S$ and $\mathcal R_T$ be [[Definition:Endorelation|relations]] on $S$ and $T$, respectively.
+Let $\phi: S \to T$ be a [[Definition:Mapping|mapping]] with the property that:
+: $\forall p, q \in S: \left({ p \mathrel{\mathcal R_S} q \iff \phi(p) \mathrel{\mathcal R_T} \phi(q) }\right)$
+Then for each $p \in S$:
+: $\mathcal R_S (p) = \phi^{-1}\left({\mathcal R_T \left({ \phi(p) }\right) }\right)$
+\end{theorem}
+
+\begin{proof}
+Let $p \in S$.
+{{begin-eqn}}
+{{eqn |o =
+ |r = x \in \mathcal R_S (p)
+}}
+{{eqn |lo= \iff |o=
+ |r = p \mathrel{\mathcal R_S} x
+ |c = Definition of the [[Definition:Image of Element under Relation|image]] of $p$ under $\mathcal R_S$
+}}
+{{eqn |lo= \iff |o=
+ |r = \phi(p) \mathrel{\mathcal R_T} \phi(x)
+ |c = Premise
+}}
+{{eqn |lo= \iff |o=
+ |r = \phi(x) \in \mathcal R_T \left({ \phi(p) }\right)
+ |c = Definition of the [[Definition:Image of Element under Relation|image]] of $\phi(p)$ under $\mathcal R_T$
+}}
+{{eqn |lo = \iff |o=
+ |r = x \in \phi^{-1}\left({\mathcal R_T \left({ \phi(p) }\right) }\right)
+ |c = Definition of [[Definition:Inverse Image|inverse image]]
+}}
+{{end-eqn}}
+Thus by the [[Axiom:Axiom of Extension|Axiom of Extension]]:
+: $\mathcal R_S (p) = \phi^{-1}\left({\mathcal R_T \left({ \phi(p) }\right) }\right)$
+{{qed}}
+[[Category:Relation Theory]]
+p01vhur6jk7qpkalxpz316iw3q5m1rh
+\end{proof}<|endoftext|>
+\section{Path as Parameterization of Contour}
+Tags: Contour Integration
+
+\begin{theorem}
+Let $\left[{a \,.\,.\, b}\right]$ be a [[Definition:Closed Real Interval|closed real interval]].
+Let $\gamma: \left[{a \,.\,.\, b}\right] \to \C$ be a [[Definition:Path (Topology)|path]].
+Let there exist $n \in \N$ and a [[Definition:Subdivision (Real Analysis)|subdivision]] $\left\{{a_0, a_1, \ldots, a_n}\right\}$ of $\left[{a \,.\,.\, b}\right]$ such that:
+: $\gamma {\restriction_{ \left[{a_{k - 1} \,.\,.\, a_k}\right] } }$ is a [[Definition:Smooth Path (Complex Analysis)|smooth path]] for all $k \in \left\{ {1, \ldots, n}\right\}$
+where $\gamma {\restriction_{\left[{a_{k - 1} \,.\,.\, a_k}\right]} }$ denotes the [[Definition:Restriction of Mapping|restriction]] of $\gamma$ to $\left[{a_{k - 1} \,.\,.\, a_k}\right]$.
+Then there exists a [[Definition:Contour (Complex Plane)|contour]] $C$ with [[Definition:Parameterization of Contour (Complex Plane)|parameterization]] $\gamma$ and these properties:
+:$(1): \quad$ If $\gamma$ is a [[Definition:Closed Path (Topology)|closed path]], then $C$ is a [[Definition:Closed Contour (Complex Plane)|closed contour]].
+:$(2): \quad$ If $\gamma$ is a [[Definition:Jordan Arc|Jordan arc]], then $C$ is a [[Definition:Simple Contour (Complex Plane)|simple contour]].
+:$(3): \quad$ If $\gamma$ is a [[Definition:Jordan Curve|Jordan curve]], then $C$ is a [[Definition:Simple Contour (Complex Plane)|simple]] [[Definition:Closed Contour (Complex Plane)|closed contour]].
+\end{theorem}
+
+\begin{proof}
+Put $\gamma_k = \gamma {\restriction_{ \left[{a_{k-1} \,.\,.\, a_k}\right] } } : \left[{a_{k-1} \,.\,.\, a_k}\right] \to \C$.
+By definition, it follows that there exists a [[Definition:Directed Smooth Curve|directed smooth curve]] $C_k$ with [[Definition:Directed Smooth Curve/Parameterization|parameterization]] $\gamma_k$.
+For all $k \in \left\{ {1, \ldots, n-1}\right\}$, we have $\gamma_k \left({a_k}\right) = \gamma_{k + 1} \left({a_k}\right)$.
+Define the [[Definition:Contour (Complex Plane)|contour]] $C$ as the [[Definition:Concatenation of Contours|concatenation]] $C_1 \cup C_2 \cup \ldots \cup C_n$.
+Then, it follows by definition that $\gamma$ is a possible [[Definition:Parameterization of Contour (Complex Plane)|parameterization]] of $C$.
+{{qed|lemma}}
+Suppose that $\gamma$ is a [[Definition:Closed Path (Topology)|closed path]].
+Then $\gamma \left({a}\right) = \gamma_1 \left({a_0}\right) = \gamma_n \left({a_n}\right)$, so $C_1$ has [[Definition:Start Point of Contour (Complex Plane)|start point]] $\gamma \left({a}\right)$, and $C_n$ has [[Definition:End Point of Contour (Complex Plane)|end point]] $\gamma \left({a}\right)$.
+By definition, it follows that $C$ is a [[Definition:Closed Contour (Complex Plane)|closed contour]].
+{{qed|lemma}}
+Suppose that $\gamma$ is a [[Definition:Jordan Arc|Jordan arc]].
+Let $k_1, k_2 \in \left\{ {1, \ldots, n}\right\}$, and $t_1 \in \left[{a_{k_1 - 1} \,.\,.\, a_{k_1} }\right), t_2 \in \left[{a_{k_2 - 1} \,.\,.\, a_{k_2} }\right)$.
+Then $\gamma \left({t_1}\right) \ne \gamma \left({t_2}\right)$ by the definition of Jordan arc, so $\gamma_{k_1} \left({t_1}\right) \ne \gamma_{k_2} \left({t_2}\right)$.
+Let instead $k \in \left\{ {1, \ldots, n}\right\}$ and $t \in \left[{a_{k-1} \,.\,.\, a_k}\right)$ with $t \ne a_1$.
+Then $\gamma \left({t}\right) \ne \gamma \left({a_n}\right)$ by the definition of Jordan arc, so $\gamma_k \left({t}\right) \ne \gamma_n \left({a_n}\right)$.
+By definition, it follows that $C$ is a [[Definition:Simple Contour (Complex Plane)|simple contour]].
+{{qed|lemma}}
+Suppose that $\gamma$ is a [[Definition:Jordan Curve|Jordan curve]].
+As a Jordan curve by definition is both a [[Definition:Jordan Arc|Jordan arc]] and a [[Definition:Closed Path (Topology)|closed path]], it follows from what is shown above that $C$ is a [[Definition:Simple Contour (Complex Plane)|simple]] [[Definition:Closed Contour (Complex Plane)|closed contour]].
+{{qed}}
+[[Category:Contour Integration]]
+oy9jy2rtbhg9g4e65kfchpvs7rx5u52
+\end{proof}<|endoftext|>
+\section{Kernel of Induced Homomorphism of Polynomial Forms}
+Tags: Polynomial Theory
+
+\begin{theorem}
+Let $R$ and $S$ be [[Definition:Commutative and Unitary Ring|commutative rings with unity]].
+Let $\phi: R \to S$ be a [[Definition:Ring Homomorphism|ring homomorphism]].
+Let $K = \ker \phi$.
+Let $R \left[{X}\right]$ and $S \left[{X}\right]$ be the [[Definition:Ring of Polynomial Forms|rings of polynomial forms]] over $R$ and $S$ respectively in the indeterminate $X$.
+Let $\bar\phi: R \left[{X}\right] \to S \left[{X}\right]$ be the [[Induced Homomorphism of Polynomial Forms|induced morphism of polynomial rings]].
+Then the [[Definition:Kernel of Ring Homomorphism|kernel]] of $\bar\phi$ is:
+:$\ker \bar\phi = \left\{{ a_0 + a_1 X + \cdots + a_n X^n \in R \left[{X}\right] : \phi \left({a_i}\right) = 0 \text{ for } i = 0, \ldots, n }\right\}$
+Or, more concisely:
+:$\ker \bar\phi = \left({\ker \phi}\right) \left[{X}\right]$
+\end{theorem}
+
+\begin{proof}
+Let $P \left(X\right) = a_0 + a_1 X + \cdots + a_n X^n \in R \left[{X}\right]$.
+Suppose first that $\phi \left({a_i}\right) = 0$ for $i = 0, \ldots, n$.
+We have by definition that:
+:$\bar\phi \left({a_0 + a_1 X + \cdots + a_n X^n}\right) = \phi \left({a_0}\right) + \phi \left({a_1}\right) X + \cdots + \phi \left({a_n}\right) X^n = 0$
+That is to say, $P \left({X}\right) \in \ker \bar\phi$.
+Conversely, suppose that $P \left({X}\right) \in \ker \bar\phi$.
+That is, $\bar\phi \left({P \left({X}\right)}\right) = \phi \left({a_0}\right) + \phi \left({a_1}\right) X + \cdots + \phi \left({a_n}\right) X^n$ is the [[Definition:Null Polynomial/Polynomial Form|null polynomial]].
+This by definition means that for $i = 0, \ldots, n$ we have $\phi \left({a_i}\right) = 0$.
+Hence, $P \left({X}\right) \in \left({\ker \phi}\right) \left[{X}\right]$.
+This concludes the proof.
+{{qed}}
+[[Category:Polynomial Theory]]
+gn2m9numjm5k02aoqfmo9vc114v0qsk
+\end{proof}<|endoftext|>
+\section{Boundary of Polygon as Contour}
+Tags: Contour Integration
+
+\begin{theorem}
+Let $P$ be a [[Definition:Polygon|polygon]] embedded in the [[Definition:Complex Plane|complex plane]] $\C$.
+Denote the [[Definition:Boundary (Geometry)|boundary]] of $P$ as $\partial P$.
+Then there exists a [[Definition:Simple Contour (Complex Plane)|simple]] [[Definition:Closed Contour (Complex Plane)|closed contour]] $C$ such that:
+: $\operatorname{Im} \left({C}\right) = \partial P$
+where $\operatorname{Im} \left({C}\right)$ denotes the [[Definition:Image of Contour (Complex Plane)|image]] of $C$.
+\end{theorem}
+
+\begin{proof}
+Let $n \in \N$ be the number of [[Definition:Side of Polygon|sides]] of $P$.
+Denote the [[Definition:Vertex of Polygon|vertices]] of $P$ as $A_1, \ldots, A_n$.
+From [[Complex Plane is Metric Space]], it follows that $\C$ is [[Definition:Homeomorphic Metric Spaces|homeomorphic]] to $\R^2$.
+Then, we can consider $\partial P$ as a [[Definition:Subset|subset]] of $\R^2$.
+From [[Boundary of Polygon is Jordan Curve]], it follows that there exists a [[Definition:Jordan Curve|Jordan curve]] $\gamma: \left[{0 \,.\,.\, 1}\right] \to \R^2$ such that $\operatorname{Im} \left({\gamma}\right) = \partial P$.
+The same theorem shows that $\gamma$ is a [[Definition:Concatenation (Topology)|concatenation]] of $n$ [[Definition:Convex Set (Vector Space)/Line Segment|line segments]], parameterized as:
+:$\gamma_k \left({t}\right) = \left({1 - t}\right) A_k + tA_{k+1}$
+where $k \in \left\{ {1, \ldots, n}\right\}$.
+We identify $A_{n + 1}$ with $A_1$.
+Then, $\gamma_k: \left[{a_{k - 1} \,.\,.\, a_k}\right] \to \R^2$, where $a_{k - 1}, a_k \in \left[{0 \,.\,.\, 1}\right]$.
+As $\gamma$ is a [[Definition:Concatenation (Topology)|concatenation]] of $\gamma_1, \ldots, \gamma_n$, it follows that $\left\{ {a_0, a_1, \ldots, a_n}\right\}$ is a [[Definition:Subdivision (Real Analysis)|subdivision]] of $\left[{0 \,.\,.\, 1}\right]$.
+We have:
+:$\dfrac \d {\d t} \gamma_k \left({t}\right) = A_{k + 1} - A_k \ne \mathbf 0$
+as $A_k$ and $A_{k + 1}$ are two different vertices.
+As $\C$ is [[Definition:Homeomorphic Metric Spaces|homeomorphic]] to $\R^2$, we can consider $\gamma$ as a [[Definition:Continuous Complex Function|continuous complex function]] $\gamma: \left[{0 \,.\,.\, 1}\right] \to \C$.
+Then $\gamma_k$ is [[Definition:Complex-Differentiable at Point|complex-differentiable]] for all values of $t \in \left({a_{k - 1} \,.\,.\, a_k}\right)$ with its [[Definition:Derivative of Smooth Path in Complex Plane|derivative]] $\gamma_k'$ defined by:
+:$\gamma_k' \left({t}\right) = x \left({\dfrac \d {\d t} \gamma_k \left({t}\right) }\right) + i y \left({\dfrac \d {\d t} \gamma_k \left({t}\right) }\right)$
+where:
+:$x \left({\dfrac \d {\d t} \gamma_k \left({t}\right) }\right)$ is the [[Definition:Real Part|real part]] of $\dfrac \d {\d t} \gamma_k \left({t}\right)$
+:$y \left({\dfrac \d {\d t} \gamma_k \left({t}\right) }\right)$ is the [[Definition:Imaginary Part|imaginary part]] of $\dfrac \d {\d t} \gamma_k \left({t}\right)$
+As shown above, $\gamma_k' \left({t}\right) \ne 0$.
+By definition, it follows that $\gamma_k = \gamma {\restriction_{\left[{a_{k-1} \,.\,.\, a_k}\right] } }$ is a [[Definition:Smooth Path (Complex Analysis)|smooth path]].
+From [[Path as Parameterization of Contour]], it follows that there exists a [[Definition:Simple Contour (Complex Plane)|simple]] [[Definition:Closed Contour (Complex Plane)|closed contour]] $C$ with $\operatorname{Im} \left({C}\right) = \partial P$.
+{{qed}}
+[[Category:Contour Integration]]
+s08ifxybz9kg9dn79jt3a8qti4hr4mb
+\end{proof}<|endoftext|>
+\section{Zero Simple Staircase Integral Condition for Primitive}
+Tags: Complex Analysis
+
+\begin{theorem}
+Let $f: D \to \C$ be a [[Definition:Continuous Complex Function|continuous complex function]], where $D$ is a [[Definition:Connected Domain (Complex Analysis)|connected domain]].
+Let $\displaystyle \oint_C \map f z \rd z = 0$ for all [[Definition:Simple Contour (Complex Plane)|simple]] [[Definition:Closed Contour (Complex Plane)|closed]] [[Definition:Staircase Contour|staircase contours]] $C$ in $D$.
+Then $f$ has a [[Definition:Complex Primitive|primitive]] $F: D \to \C$.
+\end{theorem}
+
+\begin{proof}
+Let $C$ be a [[Definition:Closed Contour (Complex Plane)|closed]] [[Definition:Staircase Contour|staircase contour]] in $D$, not necessarily [[Definition:Simple Contour (Complex Plane)|simple]].
+If we show that $\displaystyle \oint_C \map f z \rd z = 0$, then the result follows from [[Zero Staircase Integral Condition for Primitive]].
+The staircase contour $C$ is a [[Definition:Concatenation of Contours (Complex Plane)|concatenation]] of $C_1, \ldots, C_n$, where the [[Definition:Image of Contour (Complex Plane)|image]] of each $C_k$ is a [[Definition:Convex Set (Vector Space)/Line Segment|line segment]] [[Definition:Parallel Lines|parallel]] with either the [[Definition:Real Axis|real axis]] or the [[Definition:Imaginary Axis|imaginary axis]].
+Denote the [[Definition:Parameterization of Contour (Complex Plane)|parameterization]] of $C$ as $\gamma: \closedint a b \to \C$, where $\closedint a b$ is a [[Definition:Closed Real Interval|closed real interval]].
+Denote the [[Definition:Parameterization of Directed Smooth Curve (Complex Plane)|parameterization]] of $C_k$ as $\gamma_k: \closedint {a_k} {b_k} \to \C$.
+=== [[Zero Simple Staircase Integral Condition for Primitive/Lemma|Lemma]] ===
+{{:Zero Simple Staircase Integral Condition for Primitive/Lemma}}
+=== Splitting up the Contour ===
+The lemma shows that given a [[Definition:Staircase Contour|staircase contour]] $C$, we can assume that for $k \in \set {1, \ldots, n - 1}$, the [[Definition:Set Intersection|intersection]] of the [[Definition:Image of Contour (Complex Plane)|images]] of $C_k$ and $C_{k + 1}$ is equal to their common [[Definition:End Point of Contour (Complex Plane)|end point]] $\map {\gamma_k} {b_k}$.
+This means that in order to intersect itself, $C$ must be a [[Definition:Concatenation of Contours (Complex Plane)|concatenation]] of at least $4$ [[Definition:Directed Smooth Curve (Complex Plane)|directed smooth curves]].
+Now, we prove the main requirement for [[Zero Staircase Integral Condition for Primitive]], that $\displaystyle \oint_C \map f z \rd z = 0$.
+The proof is by [[Principle of Mathematical Induction|induction]] over $n \in \N$, the number of [[Definition:Directed Smooth Curve|directed smooth curves]] that $C$ is a [[Definition:Concatenation of Contours (Complex Plane)|concatenation]] of.
+=== Basis for the Induction ===
+For $n = 1$, $C$ can only be a [[Definition:Closed Contour (Complex Plane)|closed]] [[Definition:Staircase Contour|staircase contour]] if $\gamma$ is [[Definition:Constant Mapping|constant]], so:
+{{begin-eqn}}
+{{eqn | l = \oint_C \map f z \rd z
+ | r = \int_a^b \map f {\map \gamma t} \map {\gamma'} t \rd t
+ | c = {{Defof|Complex Contour Integral}}
+}}
+{{eqn | r = 0
+ | c = [[Derivative of Complex Polynomial]]: $\gamma$ is constant
+}}
+{{end-eqn}}
+For $n = 4$, $C$ can only be a [[Definition:Closed Contour (Complex Plane)|closed]] [[Definition:Staircase Contour|staircase contour]] if $C$ is a [[Definition:Simple Contour (Complex Plane)|simple contour]].
+Then, $\displaystyle \oint_C \map f z \rd z = 0$ by the original assumption of this theorem.
+=== Induction Hypothesis ===
+For $N \in \N$, if $C$ is a [[Definition:Closed Contour (Complex Plane)|closed]] [[Definition:Staircase Contour|staircase contour]] that is a [[Definition:Concatenation of Contours (Complex Plane)|concatenation]] of $n$ [[Definition:Directed Smooth Curve (Complex Plane)|directed smooth curves]] with $n \le N$, then:
+:$\displaystyle \oint_C \map f z \rd z = 0$
+=== Induction Step ===
+Suppose that $C$ is a [[Definition:Closed Contour (Complex Plane)|closed]] [[Definition:Staircase Contour|staircase contour]] that is a [[Definition:Concatenation of Contours (Complex Plane)|concatenation]] of $n + 1$ [[Definition:Directed Smooth Curve (Complex Plane)|directed smooth curves]].
+If $C$ is a [[Definition:Simple Contour (Complex Plane)|simple contour]], the [[Definition:Induction Hypothesis|induction hypothesis]] is true by the original assumption of this theorem.
+Otherwise, define $t_0 = a$, and $t_3 = b$.
+Define $t_1 \in \closedint a b$ as the [[Definition:Infimum of Set|infimum]] of all $t \in \closedint a b$ for which $\gamma$ intersects itself.
+Then define $t_2 \in \hointl {t_1} b$ as the [[Definition:Infimum of Set|infimum]] of all $t \in \hointl {t_1} b$ for which $\map \gamma t = \map \gamma {t_1}$.
+For $k \in \set {1, \ldots, 3}$, define $\tilde C_k$ as the [[Definition:Staircase Contour|staircase contour]] with [[Definition:Parameterization of Contour (Complex Plane)|parameterization]] $\gamma {\restriction_{\closedint {t_{k - 1} } {t_k} } }$.
+Then $\tilde C_2$ is a [[Definition:Closed Contour (Complex Plane)|closed]] [[Definition:Staircase Contour|staircase contour]] that is a [[Definition:Concatenation of Contours (Complex Plane)|concatenation]] of at least $4$ [[Definition:Directed Smooth Curve (Complex Plane)|directed smooth curves]].
+Then both $\tilde C_1 \cup \tilde C_3$ and $\tilde C_2$ are a [[Definition:Concatenation of Contours (Complex Plane)|concatenation]] of fewer than $n + 1$ [[Definition:Directed Smooth Curve (Complex Plane)|directed smooth curves]], so:
+{{begin-eqn}}
+{{eqn | l = \oint_C \map f z \rd z
+ | r = \oint_{\tilde C_1 \cup \tilde C_2 \cup \tilde C_3} \map f z \rd z
+}}
+{{eqn | r = \oint_{\tilde C_1 \cup \tilde C_3} \map f z \rd z + \oint_{\tilde C_2} \map f z \rd z
+ | c = [[Contour Integral of Concatenation of Contours]]
+}}
+{{eqn | r = 0
+ | c = [[Zero Simple Staircase Integral Condition for Primitive#Induction Hypothesis|Induction Hypothesis]]
+}}
+{{end-eqn}}
+{{qed}}
+[[Category:Complex Analysis]]
+d9syvp4ve618ilyjlzt9dae82wmnqfj
+\end{proof}<|endoftext|>
+\section{Preordering induces Equivalence Relation}
+Tags: Preorder Theory, Equivalence Relations
+
+\begin{theorem}
+Let $\struct {S, \precsim}$ be a [[Definition:Preordered Set|preordered set]].
+Define a [[Definition:Endorelation|relation]] $\sim$ on $S$ by letting $x \sim y$ {{iff}} $x \precsim y$ and $y \precsim x$.
+Then $\sim$ is an [[Definition:Equivalence Relation|equivalence relation]].
+\end{theorem}
+
+\begin{proof}
+To show that $\sim$ is an [[Definition:Equivalence Relation|equivalence relation]], we must show that it is [[Definition:Reflexive Relation|reflexive]], [[Definition:Transitive Relation|transitive]], and [[Definition:Symmetric Relation|symmetric]].
+By the definition of [[Definition:Preordering|preordering]], $\precsim$ is [[Definition:Transitive Relation|transitive]] and [[Definition:Reflexive Relation|reflexive]].
+=== Transitive ===
+Let $p, q, r \in S$.
+Suppose that $p \sim q$ and $q \sim r$.
+Then $p \precsim q$, $q \precsim r$, $r \precsim q$, and $q \precsim p$.
+Since $\precsim$ is [[Definition:Transitive Relation|transitive]]:
+:$p \precsim r$ and $r \precsim p$.
+Thus by the definition of $\sim$, $p \sim r$.
+Since this holds for all $p$, $q$, and $r$, $\sim$ is [[Definition:Transitive Relation|transitive]].
+{{qed|lemma}}
+=== Reflexive ===
+Let $p \in S$.
+Since $\precsim$ is [[Definition:Reflexive Relation|reflexive]]:
+: $p \precsim p$
+Thus by the definition of $\sim$:
+: $p \sim p$
+As this holds for all $p$, $\sim$ is [[Definition:Reflexive Relation|reflexive]].
+{{qed|lemma}}
+=== Symmetric ===
+Let $p, q \in S$ with $p \sim q$.
+Then $p \precsim q$ and $q \precsim p$.
+Thus $q \sim p$.
+Since this holds for all $p$ and $q$, $\sim$ is [[Definition:Symmetric Relation|symmetric]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Content of Rational Polynomial is Multiplicative}
+Tags: Gauss's Lemma (Polynomial Theory), Content of Polynomial, Content of Rational Polynomial is Multiplicative
+
+\begin{theorem}
+Let $h \in \Q \sqbrk X$ be a [[Definition:Polynomial over Ring in One Variable|polynomial]] with [[Definition:Rational Number|rational]] [[Definition:Polynomial Coefficient|coefficients]].
+Let $\cont h$ denote the [[Definition:Content of Rational Polynomial|content]] of $h$.
+Then for any polynomials $f, g \in \Q \sqbrk X$ with [[Definition:Rational Number|rational]] [[Definition:Polynomial Coefficient|coefficients]]:
+:$\cont {f g} = \cont f \cont g$
+\end{theorem}
+
+\begin{proof}
+From [[Rational Polynomial is Content Times Primitive Polynomial]], let $\map f X$ and $\map g X$ be expressed as:
+:$\map f X = \cont f \cdot \map {f^*} X$
+:$\map g X = \cont g \cdot \map {g^*} X$
+where:
+:$\cont f, \cont g$ are the [[Definition:Content of Rational Polynomial|content]] of $f$ and $g$ respectively
+:$f^*, g^*$ are [[Definition:Primitive Polynomial (Ring Theory)|primitive]].
+Let $h = f g$.
+We have, by applications of [[Rational Polynomial is Content Times Primitive Polynomial]]:
+:$\cont h \cdot \map {h^*} X = \cont f \cont g \cdot \map {f^*} X \, \map {g^*} X$
+By [[Gauss's Lemma on Primitive Rational Polynomials]] we have that $\map {f^*} X \, \map {g^*} X$ is [[Definition:Primitive Polynomial (Ring Theory)|primitive]].
+As $\cont f > 0$ and $\cont g > 0$, it follows that $\cont f \cont g > 0$.
+By the [[Rational Polynomial is Content Times Primitive Polynomial/Uniqueness|uniqueness clause in Rational Polynomial is Content Times Primitive Polynomial]], the result follows.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Gauss's Lemma on Irreducible Polynomials}
+Tags: Gauss's Lemma (Polynomial Theory)
+
+\begin{theorem}
+Let $\Z$ be the [[Definition:Ring of Integers|ring of integers]].
+Let $\Z \sqbrk X$ be the [[Definition:Ring of Polynomials|ring of polynomials]] over $\Z$.
+Let $h \in \Z \sqbrk X$ be a [[Definition:Polynomial over Ring|polynomial]].
+{{TFAE}}
+:$(1): \quad h$ is [[Definition:Irreducible Polynomial|irreducible]] in $\Q \sqbrk X$ and [[Definition:Primitive Polynomial over Integers|primitive]]
+:$(2): \quad h$ is [[Definition:Irreducible Polynomial|irreducible]] in $\Z \sqbrk X$.
+\end{theorem}
+
+\begin{proof}
+{{explain|Needs to be made explicit as to exactly what is being assumed and what follows as a consequence. As it stands, the consequence of 1 implies 2, for example, still needs to be completed so as to explain exactly how that show $h$ is irreducible -- it relies upon the implicit understanding of what irreducible means.}}
+=== 1 implies 2 ===
+Suppose first that $h$ is not [[Definition:Irreducible Polynomial|irreducible]] in $\Z \sqbrk X$.
+Let $\displaystyle h = a_0 + a_1 X + \cdots + a_n X^n$.
+If $\deg h = 0$, then the [[Definition:Content of Integer Polynomial|content]] of $h$ is:
+:$\cont h = \gcd \set {a_0} = \size {a_0}$
+Since $h$ is [[Definition:Primitive Polynomial (Ring Theory)|primitive]] by hypothesis, we have $h = \pm 1$.
+Now by [[Units of Ring of Polynomial Forms over Field]], the [[Definition:Unit of Ring|units]] of $\Q \sqbrk X$ are the [[Definition:Unit of Ring|units]] of $\Q$.
+Thus $h$ is a [[Definition:Unit of Ring|unit]] of $\Q \sqbrk X$.
+Therefore $h$ is [[Definition:Irreducible Polynomial|irreducible]].
+If $\deg h \ge 1$, then by [[Units of Ring of Polynomial Forms over Integral Domain]], the [[Definition:Unit of Ring|units]] of $\Z \sqbrk X$ are the [[Definition:Unit of Ring|units]] of $\Z$.
+Therefore $h$ is not a [[Definition:Unit of Ring|unit]] of $\Z \sqbrk X$.
+Thus since $h$ is reducible, there is a [[Definition:Non-Trivial Factorization|non-trivial factorization]] $h = f g$ in $\Z \sqbrk X$, with $f$ and $g$ both not [[Definition:Unit of Ring|units]].
+If $\deg f = 0$, that is, $f \in \Z$, then $f$ divides each coefficient of $h$.
+Since $h$ is primitive, this means that $f$ divides $\cont h = 1$.
+But the [[Divisors of One|divisors of $1$ are $\pm 1$]], so $f = \pm 1$.
+But then $f$ is a unit in $\Z \sqbrk X$, a contradiction.
+Therefore $\deg f \ge 1$, so $f$ is a non-unit in $\Q \sqbrk X$.
+Similarly, $g$ is a non-unit in $\Q \sqbrk X$.
+Therefore $h = fg$ is a [[Definition:Non-Trivial Factorization|non-trivial factorization]] in $\Q \sqbrk X$.
+=== 2 implies 1 ===
+{{tidy|ok in terms of details, but presentation needs touching up}}
+Suppose now that $h$ is not [[Definition:Irreducible Polynomial|irreducible]] in $\Q \sqbrk X$.
+That is, $h$ has a [[Definition:Non-Trivial Factorization|non-trivial factorization]] in $\Q \sqbrk X$.
+Since the [[Units of Ring of Polynomial Forms over Field|units of $\Q \sqbrk X$ are the units of $\Q$]], this means that $h = f g$, with $f$ and $g$ both of [[Definition:Positive Integer|positive]] [[Definition:Degree of Polynomial|degree]].
+Let $c_f$ and $c_g$ be the [[Definition:Content of Integer Polynomial|contents]] of $f$ and $g$ respectively.
+Define $\tilde f = c_f^{-1} f$ and $\tilde g = c_g^{-1} g$.
+By [[Content of Scalar Multiple]], it follows that $\cont {\tilde f} = \cont {\tilde g} = 1$.
+Moreover by [[Polynomial has Integer Coefficients iff Content is Integer]] we have $\tilde f, \tilde g \in \Z \sqbrk X$.
+Now we have:
+:$\tilde f \tilde g = \dfrac {f g} {c_f c_g} = \dfrac h {c_f c_g}$
+Taking the content, and using [[Content of Scalar Multiple]] we have:
+:$\cont {\tilde f \tilde g} = \dfrac 1 {c_f c_g} \cont h$
+By [[Gauss's Lemma on Primitive Rational Polynomials]] we know that $\cont {\tilde f \tilde g} = 1$.
+Moreover, by [[Irreducible Integer Polynomial is Primitive]], $\cont h = 1$.
+Therefore we must have $c_f c_g = 1$.
+Thus we have a factorization in $\Z \sqbrk X$:
+:$\tilde f \tilde g = h$
+This is a [[Definition:Non-Trivial Factorization|non-trivial factorization]] of $h$, as both $f$ and $g$ have [[Definition:Positive Integer|positive]] [[Definition:Degree of Polynomial|degree]].
+Thus $h$ is not [[Definition:Irreducible Polynomial|irreducible]] in $\Z \sqbrk X$.
+{{Qed}}
+{{Namedfor|Carl Friedrich Gauss|cat = Gauss}}
+[[Category:Gauss's Lemma (Polynomial Theory)]]
+kbf6czd3gif8u3rldfr8pstovy16j0i
+\end{proof}<|endoftext|>
+\section{Antisymmetric Quotient of Preordered Set is Ordered Set}
+Tags: Order Theory, Preorder Theory, Quotient Sets
+
+\begin{theorem}
+Let $\struct {S, \precsim}$ be a [[Definition:Preordered Set|preordered set]].
+Let $\sim$ be the [[Definition:Equivalence Relation|equivalence relation]] on $S$ [[Preordering induces Equivalence Relation|induced]] by $\precsim$.
+Let $\struct {S / {\sim}, \preceq}$ be the [[Definition:Antisymmetric Quotient|antisymmetric quotient]] of $\struct {S, \precsim}$.
+Then:
+:$\struct {S / {\sim}, \preceq}$ is an [[Definition:Ordered Set|ordered set]].
+:$\forall P, Q \in S / {\sim}: \forall p \in P: \forall q \in Q: P \preceq Q \implies p \precsim q$
+This second statement means that we could just as well have defined $\preceq$ by letting $P \preceq Q$ iff:
+:$\forall p \in P: \forall q \in Q: p \precsim q$
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Equivalence Relation|equivalence relation]], $\sim$ is [[Definition:Transitive Relation|transitive]], [[Definition:Reflexive Relation|reflexive]], and [[Definition:Symmetric Relation|symmetric]].
+By the definition of [[Definition:Preordering|preordering]], $\precsim$ is [[Definition:Transitive Relation|transitive]] and [[Definition:Reflexive Relation|reflexive]].
+To show that $\preceq$ is an [[Definition:Ordering|ordering]], we must show that it is [[Definition:Transitive Relation|transitive]], [[Definition:Reflexive Relation|reflexive]], and [[Definition:Antisymmetric Relation|antisymmetric]].
+=== Transitive ===
+Let $P, Q, R \in S / {\sim}$.
+Suppose that $P \preceq Q$ and $Q \preceq R$.
+Then for some $p \in P$, $q_1, q_2 \in Q$, and $r \in R$:
+:$p \precsim q_1$ and $q_2 \precsim r$.
+By the definition of [[Definition:Quotient Set|quotient set]], $q_1 \sim q_2$.
+By the [[Preordering induces Equivalence Relation|definition]] of $\sim$:
+:$q_1 \precsim q_2$
+Since $p \precsim q_1$, $q_1 \precsim q_2$, $q_2 \precsim r$, and $\precsim$ is [[Definition:Transitive Relation|transitive]], [[Transitive Chaining]] shows that:
+:$p \precsim r$
+Thus by the definition of $\preceq$:
+:$P \preceq R$.
+Since this holds for all such $P$, $Q$, and $R$, $\preceq$ is [[Definition:Transitive Relation|transitive]].
+{{qed|lemma}}
+=== Reflexive ===
+Let $P \in S / {\sim}$.
+By the definition of [[Definition:Quotient Set|quotient set]], $P$ is [[Definition:Non-Empty Set|non-empty]].
+Thus there exists a $p \in P$.
+Since $\precsim$ is a [[Definition:Preordering|preordering]], it is [[Definition:Reflexive Relation|reflexive]], so $p \precsim p$.
+By definition of the [[Definition:Equivalence Relation|equivalence relation]], we have that $q \sim p$ for any other $q \in P$.
+This gives that:
+:$q \sim p \precsim p$
+which is equivalent to $q \precsim p \precsim p$ by the definition of the [[Definition:Equivalence Relation|equivalence relation]].
+It also gives that $p \precsim p \precsim q$ by similar reasoning.
+By [[Definition:Transitive Relation|transitivity]], these give $q \precsim p$ and $p \precsim q$ respectively.
+Thus by the definition of $\preceq$:
+:$P \preceq P$
+As this holds for all $P \in S / {\sim}$, $\preceq$ is [[Definition:Reflexive Relation|reflexive]].
+{{qed|lemma}}
+=== Antisymmetric ===
+Let $P, Q \in S / {\sim}$ such that:
+:$P \preceq Q$
+:$Q \preceq P$
+By the definition of $\preceq$, there are elements $p_1, p_2 \in P$ and $q_1, q_2 \in Q$ such that:
+:$(1)\quad p_1 \precsim q_1$
+:$(2)\quad q_2 \precsim p_2$
+Let $p \in P$.
+Then by the definition of [[Definition:Quotient Set|quotient set]]:
+:$p \sim p_1$
+:$p_2 \sim p$
+By the definition of $\sim$:
+:$p \precsim p_1$
+:$p_2 \precsim p$
+Thus by $(1)$ and $(2)$ and the fact that $\precsim$ is [[Definition:Transitive Relation|transitive]]:
+:$p \precsim q_1$
+:$q_2 \precsim p$
+By the definition of [[Definition:Quotient Set|quotient set]]:
+:$q_1 \sim q_2$
+Thus by the definition of $\sim$:
+:$q_1 \precsim q_2$
+Since $\precsim$ is [[Definition:Transitive Relation|transitive]]:
+:$p \precsim q_2$
+We already know that:
+:$q_2 \precsim p$
+Thus $p \sim q_2$.
+By the definition of [[Definition:Quotient Set|quotient set]]:
+:$p \in Q$
+The same argument shows that each element of $Q$ is also in $P$.
+Thus by the [[Axiom:Axiom of Extension|Axiom of Extension]]:
+:$P = Q$
+As this holds for all such $P, Q \in S / {\sim}$, $\preceq$ is [[Definition:Antisymmetric Relation|antisymmetric]].
+{{qed|lemma}}
+=== Relation between Sets implies all their Elements are Related ===
+Let $P, Q \in S / {\sim}$ with $P \preceq Q$.
+Then by the definition of $\preceq$, there are $p \in P$ and $q \in Q$ such that $p \precsim q$.
+Let $p' \in P$ and $q' \in Q$.
+By the definition of [[Definition:Quotient Set|quotient set]]:
+:$p' \sim p$
+:$q \sim q'$
+Thus by the definition of $\sim$:
+:$p' \precsim p$
+:$q \precsim q'$
+Since $p \precsim q$ and $\precsim$ is [[Definition:Transitive Relation|transitive]]:
+:$p' \precsim q'$
+We have shown that:
+:$\forall P, Q \in S / {\sim}: \forall p \in P: \forall q \in Q: P \preceq Q \implies p \precsim q$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Ordering on Partition Determines Preordering}
+Tags: Order Theory, Preorder Theory
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $\PP$ be a [[Definition:Partition (Set Theory)|partition]] of $S$.
+Let $\phi: S \to \PP$ be the [[Definition:Quotient Mapping|quotient mapping]].
+Let $\preceq$ be an [[Definition:Ordering|ordering]] of $\PP$.
+Define a [[Definition:Endorelation|relation]] $\precsim$ on $S$ by letting $p \precsim q$ {{iff}}:
+:$\map \phi p \preceq \map \phi q$
+Then:
+:$\precsim$ is a [[Definition:Preordering|preordering]] on $S$.
+:$\precsim$ is the only preordering on $S$ that [[Antisymmetric Quotient of Preordered Set is Ordered Set|induces]] the $\preceq$ ordering on $\PP$.
+\end{theorem}
+
+\begin{proof}
+To show that $\precsim$ is a [[Definition:preordering|preordering]] we must show that it is [[Definition:Reflexive Relation|reflexive]] and [[Definition:Transitive Relation|transitive]].
+=== Reflexive ===
+Let $p \in S$.
+Then $\map \phi p = \map \phi p$.
+Since $\preceq$ is an [[Definition:ordering|ordering]] it is [[Definition:Reflexive Relation|reflexive]].
+Thus $\map \phi p \preceq \map \phi p$.
+By the definition of $\precsim$, $p \precsim p$.
+As this holds for all $p \in S$, $\precsim$ is reflexive.
+{{qed|lemma}}
+=== Transitive ===
+Let $p, q, r \in S$.
+Suppose that $p \precsim q$ and $q \precsim r$.
+By the definition of $\precsim$:
+:$\map \phi p \preceq \map \phi q$
+:$\map \phi q \preceq \map \phi r$
+Since $\preceq$ is an [[Definition:ordering|ordering]], it is [[Definition:Transitive Relation|transitive]], so:
+:$\map \phi p \preceq \map \phi r$
+Thus by the definition of $\precsim$, $p \precsim r$.
+As this holds for all such $p$, $q$, and $r$, $\precsim$ is [[Definition:Transitive Relation|transitive]].
+{{qed|lemma}}
+=== $\precsim$ induces the $\preceq$ ordering on $\PP$ ===
+Let $P, Q \in \PP$.
+First suppose that $P \preceq Q$.
+Let $p \in P$ and let $q \in Q$.
+By the definition of [[Definition:Quotient Mapping|quotient mapping]]:
+:$\map \phi p = P$ and $\map \phi q = Q$.
+Thus $\map \phi p \preceq \map \phi q$.
+So by the definition of $\precsim$:
+:$p \precsim q$
+Suppose instead that for some $p \in P$ and $q \in Q$, $p \precsim q$.
+Then by the definition of $\precsim$:
+:$\map \phi p \preceq \map \phi q$
+By the definition of [[Definition:Quotient Mapping|quotient mapping]]:
+:$\map \phi p = P$ and $\map \phi q = Q$.
+Thus $P \preceq Q$.
+{{qed|lemma}}
+=== $\struct {\PP, \preceq}$ is the Antisymmetric Quotient of $\struct {S, \precsim}$ ===
+Let $\sim$ be the [[Definition:Equivalence Relation|equivalence relation]] on $S$ [[Preordering induces Equivalence Relation|induced]] by $\precsim$.
+First we show that $\PP = S / {\sim}$.
+As both $\PP$ and $S/ {\sim}$ are [[Definition:Partition (Set Theory)|partitions]] of $S$, we need only show that for $p, q \in S$:
+:$\map \phi p = \map \phi q \iff p \sim q$
+First suppose that $p \sim q$.
+By the definition of $\sim$:
+:$p \precsim q$ and $q \precsim p$.
+Then by the definition of $\precsim$:
+:$\map \phi p \preceq \map \phi q$
+:$\map \phi q \preceq \map \phi p$
+Since $\preceq$ is an [[Definition:ordering|ordering]], and hence [[Definition:Antisymmetric Relation|antisymmetric]]:
+:$\map \phi p = \map \phi q$
+Suppose instead that $\map \phi p = \map \phi q$.
+Since $\preceq$ is [[Definition:Reflexive Relation|reflexive]]:
+:$\map \phi p \preceq \map \phi q$
+:$\map \phi q \preceq \map \phi p$
+By the definition of $\precsim$:
+:$p \precsim q$ and $q \precsim p$.
+By the definition of $\sim$:
+:$p \sim q$
+Now we must show that for $P, Q \in \PP$:
+:$P \preceq Q \iff \exists p \in P: \exists q \in Q: p \precsim q$
+Suppose that $P \preceq Q$.
+Let $p \in P$ and $q \in Q$.
+Then $\map \phi p = P$ and $\map \phi q = Q$, so $p \precsim q$ by the definition of $\precsim$.
+Suppose instead that $\exists p \in P: \exists q \in Q: p \precsim q$.
+Then by the definition of $\precsim$, $P \preceq Q$.
+{{qed|lemma}}
+=== $\precsim$ is Unique ===
+Let $\precsim'$ be a [[Definition:Preordering|preordering]] such that the [[Definition:Antisymmetric Quotient|antisymmetric quotient]] of $\struct {S, \precsim'}$ is $\struct {\PP, \preceq}$.
+Let $p, q \in S$.
+First suppose that $p \precsim' q$.
+Then $\map \phi p \preceq \map \phi q$ by the definition of antisymmetric quotient.
+Thus $p \precsim q$ by the definition of $\precsim$.
+Suppose instead that $p \precsim q$.
+Then $\map \phi p \preceq \map \phi q$ by the definition of $\precsim$.
+Thus $p \precsim' q$ by the definition of antisymmetric quotient.
+{{qed}}
+[[Category:Order Theory]]
+[[Category:Preorder Theory]]
+sxc05rmv5q06ns1feh0ltljtnph65pw
+\end{proof}<|endoftext|>
+\section{Units of Ring of Polynomial Forms over Commutative Ring}
+Tags: Polynomial Theory
+
+\begin{theorem}
+Let $\struct {R, +, \circ}$ be a non-[[Definition:Null Ring|null]] [[Definition:Commutative Ring with Unity|commutative ring with unity]] whose [[Definition:Ring Zero|zero]] is $0_R$ and whose [[Definition:Unity of Ring|unity]] is $1_R$.
+Let $R \sqbrk X$ be the [[Definition:Ring of Polynomial Forms|ring of polynomial forms]] in an [[Definition:Indeterminate (Polynomial Theory)|indeterminate]] $X$ over $R$.
+Let $\map P X = a_0 + a_1 X + \cdots + a_n X^n \in R \sqbrk X$.
+Then:
+:$\map P X$ is a [[Definition:Unit of Ring|unit]] of $R \sqbrk X$
+{{iff}}:
+:$a_0$ is a [[Definition:Unit of Ring|unit]] of $R$ and, for $i = 1, \ldots, n$, $a_i$ is [[Definition:Nilpotent Ring Element|nilpotent]] in $R$.
+\end{theorem}
+
+\begin{proof}
+=== Necessary condition ===
+Let $a_0$ be a [[Definition:Unit of Ring|unit]] of $R$.
+For $i = 1, \ldots, n$, let $a_i$ be [[Definition:Nilpotent Ring Element|nilpotent]] in $R$.
+Because the [[Definition:Nilradical of Ring|nilradical]] is an [[Definition:Ideal of Ring|ideal]] of $R$, it follows that:
+:$Q = -a_1 X - \dotsb - a_n X^n$
+is [[Definition:Nilpotent Ring Element|nilpotent]].
+Moreover, multiplying through by $a_0^{-1}$ we may as well assume that $a_0 = 1_R$.
+Then:
+:$P = 1_R - Q$
+and from [[Unity plus Negative of Nilpotent Ring Element is Unit]] $P$ is a [[Definition:Unit of Ring|unit]] of $R \sqbrk X$.
+{{qed|lemma}}
+=== Sufficient condition ===
+Let $\map P X$ be a [[Definition:Unit of Ring|unit]] of $R \sqbrk X$.
+That is, there exists:
+:$Q = b_0 + b_1 X + \dotsb + b_m X^m \in R \sqbrk X$
+such that $P Q = 1$.
+By the definition of [[Definition:Multiplication of Polynomial Forms|polynomial multiplication]] the [[Definition:Polynomial of Degree Zero|degree zero]] term of $P Q$ is $a_0 b_0$.
+Therefore $a_0 b_0 = 1_R$.
+So $a_0$ is a [[Definition:Unit of Ring|unit]] of $R$.
+{{qed|lemma}}
+Next we show that $a_1, \dotsc, a_n$ are [[Definition:Nilpotent Ring Element|nilpotent]].
+By [[Spectrum of Ring is Nonempty]], $R$ has at least one [[Definition:Prime Ideal of Ring|prime ideal]] $\mathfrak p$.
+By [[Prime Ideal iff Quotient Ring is Integral Domain]]:
+:$R / \mathfrak p$ is an [[Definition:Integral Domain|integral domain]].
+By [[Ring of Polynomial Forms over Integral Domain is Integral Domain]]:
+:$R / \mathfrak p \sqbrk X$ is also an [[Definition:Integral Domain|integral domain]].
+For any polynomial $T \in R \sqbrk X$ let $\overline T$ denote the image of $T$ under the [[Induced Homomorphism of Polynomial Forms]] defined by the [[Definition:Quotient Mapping|quotient mapping]] $R \to R / \mathfrak p$.
+Now we have:
+:$\overline P \cdot \overline Q = 1_{R / \mathfrak p}$
+By [[Units of Ring of Polynomial Forms over Integral Domain]] this implies that $\overline P$ has [[Definition:Polynomial of Degree Zero|degree zero]].
+In particular for $i = 1, \dotsc, n$, the image of $a_i$ in $R / \mathfrak p$ is $0_{R / \mathfrak p}$.
+By definition, this means that $a_i \in \mathfrak p$.
+But this is true for every [[Definition:Prime Ideal of Ring|prime ideal]] $\mathfrak p$.
+Thus by definition:
+:$a_i \in \Nil R$
+where $\Nil R$ denotes the [[Definition:Nilradical of Ring|nilradical]] of $R$.
+{{Qed}}
+[[Category:Polynomial Theory]]
+aewy1zp3boybd5gid8q2bag3v9j2elu
+\end{proof}<|endoftext|>
+\section{Polynomials Closed under Addition/Polynomials over Integral Domain}
+Tags: Polynomial Theory
+
+\begin{theorem}
+Let $\left({R, +, \circ}\right)$ be a [[Definition:Commutative Ring with Unity|commutative ring with unity]].
+Let $\left({D, +, \circ}\right)$ be an [[Definition:Subdomain|integral subdomain]] of $R$.
+Then $\forall x \in R$, the set $D \left[{x}\right]$ of [[Definition:Polynomial over Integral Domain|polynomials in $x$ over $D$]] is [[Definition:Closed Algebraic Structure|closed]] under the operation $+$.
+\end{theorem}<|endoftext|>
+\section{Polynomials Closed under Addition/Polynomials over Ring}
+Tags: Polynomial Theory
+
+\begin{theorem}
+Let $\left({R, +, \circ}\right)$ be a [[Definition:Ring (Abstract Algebra)|ring]].
+Let $\left({S, +, \circ}\right)$ be a [[Definition:Subring|subring]] of $R$.
+Then $\forall x \in R$, the set $S \left[{x}\right]$ of [[Definition:Polynomial over Ring|polynomials in $x$ over $S$]] is [[Definition:Closed Algebraic Structure|closed]] under the operation $+$.
+\end{theorem}
+
+\begin{proof}
+Let $p, q$ be [[Definition:Polynomial over Ring|polynomials in $x$ over $S$]].
+We can express them as:
+: $\displaystyle p = \sum_{k \mathop = 0}^m a_k \circ x^k$
+: $\displaystyle q = \sum_{k \mathop = 0}^n b_k \circ x^k$
+where:
+: $(1): \quad a_k, b_k \in S$ for all $k$
+: $(2): \quad m, n \in \Z_{\ge 0}$, that is, are [[Definition:Non-Negative Integer|non-negative integers]].
+Suppose $m = n$.
+Then:
+: $\displaystyle p + q = \sum_{k \mathop = 0}^n a_k \circ x^k + \sum_{k \mathop = 0}^n b_k \circ x^k$
+Because $\left({R, +, \circ}\right)$ is a [[Definition:Ring (Abstract Algebra)|ring]], it follows that:
+: $\displaystyle p + q = \sum_{k \mathop = 0}^n \left({a_k + b_k}\right) \circ x^k$
+which is also a polynomial in $x$ over $S$.
+{{handwaving|What needs to be done here is to establish that each of the terms is an element of $R$ and that each of the coefficients is an element of $S$. Trivial, but important.}}
+Now suppose [[Definition:WLOG|WLOG]] that $m > n$.
+Then we can express $q$ as:
+: $\displaystyle \sum_{k \mathop = 0}^n b_k \circ x^k + \sum_{k \mathop = n \mathop + 1}^m 0_S \circ x^k$
+Thus:
+: $\displaystyle p + q = \sum_{k \mathop = 0}^n \left({a_k + b_k}\right) \circ x^k + \sum_{k \mathop = n \mathop + 1}^m a_k \circ x^k$
+which is also a polynomial in $x$ over $S$.
+Thus the sum of two [[Definition:Polynomial over Ring|polynomials in $x$ over $S$]] is another [[Definition:Polynomial over Ring|polynomial in $x$ over $S$]].
+Hence the result.
+{{qed}}
+[[Category:Polynomial Theory]]
+i52w3woxyh57xvvzudjjhg7qfzd85ml
+\end{proof}<|endoftext|>
+\section{Polynomials Closed under Addition/Polynomial Forms}
+Tags: Polynomial Theory
+
+\begin{theorem}
+Let:
+: $\displaystyle f = \sum_{k \mathop \in Z} a_k \mathbf X^k$
+: $\displaystyle g = \sum_{k \mathop \in Z} b_k \mathbf X^k$
+be [[Definition:Polynomial Form|polynomials]] in the [[Definition:Indeterminate (Polynomial Theory)|indeterminates]] $\left\{{X_j: j \in J}\right\}$ over the [[Definition:Ring (Abstract Algebra)|ring]] $R$.
+Then the operation of [[Definition:Addition of Polynomial Forms|polynomial addition]] on $f$ and $g$ is defined as the sum:
+:$\displaystyle f \oplus g = \sum_{k \mathop \in Z} \left({a_k + b_k}\right) \mathbf X^k$
+Then $f \oplus g$ is a [[Definition:Polynomial Form|polynomial]].
+That is, the [[Definition:Binary Operation|operation]] of [[Definition:Addition of Polynomial Forms|polynomial addition]] is [[Definition:Closed Operation|closed]] on the set of all [[Definition:Polynomial Form|polynomials]] on a given set of [[Definition:Indeterminate (Polynomial Theory)|indeterminates]] $\left\{{X_j: j \in J}\right\}$.
+\end{theorem}
+
+\begin{proof}
+It is immediate that $f \oplus g$ is a map from the [[Definition:Free Commutative Monoid|free commutative monoid]] to $R$, so we need only prove that $f \oplus g$ is nonzero on finitely many $\mathbf X^k$, $k \in Z$.
+Suppose that for some $k \in Z$, $a_k + b_k \ne 0$.
+This forces at least one of $a_k$ and $b_k$ to be non-zero.
+This can only be true for a [[Definition:Finite|finite number]] of [[Definition:Term of Polynomial|terms]] because $f$ and $g$ are [[Definition:Polynomial Form|polynomials]].
+The result follows.
+{{qed}}
+[[Category:Polynomial Theory]]
+{{proofread}}
+meubepouwyzytdegniyqmpwk5ne5cvx
+\end{proof}<|endoftext|>
+\section{Fuzzy Intersection is Commutative}
+Tags: Fuzzy Set Theory
+
+\begin{theorem}
+[[Definition:Fuzzy Intersection|Fuzzy intersection]] is [[Definition:Commutative Operation|commutative]].
+\end{theorem}
+
+\begin{proof}
+Let $\textbf A = \left({A, \mu_A}\right)$ and $\textbf B = \left({B, \mu_B}\right)$ be [[Definition:Fuzzy Set|fuzzy sets]].
+=== Proving Domain Equality ===
+By the definition of [[Definition:Fuzzy Intersection|fuzzy intersection]] the [[Definition:Fuzzy Set/Domain|domain]] of $\textbf A \cap \textbf B$ is:
+:$A \cap B$
+Similarly the domain of $\textbf B \cap \textbf A$ is:
+:$B \cap A$
+By [[Intersection is Commutative]]:
+:$A \cap B = B \cap A$
+Hence their domains are equal.
+{{qed|lemma}}
+=== Proving Membership Function Equality ===
+==== Proving Form Equality ====
+By the definition of [[Definition:Fuzzy Intersection|fuzzy intersection]] the [[Definition:Fuzzy Set/Membership Function|membership function]] of $\textbf A \cap \textbf B$ is of the form:
+:$\mu:A \cap B \to \left [{0 \,.\,.\, 1}\right]$
+Similarly, the [[Definition:Fuzzy Set/Membership Function|membership function]] of $\textbf B \cap \textbf A$ is of the form:
+:$\mu:B \cap A \to \left [{0 \,.\,.\, 1}\right]$
+By [[Intersection is Commutative]] this is the same as:
+:$\mu:A \cap B \to \left [{0 \,.\,.\, 1}\right]$
+Hence the [[Definition:Fuzzy Set/Membership Function|membership functions]] are of the same form.
+==== Proving Rule Equality ====
+{{begin-eqn}}
+{{eqn | l=\forall x \in A \cap B: \mu_{A \cap B}(x) = \operatorname{min}\left({\mu_A(x), \mu_B(x)}\right)
+ | o=\iff
+ | r=\forall x \in B \cap A: \mu_{B \cap A}(x) = \operatorname{min}\left({\mu_A(x), \mu_B(x)}\right)
+ | c=[[Intersection is Commutative]]
+}}
+{{eqn | r=\forall x \in B \cap A: \mu_{B \cap A}(x) = \operatorname{min}\left({\mu_B(x), \mu_A(x)}\right)
+ | o=\iff
+ | c=[[Max and Min are Commutative|Min Operation is Commutative]]
+}}
+{{end-eqn}}
+Hence the [[Definition:Fuzzy Set/Membership Function|membership functions]] have the same rule.
+{{qed}}
+[[Category:Fuzzy Set Theory]]
+s2aicbj8uixr4w0uhz7a9168hepj7x5
+\end{proof}<|endoftext|>
+\section{Nilpotent Element is Zero Divisor}
+Tags: Nilpotent Ring Elements, Zero Divisors
+
+\begin{theorem}
+Let $\struct {R, +, \circ}$ be a [[Definition:Ring (Abstract Algebra)|ring]] whose [[Definition:Ring Zero|zero]] is $0_R$.
+Suppose further that $R$ is not the [[Definition:Null Ring|null ring]].
+Let $x \in R$ be a [[Definition:Nilpotent Ring Element|nilpotent element]] of $R$.
+Then $x$ is a [[Definition:Zero Divisor of Ring|zero divisor]] in $R$.
+\end{theorem}
+
+\begin{proof}
+First note that when $R$ is the [[Definition:Null Ring|null ring]] the result is false.
+This is because although $0_R$ is a [[Definition:Nilpotent Ring Element|nilpotent element]] in the [[Definition:Null Ring|null ring]], it is not actually a [[Definition:Zero Divisor of Ring|zero divisor]].
+Hence in this case $0_R$ is [[Definition:Nilpotent Ring Element|nilpotent]] but not a [[Definition:Zero Divisor of Ring|zero divisor]].
+So, let $R$ be a non-[[Definition:Null Ring|null ring]].
+By hypothesis, there exists $n \in \Z_{>0}$ such that $x^n = 0_R$.
+If $n = 1$, then $x = 0_R$.
+By hypothesis, $R$ is not the [[Definition:Null Ring|null ring]], so we may choose $y \in R \setminus \set 0$.
+By [[Ring Product with Zero]]:
+:$y \circ x = y \circ 0_R = 0_R$
+Therefore $x$ is a [[Definition:Zero Divisor of Ring|zero divisor]] in $R$.
+If $n \ge 2$, define $y = x^{n - 1}$.
+Then:
+:$y \circ x = x^{n - 1} \circ x = x^n = 0_R$
+so $x$ is a [[Definition:Zero Divisor of Ring|zero divisor]] in $R$.
+{{Qed}}
+[[Category:Nilpotent Ring Elements]]
+[[Category:Zero Divisors]]
+820we46dktq2afef9pcvxgsk5f4sprg
+\end{proof}<|endoftext|>
+\section{Integral Domain is Reduced Ring}
+Tags: Ring Theory
+
+\begin{theorem}
+Let $\left({D, +, \circ}\right)$ be an [[Definition:Integral Domain|integral domain]].
+Then $D$ is [[Definition:Reduced Ring|reduced]].
+\end{theorem}
+
+\begin{proof}
+Let $x \in D$ be a [[Definition:Nilpotent Ring Element|nilpotent element]].
+Then by [[Nilpotent Element is Zero Divisor]], $x$ is a [[Definition:Zero Divisor|zero divisor]] in $D$.
+By the definition of an [[Definition:Integral Domain|integral domain]], this means that $x = 0$.
+Therefore the only [[Definition:Nilpotent Ring Element|nilpotent element]] of $D$ is $0$.
+That is, $D$ is [[Definition:Reduced Ring|reduced]].
+{{Qed}}
+[[Category:Ring Theory]]
+fm7xo02i9ov8o6hcq03u0qp2633wo8v
+\end{proof}<|endoftext|>
+\section{Units of Ring of Polynomial Forms over Integral Domain}
+Tags: Polynomial Theory
+
+\begin{theorem}
+Let $\struct {D, +, \circ}$ be an [[Definition:Integral Domain|integral domain]].
+Let $D \sqbrk X$ be the [[Definition:Ring of Polynomial Forms|ring of polynomial forms]] in an [[Definition:Indeterminate (Polynomial Theory)|indeterminate]] $X$ over $D$.
+Then the [[Definition:Group of Units of Ring|group of units]] of $D \sqbrk X$ is precisely the [[Definition:Group|group]] of [[Definition:Element|elements]] of $D \sqbrk X$ of [[Definition:Polynomial of Degree Zero|degree zero]] that are [[Definition:Unit of Ring|units]] of $D$.
+\end{theorem}
+
+\begin{proof}
+It is immediate that a [[Definition:Unit of Ring|unit]] of $D$ is also a [[Definition:Unit of Ring|unit]] of $D \sqbrk X$.
+Let $P$ be a [[Definition:Unit of Ring|unit]] of $D \sqbrk X$.
+Then there exists $Q \in D \sqbrk X$ such that $P Q = 1$.
+By [[Degree of Product of Polynomials over Ring/Corollary 2|Corollary 2 to Degree of Product of Polynomials over Ring]] we have:
+:$0 = \map \deg 1 = \map \deg P + \map \deg Q$
+Therefore:
+:$\map \deg P = \map \deg Q = 0$
+That is, $P \in D$ and $Q \in D$.
+Moreover $P Q = 1$ in $D$, so it follows that $P$ is a [[Definition:Unit of Ring|unit]] of $D$.
+{{Qed}}
+[[Category:Polynomial Theory]]
+4dkr1neieh4qu20hakri4h3i80a93f5
+\end{proof}<|endoftext|>
+\section{Kernel of Magma Homomorphism is Submagma}
+Tags: Abstract Algebra
+
+\begin{theorem}
+Let $\left({S, *}\right)$ and $\left({T, \circ}\right)$ be [[Definition:Algebraic Structure|algebraic structures]].
+Let $\left({T, \circ}\right)$ have an [[Definition:Identity Element|identity]] $e$.
+Let $\phi: S \to T$ be a [[Definition:Homomorphism (Abstract Algebra)|magma homomorphism]].
+Then the [[Definition:Kernel of Magma Homomorphism|kernel]] of $\phi$ is a [[Definition:Submagma|submagma]] of $\left({S, *}\right)$.
+That is:
+:$\left({\phi^{-1} \left({e}\right), *}\right)$ is a [[Definition:Submagma|submagma]] of $\left({S, *}\right)$
+where $\phi^{-1} \left({e}\right)$ denotes the [[Definition:Preimage of Element under Mapping|preimage]] of $e$.
+\end{theorem}
+
+\begin{proof}
+Let $x, y \in \phi^{-1} \left({e}\right)$.
+It is to be shown that:
+:$x * y \in \phi^{-1} \left({e}\right)$
+Thus:
+{{begin-eqn}}
+{{eqn | l = x, y \in \phi^{-1} \left({e}\right)
+ | o = \iff
+ | r = \left({\phi \left({x}\right) = e}\right) \land \left({\phi \left({y}\right) = e}\right)
+ | c = Definition of [[Definition:Kernel of Magma Homomorphism|Kernel]]
+}}
+{{eqn | o = \iff
+ | r = \phi \left({x}\right) \circ \phi \left({y}\right) = e
+ | c = Definition of [[Definition:Identity Element|Identity]]
+}}
+{{eqn | o = \iff
+ | r = \phi \left({x * y}\right) = e
+ | c = Definition of [[Definition:Homomorphism (Abstract Algebra)|Homomorphism]]
+}}
+{{eqn | o = \iff
+ | r = x*y \in \phi^{-1} \left({e}\right)
+ | c = Definition of [[Definition:Preimage of Element under Mapping|Preimage]]
+}}
+{{end-eqn}}
+Hence the result.
+{{qed}}
+[[Category:Abstract Algebra]]
+677e0zsz2ea0h4jfe8kvauldn584i1e
+\end{proof}<|endoftext|>
+\section{Preimage of Zero of Homomorphism is Submagma}
+Tags: Abstract Algebra
+
+\begin{theorem}
+Let $\struct {S, *}$ be a [[Definition:Magma|magma]].
+Let $\struct {T, \circ}$ be a [[Definition:Magma|magma]] with a [[Definition:Zero Element|zero element]] $0$.
+Let $\phi: S \to T$ be a [[Definition:Homomorphism (Abstract Algebra)|magma homomorphism]].
+Then $\struct {\phi^{-1} \sqbrk 0, *}$ is a [[Definition:Submagma|submagma]] of $\struct {S, *}$.
+\end{theorem}
+
+\begin{proof}
+Let $x, y \in \phi^{-1} \sqbrk 0$.
+It is to be shown that:
+:$x * y \in \phi^{-1} \sqbrk 0$
+Thus:
+{{begin-eqn}}
+{{eqn | l = x, y \in \phi^{-1} \sqbrk 0
+ | o = \leadstoandfrom
+ | r = \paren {\map \phi x = 0} \land \paren {\map \phi y = 0}
+ | c = {{Defof|Preimage of Element under Mapping}}
+}}
+{{eqn | o = \leadstoandfrom
+ | r = \map \phi x \circ \map \phi y = 0
+ | c = {{Defof|Zero Element}}
+}}
+{{eqn | o = \leadstoandfrom
+ | r = \map \phi {x * y} = 0
+ | c = {{Defof|Homomorphism (Abstract Algebra)}}
+}}
+{{eqn | o = \leadstoandfrom
+ | r = x * y \in \phi^{-1} \sqbrk 0
+ | c = {{Defof|Preimage of Element under Mapping}}
+}}
+{{end-eqn}}
+Hence the result.
+{{qed}}
+[[Category:Abstract Algebra]]
+6xlhe0vgz3184l91icqbi52xt92i8m8
+\end{proof}<|endoftext|>
+\section{Polynomial over Field is Reducible iff Scalar Multiple is Reducible}
+Tags: Polynomial Theory
+
+\begin{theorem}
+Let $K$ be a [[Definition:Field (Abstract Algebra)|field]].
+Let $K \left[{X}\right]$ be the [[Definition:Ring of Polynomial Forms|ring of polynomial forms]] over $K$.
+Let $P \in K \left[{X}\right]$.
+Let $\lambda \in K \setminus \left\{{0}\right\}$.
+Then $P$ is [[Definition:Irreducible Polynomial|irreducible]] in $K \left[{X}\right]$ [[Definition:Iff|iff]] $\lambda P$ is also [[Definition:Irreducible Polynomial|irreducible]] in $K \left[{X}\right]$.
+{{expand|Investigate whether this result also holds where $K$ is a general ring.}}
+\end{theorem}
+
+\begin{proof}
+=== Necessary Condition ===
+Let $P$ be [[Definition:Irreducible Polynomial|irreducible]].
+Suppose further that $ \lambda P$ has a [[Definition:Trivial Factorization|non-trivial factorization]]:
+:$\displaystyle \lambda P = Q_1 Q_2$
+that is, such that $Q_1$ and $Q_2$ are not [[Definition:Unit of Ring|units]] of $K \left[{X}\right]$.
+By [[Units of Ring of Polynomial Forms over Field]] it follows that $\deg Q_1 \ge 1$ and $\deg Q_2 \ge 1$.
+Let $Q_1' = \lambda^{-1} Q_1$.
+This implies that:
+:$P = Q_1' Q_2$
+with $\deg Q_1' = \deg Q_1 \ge 1$.
+But this is a [[Definition:Trivial Factorization|non-trivial factorization]] of $P$ in $K \left[{X}\right]$.
+This contradicts our supposition that $P$ is [[Definition:Irreducible Polynomial|irreducible]].
+Therefore $\lambda P$ has no [[Definition:Trivial Factorization|non-trivial factorization]], that is, $\lambda P$ is [[Definition:Irreducible Polynomial|irreducible]].
+{{qed|lemma}}
+=== Sufficient Condition ===
+Let $\lambda P$ be [[Definition:Irreducible Polynomial|irreducible]].
+Let $Q = \lambda P$.
+From the [[Polynomial over Field is Reducible iff Scalar Multiple is Reducible#Necessary Condition|necessary condition]], we know that any [[Definition:Scalar Multiple of Polynomial|scalar multiple]] of $Q$ is [[Definition:Irreducible Polynomial|irreducible]].
+In particular:
+:$\lambda^{-1}Q = \lambda^{-1}\lambda P = P$
+is [[Definition:Irreducible Polynomial|irreducible]], the required result.
+{{Qed}}
+[[Category:Polynomial Theory]]
+nw6yg0yol9cx0ajlkik5il3g22753ym
+\end{proof}<|endoftext|>
+\section{Conjunction of Disjunctions Consequence}
+Tags: Conjunction, Disjunction
+
+\begin{theorem}
+:$\left({p \lor q}\right) \land \left({r \lor s}\right) \vdash p \lor r \lor \left({q \land s}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\left({p \lor q}\right) \land \left({r \lor s}\right) \vdash \left({p \lor r}\right) \lor \left({q \land s}\right)}}
+{{Premise|1|\left({p \lor q}\right) \land \left({r \lor s}\right)}}
+{{SequentIntro|2|1|\left({p \land \left({r \lor s}\right)}\right) \lor \left({q \land \left({r \lor s}\right)}\right)|1|[[Rule of Distribution/Conjunction Distributes over Disjunction/Right Distributive/Formulation 1|Conjunction Distributes over Disjunction]]}}
+{{TheoremIntro|3|p \land \left({r \lor s}\right) \implies p|[[Rule of Simplification/Sequent Form/Formulation 2|Simplification]]}}
+{{TheoremIntro|4|q \land \left({r \lor s}\right) \implies \left({q \land r}\right) \lor \left({q \land s}\right)|[[Rule of Distribution/Conjunction Distributes over Disjunction/Left Distributive/Formulation 2/Forward Implication|Conjunction Distributes over Disjunction]]}}
+{{TheoremIntro|5|q \land r \implies r|[[Rule of Simplification/Sequent Form/Formulation 2|Simplification]]}}
+{{TheoremIntro|6|q \land s \implies q \land s|[[Law of Identity/Formulation 2]]}}
+{{SequentIntro|7||\left({q \land r}\right) \lor \left({q \land s}\right) \implies r \lor \left({q \land s}\right)|5,6|[[Constructive Dilemma/Formulation 1|Constructive Dilemma]]}}
+{{SequentIntro|8||q \land \left({r \lor s}\right) \implies r \lor \left({q \land s}\right)|4,7|[[Hypothetical Syllogism/Formulation 1|Hypothetical Syllogism]]}}
+{{SequentIntro|9||\left({p \lor q}\right) \land \left({r \lor s}\right) \implies p \lor \left({r \lor \left({q \land s}\right)}\right)|3,8|[[Constructive Dilemma/Formulation 1|Constructive Dilemma]]}}
+{{ModusPonens|10|1|p \lor \left({r \lor \left({q \land s}\right)}\right)|9|1}}
+{{SequentIntro|11|1|\left({p \lor r}\right) \lor \left({q \land s}\right)|10|[[Rule of Association/Disjunction/Formulation 1|Rule of Association]]}}
+{{EndTableau}}
+{{qed}}
+[[Category:Conjunction]]
+[[Category:Disjunction]]
+75311jdpussf09y7sbnft5t38dm2bjf
+\end{proof}<|endoftext|>
+\section{Existence of Ring of Polynomial Forms in Transcendental over Integral Domain}
+Tags: Polynomial Theory
+
+\begin{theorem}
+Let $\struct {R, +, \circ}$ be a [[Definition:Commutative Ring with Unity|commutative ring with unity]].
+Let $\struct {D, +, \circ}$ be an [[Definition:Subdomain|integral subdomain]] of $R$ whose [[Definition:Ring Zero|zero]] is $0_D$.
+Let $X \in R$ be [[Definition:Transcendental over Integral Domain|transcendental over $D$]].
+Then the [[Definition:Ring of Polynomials in Ring Element|ring of polynomials $D \sqbrk X$]] in $X$ over $D$ exists.
+\end{theorem}
+
+\begin{proof}
+{{finish|The following is an outline only}}
+Suppose that $D \sqbrk X$ exists.
+Let $\displaystyle \map P X = \sum_{k \mathop = 0}^n a_k X^k$, where $a_n \ne 0_D$, be an arbitrary [[Definition:Element|element]] of $D \sqbrk X$.
+Then $\displaystyle \map P X$ corresponds to, and is completely described by, the [[Definition:Ordered Tuple|ordered tuple]] of [[Definition:Coefficient|coefficients]] $\tuple {a_0, a_1, \dotsc, a_n, 0_D, 0_D, 0_D, \dotsc}$.
+Consider the set $S$ of [[Definition:Infinite Sequence|infinite sequences]] of [[Definition:Element|elements]] of $D$ which are eventually $0_D$.
+That is, whose [[Definition:Element|elements]] are of the form $\tuple {b_0, b_1, \dotsc, b_n, 0_D, 0_D, 0_D, \dotsc}$ where $b_0, \ldots, b_n \in D$.
+Consider the [[Definition:Polynomial Ring over Sequence|polynomial ring over $S$]] by defining the operations:
+{{begin-axiom}}
+{{axiom | n = 1
+ | lc= '''Ring Addition:'''
+ | ml= \sequence {r_0, r_1, r_2, \ldots} + \sequence {s_0, s_1, s_2, \ldots}
+ | mo= =
+ | mr= \sequence {r_0 + s_0, r_1 + s_1, r_2 + s_2, \ldots}
+ | c =
+}}
+{{axiom | n = 2
+ | lc= '''Ring Negative:'''
+ | ml= -\sequence {r_0, r_1, r_2, \ldots}
+ | mo= =
+ | mr= \sequence {-r_0, -r_1, -r_2, \ldots}
+ | c =
+}}
+{{axiom | n = 3
+ | lc= '''Ring Product:'''
+ | ml= \sequence {r_0, r_1, r_2, \ldots} \circ \sequence {s_0, s_1, s_2, \ldots}
+ | mo= =
+ | mr= \sequence {t_0, t_1, t_2, \ldots}
+ | rc= where $\displaystyle t_i = \sum_{j \mathop + k \mathop = i} r_j s_k$
+}}
+{{end-axiom}}
+From [[Polynomial Ring of Sequences is Ring]] we have that $\struct {S, +, \circ}$ is a [[Definition:Ring (Abstract Algebra)|ring]].
+{{Finish|To be proved: a) that the sequences $\tuple {a_0, 0_D, 0_D, \dotsc}$ form a subring $D'$ of $\struct {S, +, \circ}$ isomorphic to $D$, b) the sequence $\tuple {0_D, 1_D, 0_D, 0_D, \dotsc}$ is transcendental over $D'$, and c) that $D' \sqbrk X$ is the whole of $S$. Thus we have constructed $D' \simeq D$. If we now ignore the difference between $a_0 \in D$ and $\tuple {a_0, 0_D, 0_D, \dotsc} \in D'$ so that $D'$ is identified with $D$ the ring $D \sqbrk X$ has been constructed as required.}}
+\end{proof}<|endoftext|>
+\section{Rule of Distribution/Conjunction Distributes over Disjunction/Left Distributive/Formulation 2/Proof 2}
+Tags: Rule of Distribution
+
+\begin{theorem}
+:$\vdash \left({p \land \left({q \lor r}\right)}\right) \iff \left({\left({p \land q}\right) \lor \left({p \land r}\right)}\right)$
+\end{theorem}
+
+\begin{proof}
+{{BeginTableau|\vdash \left({p \land \left({q \lor r}\right)}\right) \iff \left({\left({p \land q}\right) \lor \left({p \land r}\right)}\right)}}
+{{TheoremIntro|1|\left({p \land \left({q \lor r}\right)}\right) \implies \left({\left({p \land q}\right) \lor \left({p \land r}\right)}\right)|[[Rule of Distribution/Conjunction Distributes over Disjunction/Left Distributive/Formulation 2/Forward Implication|Conjunction Distributes over Disjunction: Forward Implication]]}}
+{{TheoremIntro|2|\left({\left({p \land q}\right) \lor \left({p \land r}\right)}\right) \implies \left({p \land \left({q \lor r}\right)}\right)|[[Rule of Distribution/Conjunction Distributes over Disjunction/Left Distributive/Formulation 2/Reverse Implication|Conjunction Distributes over Disjunction: Reverse Implication]]}}
+{{BiconditionalIntro|3||\left({p \land \left({q \lor r}\right)}\right) \iff \left({\left({p \land q}\right) \lor \left({p \land r}\right)}\right)|1|2}}
+{{EndTableau}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Eisenstein Integers form Integral Domain}
+Tags: Number Theory, Integral Domains, Eisenstein Integers
+
+\begin{theorem}
+The [[Definition:Ring of Eisenstein Integers|ring of Eisenstein integers]] $\struct {\Z \sqbrk \omega, +, \times}$ is an [[Definition:Integral Domain|integral domain]].
+\end{theorem}
+
+\begin{proof}
+By [[Eisenstein Integers form Subring of Complex Numbers]] we know that $\struct {\Z \sqbrk \omega, +, \times}$ is a [[Definition:Subring|subring]] of the [[Definition:Complex Number|complex numbers]] $\C$.
+Let $1_\C$ be the [[Definition:Unity of Ring|unity]] of $\C$.
+Let $1_\omega$ be the [[Definition:Unity of Ring|unity]] of $\Z \sqbrk \omega$.
+By the [[Subdomain Test]] it suffices to show that $1_\C = 1_\omega$.
+By [[Unity of Ring is Unique]] it suffices to show that $1_\C$ is a [[Definition:Unity of Ring|unity]] of $\Z \sqbrk \omega$.
+First we note that:
+:$\Z \sqbrk \omega = \set {a + b\omega: a, b \in \Z}$
+In particular:
+:$1_\C \in \Z \sqbrk \omega$
+Moreover, by definition, $\Z \sqbrk \omega$ inherits its [[Definition:Ring Product|ring product]] from $\C$.
+For any $\alpha \in \Z \sqbrk \omega$:
+:$1_\C \alpha = \alpha 1_\C = \alpha$
+in $\C$.
+Therefore this identity holds in $\Z \sqbrk \omega$ as well.
+{{Qed}}
+[[Category:Number Theory]]
+[[Category:Integral Domains]]
+[[Category:Eisenstein Integers]]
+5sqorqvi9uyhayy87nukjuax98mo1k8
+\end{proof}<|endoftext|>
+\section{Eisenstein Integers form Subring of Complex Numbers}
+Tags: Integral Domains, Subrings, Complex Numbers, Eisenstein Integers
+
+\begin{theorem}
+The set of [[Definition:Eisenstein Integer|Eisenstein integers]] $\Z \sqbrk \omega$, under the operations of [[Definition:Complex Addition|complex addition]] and [[Definition:Complex Multiplication|complex multiplication]], forms a [[Definition:Subring|subring]] of the set of [[Definition:Complex Number|complex numbers]] $\C$.
+\end{theorem}
+
+\begin{proof}
+We will use the [[Subring Test]].
+This is valid, as the [[Complex Numbers form Field|set of complex numbers $\C$ forms a field]], which is [[Definition:Field (Abstract Algebra)|by definition]] itself a [[Definition:Ring (Abstract Algebra)|ring]].
+We note that $\Z \sqbrk \omega$ is not [[Definition:Empty Set|empty]], as (for example) $0 + 0 \omega \in \Z \sqbrk \omega$.
+Let $a + b \omega, c + d \omega \in \Z \sqbrk \omega$.
+Then we have $-\paren {c + d \omega} = -c - d \omega$, and so:
+{{begin-eqn}}
+{{eqn | l = \paren {a + b \omega} + \paren {-\paren {c + d \omega} }
+ | r = \paren {a + b \omega} + \paren {-c - d \omega}
+ | c =
+}}
+{{eqn | r = \paren {a + \paren {-c} } + \paren {b + \paren {-d} } \omega
+ | c =
+}}
+{{eqn | r = \paren {a - c} + \paren {b - d} \omega
+ | c =
+}}
+{{end-eqn}}
+We have that $a, b, c, d \in \Z$ and [[Integers form Integral Domain|$\Z$ is an integral domain]].
+Therefore by [[Definition:Integral Domain|definition]] $\Z$ is a [[Definition:Ring (Abstract Algebra)|ring]].
+So it follows that $a - c \in \Z$ and $b - d \in \Z$.
+Hence $\paren {a - c} + \paren {b - d} \omega \in \Z \sqbrk \omega$.
+Now consider $\paren {a + b \omega} \paren {c + d \omega}$.
+By the definition of [[Definition:Complex Multiplication|complex multiplication]], we have:
+:$\paren {a + b \omega} \paren {c + d \omega} = \paren {a c - b d} + \paren {a d + b c} \omega$
+As $a, b, c, d \in \Z$ and $\Z$ is a [[Definition:Ring (Abstract Algebra)|ring]], it follows that $a c - b d \in \Z$ and $ad + bc \in \Z$.
+Hence:
+:$\paren {a + b \omega} \paren {c + d \omega} \in \Z \sqbrk \omega$
+So by the [[Subring Test]], $\Z \sqbrk \omega$ is a [[Definition:Subring|subring]] of $\C$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Norm of Eisenstein Integer}
+Tags: Algebraic Number Theory
+
+\begin{theorem}
+Let $\alpha$ be an [[Definition:Eisenstein Integer|Eisenstein integer]].
+That is, $\alpha = a + b \omega$ for some $a, b \in \Z$, where $\omega = e^{2\pi i /3}$.
+Then:
+:$\cmod \alpha^2 = a^2 - a b + b^2$
+where $\cmod {\, \cdot \,}$ denotes the [[Definition:Complex Modulus|modulus]] of a [[Definition:Complex Number|complex number]].
+\end{theorem}
+
+\begin{proof}
+We find that:
+{{begin-eqn}}
+{{eqn | l = \cmod \alpha^2
+ | r = \alpha \overline \alpha
+ | c = [[Modulus in Terms of Conjugate]]
+}}
+{{eqn | r = \paren {a + b \omega} \paren {\overline {a + b \omega} }
+ | c = [[Modulus in Terms of Conjugate]]
+}}
+{{eqn | r = \paren {a + b \omega} \paren {\overline a + \overline b \overline \omega}
+ | c = [[Sum of Complex Conjugates]] and [[Product of Complex Conjugates]]
+}}
+{{eqn | r = \paren {a + b \omega} \paren {a + b \overline \omega}
+ | c = [[Complex Number equals Conjugate iff Wholly Real]]
+}}
+{{eqn | r = a^2 + \paren {\omega + \overline \omega} a b + \omega \overline \omega b^2
+ | c =
+}}
+{{end-eqn}}
+By the definition of the [[Definition:Polar Form of Complex Number|polar form of a complex number]]:
+:$\omega = \exp \paren {\dfrac {2 \pi i} 3} = \map \cos {\dfrac {2 \pi} 3} + i \, \map \sin {\dfrac {2 \pi} 3} = -\dfrac 1 2 + i \dfrac {\sqrt 3} 2$
+Thus by [[Sum of Complex Number with Conjugate]]:
+:$\omega + \overline \omega = 2 \cdot \paren {-\dfrac 1 2} = -1$
+Also:
+{{begin-eqn}}
+{{eqn | l = \omega \overline \omega
+ | r = \map \exp {\dfrac {2 \pi i} 3} \, \overline {\map \exp {\dfrac {2 \pi i} 3} }
+ | c =
+}}
+{{eqn | r = \map \exp {\dfrac {2 \pi i} 3} \, \map \exp {-\dfrac {2 \pi i} 3}
+ | c = [[Polar Form of Complex Conjugate]]
+}}
+{{eqn | r = \map \exp {\dfrac {2 \pi i} 3 - \dfrac {2 \pi i} 3}
+ | c = [[Exponential of Sum]]
+}}
+{{eqn | r = \map \exp 0
+ | c =
+}}
+{{eqn | r = 1
+ | c = [[Exponential of Zero]]
+}}
+{{end-eqn}}
+Therefore:
+:$\cmod \alpha^2 = a^2 + \paren {\omega + \overline \omega} a b + \omega \overline \omega b^2 = a^2 - a b + b^2$
+as required.
+{{qed}}
+[[Category:Algebraic Number Theory]]
+033fhr3hho1o0vaercxao9w8ucefa8h
+\end{proof}<|endoftext|>
+\section{Polynomial Forms over Field form Integral Domain/Formulation 2}
+Tags: Polynomial Forms over Field form Integral Domain
+
+\begin{theorem}
+Let $\struct {F, +, \circ}$ be a [[Definition:Field (Abstract Algebra)|field]] whose [[Definition:Field Zero|zero]] is $0_F$ and whose [[Definition:Unity of Field|unity]] is $1_F$.
+Let $\GF$ be the [[Definition:Set|set]] of all [[Definition:Polynomial over Field as Sequence|polynomials over $\struct {F, +, \circ}$ defined as sequences]].
+Let [[Definition:Addition of Polynomials over Field as Sequence|polynomial addition]] and [[Definition:Multiplication of Polynomials over Field as Sequence|polynomial multiplication]] be defined as:
+:$\forall f = \sequence {a_k} = \tuple {a_0, a_1, a_2, \ldots}, g = \sequence {b_k} = \tuple {b_0, b_1, b_2, \ldots} \in \GF$:
+::$f \oplus g := \tuple {a_0 + b_0, a_1 + b_1, a_2 + b_2, \ldots}$
+::$f \otimes g := \tuple {c_0, c_1, c_2, \ldots}$ where $\displaystyle c_i = \sum_{j \mathop + k \mathop = i} a_j \circ b_k$
+Then $\struct {\GF, \oplus, \otimes}$ is an [[Definition:Integral Domain|integral domain]].
+\end{theorem}
+
+\begin{proof}
+As $\struct {F, +, \circ}$ is a [[Definition:Field (Abstract Algebra)|field]], it is also by definition a [[Definition:Ring (Abstract Algebra)|ring]].
+Thus from [[Polynomial Ring of Sequences is Ring]] we have that $\struct {\GF, \oplus, \otimes}$ is a [[Definition:Ring (Abstract Algebra)|ring]].
+{{explain|Use an analogous result to [[Ring of Polynomial Forms is Commutative Ring with Unity]] to get the CRU bit done}}
+From [[Field is Integral Domain]], a [[Definition:Field (Abstract Algebra)|field]] is also by definition an [[Definition:Integral Domain|integral domain]].
+Let $f, g \in \GF$ such that neither $f$ nor $g$ are the [[Definition:Null Polynomial over Sequence|null polynomial]].
+Let:
+:$\deg f = m, \deg g = n$
+where $\deg$ denotes the [[Definition:Degree of Polynomial over Field as Sequence|degree]] of $f$ and $g$ respectively.
+By [[Degree of Product of Polynomials over Integral Domain]], the [[Definition:Degree of Polynomial over Field as Sequence|degree]] of $f \otimes g$ is $m + n$.
+Then by definition of [[Definition:Multiplication of Polynomials over Field as Sequence|polynomial multiplication]], its [[Definition:Leading Coefficient of Polynomial|leading coefficient]] is $a_m \circ b_n$.
+As by definition an [[Definition:Integral Domain|integral domain]] has no [[Definition:Proper Zero Divisor|proper zero divisors]]:
+:$a_m \circ b_n \ne 0_F$.
+So, by definition, $f \otimes g$ has a [[Definition:Leading Coefficient of Polynomial|leading coefficient]] which is not $0_F$.
+That is, $f \otimes g$ is not the [[Definition:Null Polynomial over Sequence|null polynomial]].
+The result follows by definition of [[Definition:Integral Domain|integral domain]].
+\end{proof}<|endoftext|>
+\section{Maximal Spectrum of Ring is Nonempty}
+Tags: Commutative Algebra
+
+\begin{theorem}
+Let $A$ be a [[Definition:Non-Trivial Ring|non-trivial]] [[Definition:Commutative and Unitary Ring|commutative ring with unity]].
+Then its [[Definition:Maximal Spectrum of Ring|maximal spectrum]] is [[Definition:Non-Empty Set|non-empty]]:
+:$\operatorname {Max} \Spec A \ne \O$
+\end{theorem}
+
+\begin{proof}
+This is a reformulation of [[Krull's Theorem]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Ring of Polynomial Functions is Commutative Ring with Unity}
+Tags: Polynomial Theory
+
+\begin{theorem}
+Let $\struct {R, +, \circ}$ be a [[Definition:Commutative and Unitary Ring|commutative ring with unity]].
+Let $R \sqbrk {\set {X_j: j \in J} }$ be the [[Definition:Ring of Polynomial Forms|ring of polynomial forms]] over $R$ in the [[Definition:Indeterminate (Polynomial Theory)|indeterminates]] $\set {X_j: j \in J}$.
+Let $R^J$ be the [[Definition:Free Module|free module]] on $J$.
+Let $A$ be the set of all [[Definition:Polynomial Function/General Definition|polynomial functions]] $R^J \to R$.
+Let $\struct {A, +, \circ}$ be the [[Definition:Ring of Polynomial Functions|ring of polynomial functions]] on $R$.
+Then $\struct {A, +, \circ}$ is a [[Definition:Commutative and Unitary Ring|commutative ring with unity]].
+\end{theorem}
+
+\begin{proof}
+First we check that the operations of [[Definition:Ring Product|ring product]] and [[Definition:Ring Addition|ring addition]] are [[Definition:Closed Operation|closed in $A$]].
+Let $Z$ be the set of all [[Definition:Multiindex|multiindices]] indexed by $J$.
+Let:
+:$\displaystyle f = \sum_{k \mathop \in Z} a_k \mathbf X^k, \ g = \sum_{k \mathop \in Z} b_k \mathbf X^k \in R \sqbrk {\set {X_j: j \in J} }$.
+Under the [[Equality of Polynomials|evaluation homomorphism]], $f$ and $g$ map to:
+:$\displaystyle A \owns \hat f: \forall x \in R^J: \map {\hat f} x = \sum_{k \mathop \in Z} a_k x^k$
+:$\displaystyle A \owns \hat g: \forall x \in R^J: \map {\hat g} x = \sum_{k \mathop \in Z} b_k x^k$
+{{explain|Clarification needed on the above link: exactly what the "evaluation homomorphism" is needs to be established.}}
+Then the [[Definition:Pointwise Operation|induced pointwise sum]] of $\hat f$ and $\hat g$ is:
+{{begin-eqn}}
+{{eqn | l = \map {\hat f} x + \map {\hat g} x
+ | r = \sum_{k \mathop \in Z} a_k x^k + \sum_{k \mathop \in Z} b_k x^k
+}}
+{{eqn | r = \sum_{k \mathop \in Z} \paren {a_k + b_k} x^k
+}}
+{{eqn | r = \map {\widehat {f + g} } x
+ | c = {{Defof|Addition of Polynomial Forms}}
+}}
+{{end-eqn}}
+Thus [[Definition:Polynomial Function (Abstract Algebra)|polynomial functions]] are [[Definition:Closed Algebraic Structure|closed]] under [[Definition:Ring Addition|ring addition]].
+The [[Definition:Pointwise Operation|induced pointwise product]] of $\hat f$ and $\hat g$ is:
+{{begin-eqn}}
+{{eqn | l = \map {\hat f} x \circ \map {\hat g} x
+ | r = \paren {\sum_{k \mathop \in Z} a_k x^k} \circ \paren {\sum_{k \mathop \in Z} b_k x^k}
+}}
+{{eqn | r = \sum_{k \mathop \in Z} \paren {\sum_{p + q \mathop = k} a_p b_q} x^k
+}}
+{{eqn | r = \map {\widehat {f \circ g} } x
+ | c = {{Defof|Multiplication of Polynomial Forms}}
+}}
+{{end-eqn}}
+Thus [[Definition:Polynomial Function (Abstract Algebra)|polynomial functions]] are [[Definition:Closed Algebraic Structure|closed]] under [[Definition:Ring Product|ring product]].
+Finally, we invoke [[Structure Induced by Ring Operations is Ring]], which shows that $\struct {A, +, \circ}$ is a [[Definition:Commutative and Unitary Ring|commutative ring with unity]].
+{{qed}}
+[[Category:Polynomial Theory]]
+d0rhzr8jw0klq6yvjr2jn9d9ky9hnhe
+\end{proof}<|endoftext|>
+\section{Knaster-Tarski Lemma}
+Tags: Complete Lattices
+
+\begin{theorem}
+Let $\left({L, \preceq}\right)$ be a [[Definition:Complete Lattice|complete lattice]].
+Let $f: L \to L$ be an [[Definition:Increasing Mapping|increasing mapping]].
+Then $f$ has a [[Definition:Smallest Element|least]] [[Definition:Fixed Point|fixed point]] and a [[Definition:Greatest Element|greatest]] [[Definition:Fixed Point|fixed point]].
+\end{theorem}
+
+\begin{proof}
+Let $P = \left\{{x \in L: x \preceq f \left({x}\right)}\right\}$.
+Let $p = \bigvee P$, the [[Definition:Supremum of Set|supremum]] of $P$.
+Let $x \in P$.
+Then by the definition of [[Definition:Supremum of Set|supremum]]:
+: $x \preceq p$
+Since $f$ is [[Definition:Increasing Mapping|increasing]]:
+: $f \left({x}\right) \preceq f \left({p}\right)$
+By the definition of $P$:
+: $x \preceq f \left({x}\right)$
+Thus because $\preceq$ is an [[Definition:Ordering|ordering]], and therefore [[Definition:Transitive Relation|transitive]]:
+: $x \preceq f \left({p}\right)$
+As this holds for all $x \in P$, $f \left({p}\right)$ is an [[Definition:Upper Bound of Set|upper bound]] of $P$.
+By the definition of [[Definition:Supremum of Set|supremum]]:
+: $p \preceq f \left({p}\right)$
+As $f$ is [[Definition:Increasing Mapping|increasing]]:
+: $f \left({p}\right) \preceq f \left({f \left({p}\right)}\right)$
+Thus by the definition of $P$:
+: $f \left({p}\right) \in P$
+Since $p$ is the [[Definition:Supremum of Set|supremum]] of $P$:
+: $f \left({p}\right) \preceq p$
+Since we already know that $p \preceq f \left({p}\right)$:
+: $f \left({p}\right) = p$
+because $\preceq$ is an [[Definition:Ordering|ordering]] and therefore [[Definition:Antisymmetric Relation|antisymmetric]].
+Thus $p$ is a [[Definition:Fixed Point|fixed point]] of $f$.
+We have that $\preceq$ is an [[Definition:Ordering|ordering]], and therefore [[Definition:Reflexive Relation|reflexive]].
+Thus every [[Definition:Fixed Point|fixed point]] of $f$ is in $P$.
+So $p$ is the [[Definition:Greatest Element|greatest]] [[Definition:Fixed Point|fixed point]] of $f$.
+Now note that $f$ is also [[Definition:Increasing Mapping|increasing]] in the [[Definition:Dual Ordering|dual ordering]].
+Thus $f$ also has a [[Definition:Greatest Element|greatest]] [[Definition:Fixed Point|fixed point]] in the [[Definition:Dual Ordering|dual ordering]].
+That is, it has a [[Definition:Smallest Element|least]] [[Definition:Fixed Point|fixed point]] in the original [[Definition:Ordering|ordering]].
+{{qed}}
+{{Namedfor|Bronisław Knaster|name2 = Alfred Tarski|cat = Knaster|cat2 = Tarski}}
+\end{proof}<|endoftext|>
+\section{Knaster-Tarski Theorem}
+Tags: Complete Lattices
+
+\begin{theorem}
+Let $\left({L, \preceq}\right)$ be a [[Definition:Complete Lattice|complete lattice]].
+Let $f: L \to L$ be an [[Definition:Increasing Mapping|increasing mapping]].
+Let $F$ be the [[Definition:Set|set]] (or [[Definition:Class (Class Theory)|class]]) of [[Definition:Fixed Point|fixed points]] of $f$.
+Then $\left({F, \preceq}\right)$ is a [[Definition:Complete Lattice|complete lattice]].
+\end{theorem}
+
+\begin{proof}
+Let $S \subseteq F$.
+Let $s = \bigvee S$ be the [[Definition:Supremum of Set|supremum]] of $S$.
+We wish to show that there is an [[Definition:Element|element]] of $F$ that [[Definition:Succeed|succeeds]] all [[Definition:Element|elements]] of $S$ and is the [[Definition:Smallest Element|smallest element]] of $F$ to do so.
+By the definition of [[Definition:Supremum of Set|supremum]], an [[Definition:Element|element]] [[Definition:Succeed|succeeds]] all [[Definition:Element|elements]] of $S$ {{iff}} it [[Definition:Succeed|succeeds]] $s$.
+Let $U = s^\succeq$ be the [[Definition:Upper Closure|upper closure]] of $s$.
+Thus we seek the [[Definition:Smallest Element|smallest]] [[Definition:Fixed Point|fixed point]] of $f$ that lies in $U$.
+Note that $U = \left[{s \,.\,.\, \top}\right]$, the [[Definition:Closed Interval|closed interval]] between $s$ and the [[Definition:Top (Lattice Theory)|top]] element of $L$.
+First we show that $U$ is [[Definition:Closed under Mapping|closed]] under $f$.
+We have that:
+:$\forall a \in S: a \preceq s$
+so:
+: $a = f \left({a}\right) \preceq f \left({s}\right)$
+Thus $f \left({s}\right)$ is an upper bound of $S$, so by the definition of [[Definition:Supremum of Set|supremum]], $s \preceq f \left({s}\right)$.
+Let $x \in U$.
+Then $s \preceq x$.
+So:
+: $f \left({s}\right) \preceq f \left({x}\right)$
+Since $s \preceq f \left({s}\right)$, it follows that:
+: $s \preceq f \left({x}\right)$
+so:
+: $f \left({x}\right) \in U$
+Thus the restriction of $f$ to $U$ is an increasing mapping from $U$ to $U$.
+By [[Interval in Complete Lattice is Complete Lattice]], $\left({U, \preceq}\right)$ is a [[Definition:Complete Lattice|complete lattice]].
+Thus by [[Knaster-Tarski Lemma]], $f$ has a [[Definition:Smallest Element|smallest]] [[Definition:Fixed Point|fixed point]] in $U$.
+Thus $S$ has a [[Definition:Supremum of Set|supremum]] in $F$.
+A precisely similar argument shows that $S$ has an [[Definition:Infimum of Set|infimum]] in $F$.
+Since this holds for all $S \subseteq F$, it follows that $\left({F, \preceq}\right)$ is a [[Definition:Complete Lattice|complete lattice]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Degree of Product of Polynomials over Ring/Corollary 2}
+Tags: Degree of Product of Polynomials over Ring
+
+\begin{theorem}
+Let $\struct {D, +, \circ}$ be an [[Definition:Integral Domain|integral domain]] whose [[Definition:Ring Zero|zero]] is $0_D$.
+Let $D \sqbrk X$ be the [[Definition:Ring of Polynomials|ring of polynomials]] over $D$ in the [[Definition:Indeterminate (Polynomial Theory)|indeterminate]] $X$.
+For $f \in D \sqbrk X$ let $\map \deg f$ denote the [[Definition:Degree of Polynomial|degree]] of $f$.
+Then:
+:$\forall f, g \in D \sqbrk X: \map \deg {f g} = \map \deg f + \map \deg g$
+\end{theorem}
+
+\begin{proof}
+An [[Definition:Integral Domain|integral domain]] is a [[Definition:Commutative and Unitary Ring|commutative and unitary ring]] with no [[Definition:Proper Zero Divisor|proper zero divisors]].
+The result follows from [[Degree of Product of Polynomials over Ring/Corollary 1|Degree of Product of Polynomials over Ring: Corollary 1]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Knaster-Tarski Lemma/Power Set}
+Tags: Mapping Theory, Power Set
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $\powerset S$ be the [[Definition:Power Set|power set]] of $S$.
+Let $f: \powerset S \to \powerset S$ be a $\subseteq$-[[Definition:Increasing Mapping|increasing mapping]].
+That is, suppose that for all $T, U \in \powerset S$:
+:$T \subseteq U \implies \map f T \subseteq \map f U$
+Then $f$ has a [[Definition:Greatest Set by Set Inclusion|greatest]] [[Definition:Fixed Point|fixed point]] and a [[Definition:Smallest Set by Set Inclusion|least]] [[Definition:Fixed Point|fixed point]].
+\end{theorem}
+
+\begin{proof}
+By [[Power Set is Complete Lattice]], $\struct {\powerset S, \cap, \cup, \subseteq}$ is a [[Definition:Complete Lattice|complete lattice]].
+Thus the theorem holds by the [[Knaster-Tarski Lemma]].
+{{qed}}
+{{Namedfor|Bronisław Knaster|name2 = Alfred Tarski|cat = Knaster|cat2 = Tarski}}
+\end{proof}<|endoftext|>
+\section{Cantor-Bernstein-Schröder Theorem/Proof 6}
+Tags: Cantor-Bernstein-Schröder Theorem
+
+\begin{theorem}
+Let $A$ and $B$ be [[Definition:Set|sets]].
+Let $f: A \to B$ and $g: B \to A$ be [[Definition:Injection|injections]].
+Then there is a [[Definition:Bijection|bijection]] $h: A \to B$; so that $A$ and $B$ are [[Definition:Set Equivalence|equivalent]].
+Furthermore:
+: For all $x \in A$ and $y \in B$, if $y = h \left({x}\right)$ then either $y = f \left({x}\right)$ or $x = g \left({y}\right)$.
+\end{theorem}
+
+\begin{proof}
+Let $\mathcal P \left({A}\right)$ be the [[Definition:Power Set|power set]] of $A$.
+Define a mapping $E: \mathcal P \left({A}\right) \to \mathcal P \left({A}\right)$ thus:
+: $E \left({S}\right) = A \setminus g \left({B \setminus f \left({S}\right)}\right)$
+=== $E$ is increasing ===
+Let $S, T \in \mathcal P \left({A}\right)$ such that $S \subseteq T$.
+Then:
+{{begin-eqn}}
+{{eqn | l = f \left({S}\right)
+ | o = \subseteq
+ | r = f \left({T}\right)
+ | c = [[Image of Subset is Subset of Image]]
+}}
+{{eqn | ll= \implies
+ | l = B \setminus f \left({T}\right)
+ | o = \subseteq
+ | r = B \setminus f \left({S}\right)
+ | c = [[Set Difference with Subset is Superset of Set Difference]]
+}}
+{{eqn | ll= \implies
+ | l = g \left({B \setminus f \left({T}\right)}\right)
+ | o = \subseteq
+ | r = g \left({B \setminus f \left({S}\right)}\right)
+ | c = [[Image of Subset is Subset of Image]]
+}}
+{{eqn | ll= \implies
+ | l = A \setminus g \left({B \setminus f \left({S}\right)}\right)
+ | o = \subseteq
+ | r = A \setminus g \left({B \setminus f \left({T}\right)}\right)
+ | c = [[Set Difference with Subset is Superset of Set Difference]]
+}}
+{{end-eqn}}
+That is, $E \left({S}\right) \subseteq E \left({T}\right)$.
+{{qed|lemma}}
+By the [[Knaster-Tarski Lemma/Power Set|Knaster-Tarski Lemma]], $E$ has a [[Definition:Fixed Point|fixed point]] $X$.
+By the definition of [[Definition:Fixed Point|fixed point]]:
+: $E \left({X}\right) = X$
+Thus by the definition of $E$:
+: $A \setminus g \left({B \setminus f \left({X}\right)}\right) = X$
+Therefore:
+: $(1): \quad A \setminus \left({A \setminus g \left({B \setminus f \left({X}\right)}\right)}\right) = A \setminus X$
+Since $g$ is a [[Definition:Mapping|mapping]] into $A$:
+: $g \left({B \setminus f \left({X}\right)}\right) \subseteq A$
+Thus by [[Relative Complement of Relative Complement]]:
+: $A \setminus \left({A \setminus g \left({B \setminus f \left({X}\right)}\right)}\right) = g \left({B \setminus f \left({X}\right)}\right)$
+Thus by $(1)$:
+:$g \left({B \setminus f \left({X}\right)}\right) = A \setminus X$
+Let $f' = f \restriction_{X \times f \left({X}\right)}$ be the [[Definition:Restriction of Mapping|restriction]] of $f$ to $X \times f \left({X}\right)$.
+Similarly, let $g' = g \restriction_{\left({B \setminus f \left({X}\right)}\right) \times \left({A \setminus X}\right)} = g \restriction_{\left({B \setminus f \left({X}\right)}\right) \times g \left({B \setminus f \left({X}\right)}\right)}$.
+By [[Injection to Image is Bijection]], $f'$ and $g'$ are both [[Definition:Bijection|bijections]].
+Define a [[Definition:Relation|relation]] $h: A \to B$ by $h = f' \cup {g'}^{-1}$.
+We will show that $h$ is a [[Definition:Bijection|bijection]] from $A$ onto $B$.
+The [[Definition:Domain of Mapping|domain]] of $f'$ is $X$, which is [[Definition:Disjoint Sets|disjoint]] from the [[Definition:Codomain of Mapping|codomain]], $A \setminus X$, of $g'$.
+The [[Definition:Domain of Mapping|domain]] of $g'$ is $B \setminus f \left({X}\right)$, which is [[Definition:Disjoint Sets|disjoint]] from the [[Definition:Codomain of Mapping|codomain]], $f \left({X}\right)$, of $f'$.
+Let $h = f' \cup {g'}^{-1}$.
+By the [[Union of Bijections with Disjoint Domains and Codomains is Bijection/Corollary|corollary to Union of Bijections with Disjoint Domains and Codomains is Bijection]]:
+: $h$ is a [[Definition:Bijection|bijection]] from $X \cup \left({A \setminus X}\right)$ onto $f \left({X}\right) \cup \left({B \setminus f \left({X}\right)}\right)$.
+By [[Union with Relative Complement]], $h$ is a [[Definition:Bijection|bijection]] from $A$ onto $B$.
+Since $f' \subseteq f$ and $g' \subseteq g$, each element of $h$ is an element of $f$ or of $g^{-1}$.
+That is, if $y = h \left({x}\right)$ then either $y = f \left({x}\right)$ or $x = g \left({y}\right)$.
+{{Qed}}
+\end{proof}<|endoftext|>
+\section{Ring of Polynomial Forms over Integral Domain is Integral Domain}
+Tags: Polynomial Rings, Integral Domains
+
+\begin{theorem}
+Let $\struct {D, +, \circ}$ be an [[Definition:Integral Domain|integral domain]] whose [[Definition:Ring Zero|zero]] is $0_D$.
+Let $\struct {D \sqbrk X, \oplus, \odot}$ be the [[Definition:Ring of Polynomial Forms|ring of polynomial forms]] over $D$ in the [[Definition:Indeterminate (Polynomial Theory)|indeterminate]] $X$.
+Then $\struct {D \sqbrk X, \oplus, \odot}$ is an [[Definition:Integral Domain|integral domain]].
+\end{theorem}
+
+\begin{proof}
+By definition an [[Definition:Integral Domain|integral domain]] is a [[Definition:Commutative and Unitary Ring|commutative ring with unity]].
+From [[Ring of Polynomial Forms is Commutative Ring with Unity]] it follows that $\struct {D \sqbrk X, \oplus, \odot}$ is a [[Definition:Commutative and Unitary Ring|commutative ring with unity]].
+Suppose $f, g \in D \sqbrk X$ such that neither $f$ nor $g$ are the [[Definition:Null Polynomial|null polynomial]].
+Let $\map \deg f = n$ and $\map \deg g = m$.
+From [[Degree of Product of Polynomials over Integral Domain]] the [[Definition:Degree of Polynomial|degree]] of $f \odot g$ is $n + m$.
+Thus by definition $f \odot g$ is not the [[Definition:Null Polynomial|null polynomial]] of $D \sqbrk X$.
+Thus neither $f$ nor $g$ is a [[Definition:Proper Zero Divisor|proper zero divisor]] of $D \sqbrk X$.
+This holds for any two arbitrary non-[[Definition:Null Polynomial|null polynomials]] elements of $D \sqbrk X$.
+Hence $\struct {D \sqbrk X, \oplus, \odot}$ is a [[Definition:Commutative and Unitary Ring|commutative ring with unity]] with no [[Definition:Proper Zero Divisor|proper zero divisors]].
+That is, $\struct {D \sqbrk X, \oplus, \odot}$ is an [[Definition:Integral Domain|integral domain]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Union of One-to-Many Relations with Disjoint Images is One-to-Many}
+Tags: Relation Theory
+
+\begin{theorem}
+Let $S_1, S_2, T_1, T_2$ be [[Definition:Set|sets]] or [[Definition:Class (Class Theory)|classes]].
+Let $\mathcal R_1$ be a [[Definition:One-to-Many Relation|one-to-many relation]] on $S_1 \times T_1$.
+Let $\mathcal R_2$ be a [[Definition:One-to-Many Relation|one-to-many relation]] on $S_2 \times T_2$.
+Suppose that the [[Definition:Image Set of Relation|images]] of $\mathcal R_1$ and $\mathcal R_2$ are [[Definition:Disjoint Sets|disjoint]].
+Then $\mathcal R_1 \cup \mathcal R_2$ is a [[Definition:One-to-Many Relation|one-to-many relation]] on $(S_1 \cup S_2) \times (T_1 \cup T_2)$.
+\end{theorem}
+
+\begin{proof}
+Let $Q = \mathcal R_1 \cup \mathcal R_2$.
+Then $Q \subseteq (S_1 \times T_1) \cup (S_2 \times T_2) \subseteq (S_1 \cup S_2) \times (T_1 \cup T_2)$.
+Thus $Q$ is a [[Definition:Relation|relation]] on $(S_1 \cup S_2) \times (T_1 \cup T_2)$.
+Let $T'_1$ and $T'_2$ be the [[Definition:Image of Relation|images]] of $\mathcal R_1$ and $\mathcal R_2$, respectively.
+Let $(x_1, y), (x_2, y) \in Q$.
+Then $y \in T'_1$ or $y \in T'_2$.
+If $y \in T'_1$ then $y \notin T'_2$, so neither $(x_1, y)$ nor $(x_2, y)$ is in $\mathcal R_2$, so these pairs are both in $\mathcal R_1$.
+As $\mathcal R_1$ is [[Definition:One-to-Many Relation|one-to-many]], $x_1 = x_2$.
+A similar argument leads to the same result for $y \in T'_2$.
+As this holds for all such $x_1, x_2, y$: $Q$ is a one-to-many relation.
+{{qed}}
+[[Category:Relation Theory]]
+l56ztdhi5bg9c7f5cwi8xib4au1autp
+\end{proof}<|endoftext|>
+\section{Union of Many-to-One Relations with Disjoint Domains is Many-to-One}
+Tags: Relation Theory
+
+\begin{theorem}
+Let $S_1, S_2, T_1, T_2$ be [[Definition:Set|sets]] or [[Definition:Class (Class Theory)|classes]].
+Let $\RR_1$ be a [[Definition:Many-to-One Relation|many-to-one relation]] on $S_1 \times T_1$.
+Let $\RR_2$ be a [[Definition:Many-to-One Relation|many-to-one relation]] on $S_2 \times T_2$.
+Suppose that the [[Definition:Domain of Relation|domains]] of $\RR_1$ and $\RR_2$ are [[Definition:Disjoint Sets|disjoint]].
+Then $\RR_1 \cup \RR_2$ is a [[Definition:Many-to-One Relation|many-to-one relation]] on $\paren {S_1 \cup S_2} \times \paren {T_1 \cup T_2}$.
+\end{theorem}
+
+\begin{proof}
+Let $\RR = \RR_1 \cup \RR_2$.
+Let $\tuple {x, y_1}, \tuple {x, y_2} \in \RR$.
+By the definition of [[Definition:Set Union|union]], $\tuple {x, y_1}$ and $\tuple {x, y_2}$ are each in $\RR_1$ or $\RR_2$.
+Suppose that both are in $\RR_1$.
+Then since $\RR_1$ is a [[Definition:Many-to-One Relation|many-to-one relation]], $y_1 = y_2$.
+Suppose that $\tuple {x, y_1} \in \RR_1$ and $\tuple {x, y_2} \in \RR_2$.
+Then $x$ is in the [[Definition:Domain of Relation|domain]] of $\RR_1$ and that of $\RR_2$, contradicting the premise, so this cannot occur.
+The other two cases are precisely similar.
+Thus in all cases $y_1 = y_2$.
+As this holds for all such pairs, $\RR$ is [[Definition:Many-to-One Relation|many-to-one]].
+{{qed}}
+[[Category:Relation Theory]]
+93vaix66p8b4htn9iiddor48mxwxzmb
+\end{proof}<|endoftext|>
+\section{Nth Root of Integer is Integer or Irrational}
+Tags: Irrationality Proofs, Integers
+
+\begin{theorem}
+Let $n$ be a [[Definition:Natural Number|natural number]].
+Let $x$ be an [[Definition:Integer|integer]].
+If the [[Definition:Root (Analysis)|$n$th root]] of $x$ is not an [[Definition:Integer|integer]], it must be [[Definition:Irrational Number|irrational]].
+\end{theorem}
+
+\begin{proof}
+We prove the [[Definition:Contrapositive Statement|contrapositive]]: if the [[Definition:Root (Analysis)|$n$th root]] of $x$ is [[Definition:Rational Number|rational]], it must be an [[Definition:Integer|integer]].
+By [[Existence of Canonical Form of Rational Number]], there exist an [[Definition:Integer|integer]] $a$ and a [[Definition:Natural Number|natural number]] $b$ which are [[Definition:Coprime Integers|coprime]] such that:
+{{begin-eqn}}
+{{eqn | l = x^{1/n}
+ | r = \frac a b
+}}
+{{eqn | ll= \leadsto
+ | l = x
+ | r = \frac {a^n} {b^n}
+}}
+{{end-eqn}}
+Since $a$ and $b$ are [[Definition:Coprime Integers|coprime]], $a^n$ and $b^n$ are [[Definition:Coprime Integers|coprime]] by [[Powers of Coprime Numbers are Coprime]].
+Hence $\dfrac {a^n} {b^n}$ is by definition in [[Definition:Canonical Form of Rational Number|canonical form]].
+Suppose $b \ne 1$.
+As the [[Definition:Denominator|denominator]] of $\dfrac {a^n} {b^n}$ is not $1$, $x = \dfrac {a^n} {b^n}$ is not an [[Definition:Integer|integer]].
+This is a [[Definition:Contradiction|contradiction]].
+Thus $b = 1$, and thus:
+:$x^{1/n} = a$
+which is an [[Definition:Integer|integer]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Union of Bijections with Disjoint Domains and Codomains is Bijection}
+Tags: Mapping Theory
+
+\begin{theorem}
+Let $A$, $B$, $C$, and $D$ be [[Definition:Set|sets]] or [[Definition:Class (Class Theory)|classes]].
+Let $A \cap B = C \cap D = \varnothing$.
+Let $f: A \to C$ and $g: B \to D$ be [[Definition:Bijection|bijections]].
+Then $f \cup g: A \cup B \to C \cup D$ is also a [[Definition:Bijection|bijection]].
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Bijection|bijection]], $f$ and $g$ are [[Definition:Many-to-One Relation|many-to-one]] and [[Definition:One-to-Many Relation|one-to-many relations]].
+By [[Union of Many-to-One Relations with Disjoint Domains is Many-to-One]] and [[Union of One-to-Many Relations with Disjoint Images is One-to-Many]]:
+: $f \cup g$ is [[Definition:Many-to-One Relation|many-to-one]] and [[Definition:One-to-Many Relation|one-to-many]].
+Thus to show $f \cup g$ is a [[Definition:Bijection|bijection]] requires us only to demonstrate that it is both [[Definition:Left-Total Relation|left-total]] and [[Definition:Right-Total Relation|right-total]].
+We will show that $f \cup g$ is [[Definition:Left-Total Relation|left-total]].
+Let $x \in A \cup B$.
+Then $x \in A$ or $x \in B$.
+If $x \in A$ then since $f$ is [[Definition:Left-Total Relation|left-total]] there is a $y \in C$ such that $\left({x, y}\right) \in f$.
+By the definition of [[Definition:Set Union|union]], $\left({x, y}\right) \in f \cup g$.
+If $x \in B$ then since $g$ is [[Definition:Left-Total Relation|left-total]] there is a $y \in D$ such that $\left({x, y}\right) \in g$.
+Then by the definition of [[Definition:Set Union|union]], $\left({x, y}\right) \in f \cup g$.
+As this holds for all $x$, $f \cup g$ is [[Definition:Left-Total Relation|left-total]].
+The proof that $f \cup g$ is [[Definition:Right-Total Relation|right-total]] is similar.
+Thus it has been demonstrated that:
+: $f \cup g$ is [[Definition:Many-to-One Relation|many-to-one]]
+: $f \cup g$ is [[Definition:One-to-Many Relation|one-to-many]]
+: $f \cup g$ is [[Definition:Left-Total Relation|left-total]]
+: $f \cup g$ is [[Definition:Right-Total Relation|right-total]]
+and therefore, by definition, a [[Definition:Bijection|bijection]].
+{{qed}}
+[[Category:Mapping Theory]]
+b0uu79b9e81evrjh8lvkh3j7vtlt6ak
+\end{proof}<|endoftext|>
+\section{Interval in Complete Lattice is Complete Lattice}
+Tags: Lattice Theory
+
+\begin{theorem}
+Let $\left({L, \preceq}\right)$ be a [[Definition:Complete Lattice|complete lattice]].
+Let $a, b \in L$ with $a \preceq b$.
+Let $\left[{a \,.\,.\, b}\right]$ be the [[Definition:Closed Interval|closed interval]] between $a$ and $b$.
+{{explain|Demonstrate that for each $a, b \in L$ that $\left[{a \,.\,.\, b}\right]$ exists and is unique.}}
+Then $\left[{a \,.\,.\, b}\right]$ is also a [[Definition:Complete Lattice|complete lattice]] under $\preceq$.
+\end{theorem}
+
+\begin{proof}
+Let $I = \left[{a \,.\,.\, b}\right]$.
+Let $S \subseteq I$.
+If $S = \varnothing$, then it has a [[Definition:Supremum of Set|supremum]] in $I$ of $a$ and an [[Definition:Infimum of Set|infimum]] in $I$ of $b$.
+Let $S \ne \varnothing$.
+Since $S \subseteq I$, $a$ is a [[Definition:Lower Bound of Set|lower bound]] of $S$ and $b$ is an [[Definition:Upper Bound of Set|upper bound]] of $S$.
+Since $L$ is a [[Definition:Complete Lattice|complete lattice]], $S$ has an [[Definition:Infimum of Set|infimum]], $p$, and a [[Definition:Supremum of Set|supremum]], $q$, in $L$.
+Thus by the definitions of [[Definition:Infimum of Set|infimum]] and [[Definition:Supremum of Set|supremum]]:
+: $a \preceq p$ and $q \preceq b$
+Let $x \in S$.
+Since an [[Definition:Infimum of Set|infimum]] is a [[Definition:Lower Bound of Set|lower bound]]:
+: $p \preceq x$
+Since a [[Definition:Supremum of Set|supremum]] is an [[Definition:Upper Bound of Set|upper bound]]:
+: $x \preceq q$
+Thus $a \preceq p \preceq x \preceq q \preceq b$.
+Since $\preceq$ is an [[Definition:Ordering|ordering]], it is [[Definition:Transitive Relation|transitive]], so by [[Transitive Chaining]]:
+:$a \preceq p \preceq b$ and $a \preceq q \preceq b$.
+That is, $p, q \in I$.
+Thus $p$ and $q$ are the [[Definition:Infimum of Set|infimum]] and [[Definition:Supremum of Set|supremum]] of $S$ in $I$.
+As every subset of $I$ has a [[Definition:Supremum of Set|supremum]] and [[Definition:Infimum of Set|infimum]] in $I$, $I$ is a [[Definition:Complete Lattice|complete lattice]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Dedekind-Complete Bounded Ordered Set is Complete Lattice}
+Tags: Complete Lattices
+
+\begin{theorem}
+Let $\left({L, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Let $L$ have a [[Definition:Lower Bound of Set|lower bound]] $\bot$ and an [[Definition:Upper Bound of Set|upper bound]] $\top$.
+Let $\left({L, \preceq}\right)$ be [[Definition:Dedekind Complete|Dedekind-complete]].
+Then $\left({L, \preceq}\right)$ is a [[Definition:Complete Lattice|complete lattice]].
+\end{theorem}
+
+\begin{proof}
+Let $S \subseteq L$.
+If $S = \varnothing$, then $S$ has a [[Definition:Supremum of Set|supremum]] of $\bot$ and an [[Definition:Infimum of Set|infimum]] of $\top$.
+Let $S \ne \varnothing$.
+$S$ is [[Definition:Bounded Above Set|bounded above]] by $\top$.
+As $\left({L, \preceq}\right)$ is [[Definition:Dedekind Complete|Dedekind complete]], $S$ has a [[Definition:Supremum of Set|supremum]].
+$S$ is [[Definition:Bounded Below Set|bounded below]] by $\bot$.
+By [[Dedekind Completeness is Self-Dual]], $S$ has an [[Definition:Infimum of Set|infimum]].
+Thus every [[Definition:Subset|subset]] of $L$ has a [[Definition:Supremum of Set|supremum]] and an [[Definition:Infimum of Set|infimum]].
+So, by definition, $\left({L, \preceq}\right)$ is a [[Definition:Complete Lattice|complete lattice]].
+{{qed}}
+[[Category:Complete Lattices]]
+k3nlnv25ytz2hg1qp0qvvfaaszcffkv
+\end{proof}<|endoftext|>
+\section{Set Difference with Subset is Superset of Set Difference}
+Tags: Set Difference
+
+\begin{theorem}
+Let $A, B, S$ be [[Definition:Set|sets]] or [[Definition:Class (Class Theory)|classes]].
+Suppose that $A \subseteq B$.
+Then $S \setminus B \subseteq S \setminus A$, where $\setminus$ represents [[Definition:Set Difference|set difference]].
+\end{theorem}
+
+\begin{proof}
+Let $x \in S \setminus B$.
+Then by the definition of [[Definition:Set Difference|set difference]]:
+: $x \in S$ and $x \notin B$
+Suppose [[Proof by Contradiction|for the sake of contradiction]] that $x \in A$.
+Then since $A$ is a [[Definition:Subset|subset]] (or [[Definition:Subclass|subclass]]) of $B$, $x \in B$, a [[Definition:Contradiction|contradiction]].
+Thus $x \notin A$.
+Since $x \in S$ and $x \notin A$, we conclude that $x \in S \setminus A$.
+As this holds for all $x \in S \setminus B$:
+: $S \setminus B \subseteq S \setminus A$
+{{qed}}
+[[Category:Set Difference]]
+t7nq6c7ruyp6dorjy26rdcerzo37il9
+\end{proof}<|endoftext|>
+\section{Knaster-Tarski Lemma/Corollary}
+Tags: Complete Lattices
+
+\begin{theorem}
+Let $\struct {L, \preceq}$ be a [[Definition:Complete Lattice|complete lattice]].
+Let $f: L \to L$ be an [[Definition:Increasing Mapping|increasing mapping]].
+Then $f$ has a [[Definition:Fixed Point|fixed point]]
+\end{theorem}
+
+\begin{proof}
+By the [[Knaster-Tarski Lemma]], $f$ has a [[Definition:Smallest Element|least]] [[Definition:Fixed Point|fixed point]].
+Thus it has a [[Definition:Fixed Point|fixed point]].
+{{qed}}
+[[Category:Complete Lattices]]
+1umbgdu1usc8o7l7c7kujl3z3u5u7a7
+\end{proof}<|endoftext|>
+\section{Knaster-Tarski Lemma/Corollary/Power Set}
+Tags: Complete Lattices
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]].
+Let $\mathcal P \left({S}\right)$ be the [[Definition:Power Set|power set]] of $S$.
+Let $f: \mathcal P \left({S}\right) \to \mathcal P \left({S}\right)$ be a $\subseteq$-[[Definition:Increasing Mapping|increasing mapping]].
+That is, suppose that for all $T, U \in \mathcal P \left({S}\right)$:
+: $T \subseteq U \implies f \left({T}\right) \subseteq f\left({U}\right)$
+Then $f$ has a [[Definition:Fixed Point|fixed point]].
+\end{theorem}<|endoftext|>
+\section{Natural Number has Same Prime Factors as Integer Power}
+Tags: Natural Numbers
+
+\begin{theorem}
+Let $x$ be a [[Definition:Natural Number|natural number]] such that $x > 1$.
+Let $n \ge 1$ be a [[Definition:Strictly Positive Integer|(strictly) positive integer]].
+The [[Definition:Integer Power|$n$th power]] of $x$ has the same [[Definition:Prime Factor|prime factors]] as $x$.
+\end{theorem}
+
+\begin{proof}
+{{handwaving}}
+Let $p$ be a [[Definition:Prime Number|prime number]] such that $p$ divides $x^n$.
+This is possible because $x > 1$, so $x^n > 1$, hence $x^n$ has prime divisors due to [[Fundamental Theorem of Arithmetic]].
+To prove the statement, we need to show $p$ divides $x$.
+We will prove this statement by the [[Principle of Mathematical Induction]] on $n$.
+=== Basis of the Induction ===
+
+We have $n = 1$
+Clearly, since $p$ divides $x^1 = x$, then $p$ divides $x$.
+{{qed |lemma}}
+=== Inductive Step ===
+Suppose that for a given $n$, if $p$ divides $x^n$ then $p$ divides $x$.
+Then, if $p$ divides $x^{n+1}$, by definition of [[Definition:Prime Number/Definition 6|prime number]], either $p$ divides $x^n$ or $p$ divides $x$.
+If $p$ divides $x^n$, we get from induction hypothesis that $p$ divides $x$.
+The other case trivially leads to our conclusion.
+{{qed|lemma}}
+Hence the result, by [[Principle of Mathematical Induction]].
+{{qed}}
+Conversely, let $p$ be a prime number that divides $x$. Then $p$ divides $x \cdot x^{n - 1} = x^n$, as required.
+{{qed}}
+[[Category:Natural Numbers]]
+crq4vrwlccilc19abse60k1dm99lvau
+\end{proof}<|endoftext|>
+\section{Rule of Simplification/Sequent Form/Formulation 1/Form 1}
+Tags: Rule of Simplification
+
+\begin{theorem}
+:$p \land q \vdash p$
+\end{theorem}<|endoftext|>
+\section{Rule of Simplification/Sequent Form/Formulation 1/Form 2}
+Tags: Rule of Simplification
+
+\begin{theorem}
+:$p \land q \vdash q$
+\end{theorem}<|endoftext|>
+\section{Alternative Definition of Ordinal in Well-Founded Theory}
+Tags: Ordinals
+
+\begin{theorem}
+A [[Definition:Set|set]] $S$ is an [[Definition:Ordinal|ordinal]] {{iff}} $S$ is [[Definition:Transitive Set|transitive]] and $\forall x, y \in S: \left({x \in y \lor x = y \lor y \in x}\right)$.
+\end{theorem}
+
+\begin{proof}
+=== Forward Implication ===
+Let $S$ be an [[Definition:Ordinal|ordinal]].
+By [[Alternative Definition of Ordinal]], $S$ is [[Definition:Transitive Set|transitive]] and [[Definition:Strict Well-Ordering|strictly well-ordered]] by the [[Definition:Epsilon Relation|epsilon relation]].
+By [[Strict Well-Ordering is Strict Total Ordering]], $S$ is [[Definition:Strict Total Ordering|strictly totally ordered]] by $\in$.
+Thus:
+:$\forall x, y \in S: \left({ x \in y \lor x = y \lor y \in x }\right)$
+{{qed|lemma}}
+=== Reverse Implication ===
+Let $S$ be a [[Definition:Transitive Set|transitive set]] such that for any $x, y \in S$, $x \in y \lor y \in x \lor x = y$.
+We first show that $\in$ is a [[Definition:Strict Ordering|strict ordering]] of $S$.
+Asymmetric: Let $x, y \in S$.
+By [[Epsilon is Foundational]], $\{ x,y \}$ has an [[Definition:Minimal Element under Relation|$\Epsilon$-minimal]] [[Definition:Element|element]].
+Thus $x \notin y$ or $y \notin x$.
+Transitive: Let $x, y, z \in S$ with $x \in y$ and $y \in z$.
+By assumption, $x = z$, $x \in z$, or $z \in x$.
+Suppose for the sake of contradiction that $x = z$.
+Then $x \in y$ and $y \in x$, contradicting the fact that $\Epsilon$ is [[Definition:Asymmetric Relation|asymmetric]].
+Suppose that $z \in x$.
+Then $x \in y$, $y \in z$, and $z \in x$.
+Thus the set $\left\{ {x, y, z}\right\}$ has no [[Definition:Minimal Element under Relation|$\Epsilon$-minimal]] [[Definition:Element|element]], contradicting [[Epsilon is Foundational]].
+Thus $x \in z$.
+Thus $\in$ is a [[Definition:Strict Ordering|strict ordering]] of $S$.
+Let $T$ be a non-empty subset of $S$.
+By [[Epsilon is Foundational]], $T$ has an $\Epsilon$-minimal element, $m$.
+Since a [[Definition:Minimal Element under Relation|minimal element]] of a [[Definition:Strict Total Ordering|strictly totally ordered set]] is the [[Definition:Smallest Element|smallest element]], $\Epsilon$ strictly well-orders $S$.
+{{LinkWanted|Is there a link to the above statement?}}
+Thus by [[Alternative Definition of Ordinal]], $S$ is an ordinal.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Foundational Relation is Antireflexive/Corollary}
+Tags: Foundational Relations
+
+\begin{theorem}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+Suppose that $S$ is non-empty.
+Then $\preceq$ is not a [[Definition:Foundational Relation|foundational relation]].
+\end{theorem}
+
+\begin{proof}
+Since $S$ is non-empty, it has an element $x$.
+By the definition of [[Definition:Ordering|ordering]], $\preceq$ is a [[Definition:Reflexive Relation|reflexive relation]].
+Thus $x \preceq x$.
+By [[Foundational Relation is Antireflexive]], $\preceq$ is not a [[Definition:Foundational Relation|foundational relation]].
+{{qed}}
+[[Category:Foundational Relations]]
+qrrdxd0b32u7n4vqpuyl10bmczd2tla
+\end{proof}<|endoftext|>
+\section{Reflexive Reduction of Well-Founded Ordering is Foundational Relation}
+Tags: Reflexive Reductions, Order Theory, Foundational Relations
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $\preceq$ be a [[Definition:Well-Founded Ordering|well-founded ordering]] of $S$.
+Let $\prec$ be the [[Definition:Reflexive Reduction|reflexive reduction]] of $\preceq$.
+Then $\prec$ is a [[Definition:Foundational Relation|foundational relation]].
+\end{theorem}
+
+\begin{proof}
+Let $T$ be a [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] of $S$.
+Since $\preceq$ is a [[Definition:Well-Founded Ordering|well-founded ordering]], $T$ has a [[Definition:Minimal Element|minimal element]] with respect to the [[Definition:Ordering|ordering]] $\preceq$.
+That is, there is an [[Definition:Element|element]] $m \in T$ such that $\forall x \in T: \left({x \npreceq m}\right) \lor \left({x = m}\right)$.
+Let $x \in T$.
+Then $x \npreceq m$ or $x = m$.
+By the definition of [[Definition:Reflexive Reduction|reflexive reduction]], $\prec$ is a [[Definition:Subset|subset]] of $\preceq$.
+Thus if $x \npreceq m$, $x \nprec m$.
+If $x = m$ then by [[Reflexive Reduction is Antireflexive]], $x \nprec m$.
+As this holds for all $x \in T$, $m$ is [[Definition:Minimal Element under Relation|$\prec$-minimal]] in $T$.
+As each [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] of $S$ has a [[Definition:Minimal Element under Relation|$\prec$-minimal]], $\prec$ is a [[Definition:Foundational Relation|foundational relation]] on $S$.
+{{qed}}
+[[Category:Reflexive Reductions]]
+[[Category:Order Theory]]
+[[Category:Foundational Relations]]
+52y9rgrl5pcrhyplotzmqa4ol0yzujt
+\end{proof}<|endoftext|>
+\section{Epsilon Relation is Proper}
+Tags: Class Theory
+
+\begin{theorem}
+Let $\mathbb U$ be the [[Definition:Universal Class|universal class]].
+Let $\Epsilon$ be the [[Definition:Epsilon Relation|epsilon relation]].
+Then $\left({\mathbb U, \Epsilon}\right)$ is a [[Definition:Proper Relational Structure|proper relational structure]].
+\end{theorem}
+
+\begin{proof}
+{{NotZFC}}
+Let $x \in \mathbb U$.
+Then by the [[Axiom:Axiom of Extension|Axiom of Extension]]:
+: $x = \Epsilon^{-1} \left({x}\right)$
+where $\Epsilon^{-1} \left({x}\right)$ denotes the [[Definition:Preimage of Element under Relation|preimage]] of $x$ under $\Epsilon$.
+Since $x$ is a [[Definition:Set|set]], $\Epsilon^{-1} \left({x}\right) = x$ is a set.
+As this holds for all $x \in \mathbb U$, $\left({\mathbb U, \Epsilon}\right)$ is a [[Definition:Proper Relational Structure|proper relational structure]].
+{{qed}}
+[[Category:Class Theory]]
+770yldin2t1ypcot2usopgmypzljk5b
+\end{proof}<|endoftext|>
+\section{Rule of Simplification/Sequent Form/Formulation 2/Form 1}
+Tags: Rule of Simplification
+
+\begin{theorem}
+:$\vdash p \land q \implies p$
+\end{theorem}<|endoftext|>
+\section{Rule of Simplification/Sequent Form/Formulation 2/Form 2}
+Tags: Rule of Simplification
+
+\begin{theorem}
+:$\vdash p \land q \implies q$
+\end{theorem}<|endoftext|>
+\section{Relationship between Transitive Closure Definitions}
+Tags: Set Theory
+
+\begin{theorem}
+Let $x$ be a [[Definition:Set|set]].
+Let $a$ be the [[Definition:Smallest Set by Set Inclusion|smallest set]] such that $x \in a$ and $a$ is [[Definition:Transitive Set|transitive]].
+Let $b$ be the [[Definition:Smallest Set by Set Inclusion|smallest set]] such that $x \subseteq b$ and $b$ is [[Definition:Transitive Set|transitive]].
+Then $a = b \cup \set x$.
+\end{theorem}
+
+\begin{proof}
+We have that:
+:$x \in a$
+and $a$ is [[Definition:Transitive Set|transitive]].
+So:
+:$x \subseteq a$
+Thus by the definition of $b$ and of [[Definition:Smallest Set by Set Inclusion|smallest set]]:
+:$b \subseteq a$
+Since we also have $x \in a$:
+:$b \cup \set x \subseteq a$
+$x \in \set x$, so:
+:$x \in b \cup \set x$
+$b \cup \set x$ is [[Definition:Transitive Set|transitive]]:
+If $p \in b$ then:
+:$p \subseteq b \subseteq b \cup \set x$.
+If $p \in \set x$ then:
+:$p = x$
+So by the definition of $b$:
+:$p \subseteq b \subseteq b \cup \set x$
+Thus by the definition of $a$:
+:$a \subseteq b \cup \set x$
+Thus the theorem holds by definition of [[Definition:Set Equality/Definition 2|set equality]].
+{{qed}}
+[[Category:Set Theory]]
+l7ziwup2v5r1puow7hgnuvd76gft23u
+\end{proof}<|endoftext|>
+\section{Ordinal is not Element of Itself}
+Tags: Ordinals, Ordinal is not Element of Itself
+
+\begin{theorem}
+Let $x$ be an [[Definition:Ordinal|ordinal]].
+Then $x \notin x$.
+\end{theorem}
+
+\begin{proof}
+By [[Successor Set of Ordinal is Ordinal]], the [[Definition:Successor Set|successor]] of $x$ is an [[Definition:Ordinal|ordinal]].
+That is, $x^+ = x \cup \set x$ is an [[Definition:Ordinal|ordinal]].
+By [[Set is Element of Successor]], $x \in x^+$.
+Because $x^+$ is an [[Definition:Ordinal|ordinal]], it is [[Definition:Strict Well-Ordering|strictly well-ordered]] by the [[Definition:Epsilon Restriction|epsilon restriction]] $\Epsilon {\restriction_{x^+} }$.
+Because a strict ordering is [[Definition:Antireflexive Relation|antireflexive]] and $x \in x^+$, we conclude that $x \notin x$.
+{{qed}}
+\end{proof}
+
+\begin{proof}
+This result follows immediately from [[Set is Not Element of Itself]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Set is Element of Successor}
+Tags: Ordinals
+
+\begin{theorem}
+Let $x$ be a [[Definition:set|set]].
+Let $x^+$ be the [[Definition:Successor Set|successor]] of $x$.
+Then $x \in x^+$.
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Successor Set|successor set]]:
+: $x^+ = x \cup \{x\}$.
+By the definition of [[Definition:singleton|singleton]], $x \in \{x\}$.
+Thus by the definition of [[Definition:Set Union|union]], $x \in x^+$.
+{{qed}}
+[[Category:Ordinals]]
+7sif0tawwnckd2ghx1u58z7u5b04ugc
+\end{proof}<|endoftext|>
+\section{Element of Ordinal is Ordinal}
+Tags: Ordinals
+
+\begin{theorem}
+Let $n$ be an [[Definition:ordinal|ordinal]].
+Let $m \in n$.
+Then $m$ is also an ordinal.
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Ordinal|ordinal]], $n$ is [[Definition:Transitive Class|transitive]].
+Thus $m \subseteq n$.
+By [[Subset of Strictly Well-Ordered Set is Strictly Well-Ordered]], it follows that $m$ is [[Definition:Strict Well-Ordering|strictly well-ordered]] by the [[Definition:Epsilon Restriction|epsilon restriction]] $\Epsilon {\restriction_m}$.
+It is now to be shown that $m$ is [[Definition:Transitive Set|transitive]].
+If $m = \varnothing$ then the result follows by [[Empty Set is Transitive]].
+If $m \ne \varnothing$, then let $x \in m$.
+If $x = \varnothing$, then $x \subseteq m$ by [[Empty Set is Subset of All Sets]].
+If $x \ne \varnothing$, then let $y \in x$.
+It suffices to show that $y \in m$.
+Since $m \subseteq n$, it follows that $x \in n$.
+Also, $y \in x \land x \in n \implies y \in n$ because $n$ is [[Definition:Transitive Set|transitive]].
+And so $x \in n$, $y \in n$, and $m \in n$.
+A [[Definition:Strict Well-Ordering|strict well-ordering]] is [[Definition:Transitive Relation|transitive]] by definition.
+Therefore:
+:$y \in x \land x \in m \implies y \in m$
+Hence the result.
+{{qed}}
+[[Category:Ordinals]]
+17fv3n075gb8vz4h5nkxs3ksn9aqhpm
+\end{proof}<|endoftext|>
+\section{Modulus of Exponential of Imaginary Number is One}
+Tags: Complex Modulus, Modulus of Exponential of Imaginary Number is One
+
+\begin{theorem}
+Let $\cmod z$ denote the [[Definition:Complex Modulus|modulus]] of a [[Definition:Complex Number|complex number]] $z$.
+Let $e^z$ be the [[Definition:Complex Exponential Function|complex exponential]] of $z$.
+Let $x$ be [[Definition:Wholly Real|wholly real]].
+Then:
+:$\cmod {e^{i x} } = 1$
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = e^{i x}
+ | r = \cos x + i \sin x
+ | c = [[Euler's Formula]]
+}}
+{{eqn | ll= \leadsto
+ | l = \cmod {e^{i x} }
+ | r = \cmod {\cos x + i \sin x}
+}}
+{{eqn | r = \sqrt {\paren {\map \Re {\cos x + i \sin x} }^2 + \paren {\map \Im {\cos x + i \sin x} }^2}
+ | c = {{Defof|Complex Modulus}}
+}}
+{{eqn | r = \sqrt {\cos^2 x + \sin^2 x}
+ | c = as $x$ is [[Definition:Wholly Real|wholly real]]
+}}
+{{eqn | r = 1
+ | c = [[Sum of Squares of Sine and Cosine]]
+}}
+{{end-eqn}}
+{{qed}}
+[[Category:Complex Modulus]]
+[[Category:Modulus of Exponential of Imaginary Number is One]]
+4e54obxfjkhpr31zu5htm7dromjp3mj
+\end{proof}<|endoftext|>
+\section{Absolute Value of Power}
+Tags: Absolute Value Function
+
+\begin{theorem}
+Let $x$, $y$ be [[Definition:Real Number|real numbers]].
+Let $x^y$, [[Definition:Power (Algebra)|$x$ to the power of $y$]], be real.
+Then:
+:$\size {x^y} = \size x^y$
+\end{theorem}
+
+\begin{proof}
+If $x = 0$, the theorem [[Definition:Clearly|clearly]] holds, by the definition of [[Definition:Power of Zero|powers of zero]].
+Suppose $x \ne 0$.
+We use the interpretation of [[Definition:Real Number|real numbers]] as [[Definition:Wholly Real|wholly real complex numbers]].
+Likewise we interpret the [[Definition:Absolute Value|absolute value]] of $x$ as the [[Definition:Complex Modulus|modulus]] of $x$.
+Then $x$ can be expressed in [[Definition:Polar Form of Complex Number|polar form]]:
+:$x = r e^{i\theta}$
+where $r = \size x$ and $\theta$ is an [[Definition:Argument of Complex Number|argument]] of $x$.
+Then:
+{{begin-eqn}}
+{{eqn | l = x
+ | r = r e^{i\theta}
+}}
+{{eqn | ll= \leadsto
+ | l = x^y
+ | r = \left(r{e^{i\theta} }\right)^y
+}}
+{{eqn | r = r^y e^{i \theta y}
+}}
+{{eqn | ll= \leadsto
+ | l = \size {x^y}
+ | r = \size {r^y e^{i \theta y} }
+}}
+{{eqn | r = \size {r^y} \size {e^{i \theta y} }
+ | c = [[Modulus of Product]]
+}}
+{{eqn | r = \size {r^y}
+ | c = [[Modulus of Exponential of Imaginary Number is One]]
+}}
+{{eqn | r = \size {\size x^y}
+ | c = by definition of $r$
+}}
+{{eqn | r = \size x^y
+ | c = as $\size x^y \ge 0$
+}}
+{{end-eqn}}
+{{qed}}
+{{MissingLinks|exponential properties for $\C$ and that $\size x^y \ge 0$}}
+[[Category:Absolute Value Function]]
+g0rl07jenlyqentmmvi3q5l1rpraizk
+\end{proof}<|endoftext|>
+\section{Count of Rows of Truth Table}
+Tags: Truth Tables
+
+\begin{theorem}
+Let $P$ be a [[Definition:WFF of Propositional Logic|WFF of propositional logic]].
+Suppose $P$ is of [[Definition:Finite Set|finite size]] such that it contains $n$ different [[Definition:Letter|letters]].
+Then a [[Definition:Truth Table|truth table]] constructed to express $P$ will contain $2^n$ [[Definition:Row of Truth Table|rows]].
+\end{theorem}
+
+\begin{proof}
+In a [[Definition:Truth Table|truth table]], one [[Definition:Row of Truth Table|row]] is needed for each [[Definition:Boolean Interpretation|boolean interpretation]] of $P$.
+Let $S$ be the [[Definition:Set|set]] of different [[Definition:Letter|letters]] used in $P$.
+The result then follows from applying [[Number of Boolean Interpretations for Finite Set of Variables]] to $S$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Transitive Closure (Set Theory)}
+Tags: Set Theory
+
+\begin{theorem}
+Let $x$ and $y$ be [[Definition:Set|sets]].
+{{TFAE|def = Transitive Closure (Set Theory)|view = Transitive Closure|context = Set Theory}}
+\end{theorem}
+
+\begin{proof}
+Let $x^t$ be the [[Definition:Transitive Closure (Set Theory)/Definition 2|transitive closure of $x$ by Definition 2]].
+Let the [[Definition:mapping|mapping]] $G$ be defined as on that definition page.
+=== $x \in x^t$ ===
+$x \in \set x$ by the definition of [[Definition:singleton|singleton]].
+Since $\map G 0 = \set x$:
+:$\set x \in \map G \N$
+Thus $x \in x^t$ by the definition of [[Definition:Union of Set of Sets|union]].
+{{qed|lemma}}
+=== $x^t$ is a Set ===
+By [[Denumerable Class is Set]], the [[Definition:Image of Subset under Mapping|image]] of $G$ is a [[Definition:Set|set]].
+Thus $x^t$ is a set by the [[Axiom:Axiom of Unions|Axiom of Unions]].
+{{qed|lemma}}
+=== $x^t$ is a Transitive Set ===
+Let $y \in x^t$ and let $z \in y$.
+By the definition of $x^t$:
+:$\exists n \in \N: y \in \map G n$
+Then by definition of [[Definition:Union of Set of Sets|union]]:
+:$\displaystyle z \in \bigcup \map G n$
+But by the definition of $G$:
+:$z \in \map G {n^+}$
+Thus by the definition of $x^t$:
+:$z \in x^t$
+As this holds for all such $y$ and $z$, $x^t$ is [[Definition:Transitive Set|transitive]].
+{{qed|lemma}}
+=== $x^t$ is Smallest ===
+Let $m$ be a [[Definition:Transitive Set|transitive set]] such that $x \in m$.
+We will show by [[Principle of Mathematical Induction|induction]] that $\map G n \subseteq m$ for each $n \in \N$.
+By [[Union is Smallest Superset]], that will show that $x^t \subseteq m$.
+Because $x \in m$:
+:$\map G 0 = \set x \subseteq m$
+Suppose that $\map G n \subseteq m$.
+Then by [[Union is Increasing]]:
+:$\displaystyle \bigcup \map G n \subseteq \bigcup m$
+{{explain|Transitive set includes its union.}}
+Thus:
+:$\displaystyle \bigcup \map G n \subseteq m$
+{{qed|lemma}}
+By [[Smallest Element is Unique]], $x^t$ is the only set satisfying $(2)$.
+{{qed}}
+[[Category:Set Theory]]
+iqtibc9o9fsqisxkwxh0qhrkjxcvxyn
+\end{proof}<|endoftext|>
+\section{Denumerable Class is Set}
+Tags: Set Theory
+
+\begin{theorem}
+Let $A$ be a [[Definition:Class (Class Theory)|class]].
+Let $\N$ be the [[Definition:Natural Numbers|natural numbers]].
+Suppose that $F: \N \to A$ is a [[Definition:bijection|bijection]].
+Then $A$ is a [[Definition:set|set]].
+\end{theorem}
+
+\begin{proof}
+By the [[Axiom:Axiom of Infinity|Axiom of Infinity]], $\N$ is a [[Definition:set|set]].
+Thus by the [[Axiom:Axiom of Replacement|Axiom of Replacement]], $A$ is also a set.
+{{qed}}
+[[Category:Set Theory]]
+5hmyb1p303uf7bgmuq7hv5xmn9nuw12
+\end{proof}<|endoftext|>
+\section{Relative Complement inverts Subsets}
+Tags: Subsets, Relative Complement
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]].
+Let $A \subseteq S, B \subseteq S$ be [[Definition:Subset|subsets]] of $S$.
+Then:
+:$A \subseteq B \iff \relcomp S B \subseteq \relcomp S A$
+where $\complement_S$ denotes the [[Definition:Relative Complement|complement relative to $S$]].
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = A
+ | o = \subseteq
+ | r = B
+ | c =
+}}
+{{eqn | ll= \leadstoandfrom
+ | l = A \cap B
+ | r = A
+ | c = [[Intersection with Subset is Subset]]
+}}
+{{eqn | ll= \leadstoandfrom
+ | l = \relcomp S {A \cap B}
+ | r = \relcomp S A
+ | c = [[Relative Complement of Relative Complement]]
+}}
+{{eqn | ll= \leadstoandfrom
+ | l = \relcomp S A \cup \relcomp S B
+ | r = \relcomp S A
+ | c = [[De Morgan's Laws (Set Theory)/Relative Complement/Complement of Intersection|De Morgan's Laws: Complement of Intersection]]
+}}
+{{eqn | ll= \leadstoandfrom
+ | l = \relcomp S B
+ | o = \subseteq
+ | r = \relcomp S A
+ | c = [[Union with Superset is Superset]]
+}}
+{{end-eqn}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Union of Subsets is Subset/Family of Sets}
+Tags: Union of Subsets is Subset
+
+\begin{theorem}
+Let $\family {S_i}_{i \mathop \in I}$ be a [[Definition:Indexed Family of Sets|family of sets indexed by $I$]].
+Then for all [[Definition:Set|sets]] $X$:
+:$\displaystyle \paren {\forall i \in I: S_i \subseteq X} \implies \bigcup_{i \mathop \in I} S_i \subseteq X$
+where $\displaystyle \bigcup_{i \mathop \in I} S_i$ is the [[Definition:Union of Family|union of $\family {S_i}$]].
+\end{theorem}
+
+\begin{proof}
+Suppose that $\forall i \in I: S_i \subseteq X$.
+Consider any $\displaystyle x \in \bigcup_{i \mathop \in I} S_i$.
+By definition of [[Definition:Union of Family|set union]]:
+:$\exists i \in I: x \in S_i$
+But as $S_i \subseteq X$ it follows that $x \in X$.
+Thus it follows that:
+:$\displaystyle \bigcup_{i \mathop \in I} S_i \subseteq X$
+So:
+:$\displaystyle \paren {\forall i \in I: S_i \subseteq X} \implies \bigcup_{i \mathop \in I} S_i \subseteq X$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Union Distributes over Union/Sets of Sets}
+Tags: Union Distributes over Union
+
+\begin{theorem}
+Let $A$ and $B$ denote [[Definition:Set of Sets|sets of sets]].
+Then:
+:$\displaystyle \bigcup \left({A \cup B}\right) = \left({\bigcup A}\right) \cup \left({\bigcup B}\right)$
+where $\displaystyle \bigcup A$ denotes the [[Definition:Union of Set of Sets|union of $A$]].
+\end{theorem}
+
+\begin{proof}
+Let $\displaystyle s \in \bigcup \left({A \cup B}\right)$.
+Then by definition of [[Definition:Union of Set of Sets|union of set of sets]]:
+:$\exists X \in A \cup B: s \in X$
+By definition of [[Definition:Set Union|set union]], either:
+:$X \in A$
+or:
+:$X \in B$
+If $X \in A$, then:
+:$s \in \left\{{x: \exists X \in A: x \in X}\right\}$
+If $X \in B$, then:
+:$s \in \left\{{x: \exists X \in B: x \in X}\right\}$
+Thus by definition of [[Definition:Union of Set of Sets|union of set of sets]], either:
+:$\displaystyle s \in \bigcup A$
+or:
+:$\displaystyle s \in \bigcup B$
+So by definition of [[Definition:Set Union|set union]]:
+:$\displaystyle s \in \left({\bigcup A}\right) \cup \left({\bigcup B}\right)$
+So by definition of [[Definition:Subset|subset]]:
+:$\displaystyle \bigcup \left({A \cup B}\right) \subseteq \left({\bigcup A}\right) \cup \left({\bigcup B}\right)$
+Now let $\displaystyle s \in \left({\bigcup A}\right) \cup \left({\bigcup B}\right)$.
+By definition of [[Definition:Set Union|set union]], either:
+:$\displaystyle s \in \bigcup A$
+or:
+:$\displaystyle s \in \bigcup B$
+That is, by definition of [[Definition:Union of Set of Sets|union of set of sets]], either:
+:$s \in \left\{{x: \exists X \in A: x \in X}\right\}$
+or:
+:$s \in \left\{{x: \exists X \in B: x \in X}\right\}$
+{{WLOG}}, let $s \in X$ such that $X \in A$.
+Then by [[Set is Subset of Union]]:
+:$s \in X$ such that $X \in A \cup B$
+That is:
+:$\displaystyle s \in \bigcup \left({A \cup B}\right)$
+Similarly if $s \in X$ such that $X \in B$.
+So by definition of [[Definition:Subset|subset]]:
+:$\displaystyle \left({\bigcup A}\right) \cup \left({\bigcup B}\right) \subseteq \bigcup \left({A \cup B}\right)$
+Hence by definition of [[Definition:Set Equality|equality of sets]]:
+:$\displaystyle \bigcup \left({A \cup B}\right) = \left({\bigcup A}\right) \cup \left({\bigcup B}\right)$
+{{qed}}
+[[Category:Union Distributes over Union]]
+dfrc8pf2hupl9cy8eu3n6tg24uz9uvl
+\end{proof}<|endoftext|>
+\section{Union Distributes over Union/Families of Sets}
+Tags: Union Distributes over Union, Indexed Families
+
+\begin{theorem}
+Let $I$ be an [[Definition:Indexing Set|indexing set]].
+Let $\family {A_\alpha}_{\alpha \mathop \in I}$ and $\family {B_\alpha}_{\alpha \mathop \in I}$ be [[Definition:Indexed Family of Subsets|indexed families of subsets]] of a [[Definition:Set|set]] $S$.
+Then:
+:$\displaystyle \map {\bigcup_{\alpha \mathop \in I} } {A_\alpha \cup B_\alpha} = \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cup \paren {\bigcup_{\alpha \mathop \in I} B_\alpha}$
+where $\displaystyle \bigcup_{\alpha \mathop \in I} A_\alpha$ denotes the [[Definition:Union of Family|union of $\family {A_\alpha}_{\alpha \mathop \in I}$]].
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = x
+ | o = \in
+ | r = \map {\bigcup_{\alpha \mathop \in I} } {A_\alpha \cup B_\alpha}
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll= \exists \beta \in I:
+ | l = x
+ | o = \in
+ | r = A_\beta \cup B_\beta
+ | c = {{Defof|Union of Family}}
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = A_\beta
+ | c = {{Defof|Set Union}}
+}}
+{{eqn | lo= \lor
+ | l = x
+ | o = \in
+ | r = B_\beta
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \bigcup_{\alpha \mathop \in I} A_\alpha
+ | c = [[Set is Subset of Union/Family of Sets|Set is Subset of Union]]
+}}
+{{eqn | lo= \lor
+ | l = x
+ | o = \in
+ | r = \bigcup_{\alpha \mathop \in I} B_\alpha
+ | c = [[Set is Subset of Union/Family of Sets|Set is Subset of Union]]
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cup \paren {\bigcup_{\alpha \mathop \in I} B_\alpha}
+ | c = {{Defof|Set Union}}
+}}
+{{end-eqn}}
+Thus by definition of [[Definition:Subset|subset]]:
+:$\displaystyle \map {\bigcup_{\alpha \mathop \in I} } {A_\alpha \cup B_\alpha} \subseteq \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cup \paren {\bigcup_{\alpha \mathop \in I} B_\alpha}$
+{{qed|lemma}}
+{{begin-eqn}}
+{{eqn | l = x
+ | o = \in
+ | r = \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cup \paren {\bigcup_{\alpha \mathop \in I} B_\alpha}
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \bigcup_{\alpha \mathop \in I} A_\alpha
+ | c = {{Defof|Set Union}}
+}}
+{{eqn | lo= \lor
+ | l = x
+ | o = \in
+ | r = \bigcup_{\alpha \mathop \in I} B_\alpha
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll= \exists \beta \in I:
+ | l = x
+ | o = \in
+ | r = A_\beta
+ | c = {{Defof|Union of Family}}
+}}
+{{eqn | lo= \lor
+ | ll= \exists \beta \in I:
+ | l = x
+ | o = \in
+ | r = B_\beta
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll= \exists \beta \in I:
+ | l = x
+ | o = \in
+ | r = A_\beta \cup B_\beta
+ | c = {{Defof|Union of Family}}
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \map {\bigcup_{\alpha \mathop \in I} } {A_\alpha \cup B_\alpha}
+ | c =
+}}
+{{end-eqn}}
+Thus by definition of [[Definition:Subset|subset]]:
+:$\displaystyle \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cup \paren {\bigcup_{\alpha \mathop \in I} B_\alpha} \subseteq \map {\bigcup_{\alpha \mathop \in I} } {A_\alpha \cup B_\alpha}$
+{{qed|lemma}}
+By definition of [[Definition:Set Equality/Definition 2|set equality]]:
+:$\displaystyle \map {\bigcup_{\alpha \mathop \in I} } {A_\alpha \cup B_\alpha} = \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cup \paren {\bigcup_{\alpha \mathop \in I} B_\alpha}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Intersection Distributes over Intersection/Sets of Sets}
+Tags: Intersection Distributes over Intersection
+
+\begin{theorem}
+Let $A$ and $B$ denote [[Definition:Set of Sets|sets of sets]].
+Then:
+:$\displaystyle \bigcap \paren {A \cap B} = \paren {\bigcap A} \cap \paren {\bigcap B}$
+where $\displaystyle \bigcap A$ denotes the [[Definition:Intersection of Set of Sets|intersection of $A$]].
+\end{theorem}
+
+\begin{proof}
+{{proof wanted}}
+[[Category:Intersection Distributes over Intersection]]
+8pc0zsy2xi5kegx3vbru32vzpsg4j4f
+\end{proof}<|endoftext|>
+\section{Intersection Distributes over Intersection/Families of Sets}
+Tags: Intersection Distributes over Intersection, Indexed Families
+
+\begin{theorem}
+Let $I$ be an [[Definition:Indexing Set|indexing set]].
+Let $\family {A_\alpha}_{\alpha \mathop \in I}$ and $\family {B_\alpha}_{\alpha \mathop \in I}$ be [[Definition:Indexed Family of Subsets|indexed families of subsets]] of a [[Definition:Set|set]] $S$.
+Then:
+:$\displaystyle \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cap B_\alpha} = \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cap \paren {\bigcap_{\alpha \mathop \in I} B_\alpha}$
+where $\displaystyle \bigcap_{\alpha \mathop \in I} A_\alpha$ denotes the [[Definition:Intersection of Family|intersection of $\family {A_\alpha}$]].
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = x
+ | o = \in
+ | r = \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cap B_\alpha}
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll= \forall \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha \cap B_\alpha
+ | c = {{Defof|Intersection of Family}}
+}}
+{{eqn | lll=\leadsto
+ | ll= \forall \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha
+ | c = {{Defof|Set Intersection}}
+}}
+{{eqn | lo= \land
+ | l = x
+ | o = \in
+ | r = B_\alpha
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \bigcap_{\alpha \mathop \in I} A_\alpha
+ | c ={{Defof|Intersection of Family}}
+}}
+{{eqn | lo= \land
+ | l = x
+ | o = \in
+ | r = \bigcap_{\alpha \mathop \in I} B_\alpha
+ | c = {{Defof|Intersection of Family}}
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cap \paren {\bigcap_{\alpha \mathop \in I} B_\alpha}
+ | c = {{Defof|Set Intersection}}
+}}
+{{end-eqn}}
+Thus by definition of [[Definition:Subset|subset]]:
+:$\displaystyle \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cap B_\alpha} \subseteq \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cap \paren {\bigcap_{\alpha \mathop \in I} B_\alpha}$
+{{qed|lemma}}
+{{begin-eqn}}
+{{eqn | l = x
+ | o = \in
+ | r = \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cap \paren {\bigcap_{\alpha \mathop \in I} B_\alpha}
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \bigcap_{\alpha \mathop \in I} A_\alpha
+ | c = {{Defof|Set Intersection}}
+}}
+{{eqn | lo= \land
+ | l = x
+ | o = \in
+ | r = \bigcap_{\alpha \mathop \in I} B_\alpha
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll= \forall \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha
+ | c = {{Defof|Intersection of Family}}
+}}
+{{eqn | lo= \land
+ | l = x
+ | o = \in
+ | r = B_\alpha
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll= \forall \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha \cap B_\alpha
+ | c = {{Defof|Set Intersection}}
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cap B_\alpha}
+ | c = {{Defof|Intersection of Family}}
+}}
+{{end-eqn}}
+Thus by definition of [[Definition:Subset|subset]]:
+:$\displaystyle \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cap \paren {\bigcap_{\alpha \mathop \in I} B_\alpha} \subseteq \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cap B_\alpha}$
+{{qed|lemma}}
+By definition of [[Definition:Set Equality/Definition 2|set equality]]:
+:$\displaystyle \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cap B_\alpha} = \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cap \paren {\bigcap_{\alpha \mathop \in I} B_\alpha}$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Intersection Distributes over Intersection/General Result}
+Tags: Intersection Distributes over Intersection
+
+\begin{theorem}
+Let $\left\langle{\mathbb S_i}\right\rangle_{i \in I}$ be an [[Definition:Indexed Family of Sets|$I$-indexed family]] of [[Definition:Set of Sets|sets of sets]].
+Then:
+:$\displaystyle \bigcap_{i \mathop \in I} \bigcap \mathbb S_i = \bigcap \bigcap_{i \mathop \in I} \mathbb S_i$
+\end{theorem}
+
+\begin{proof}
+{{proof wanted}}
+[[Category:Intersection Distributes over Intersection]]
+5b5b3b7i6z9sy2akx45notpt9jfv7cw
+\end{proof}<|endoftext|>
+\section{Intersection Distributes over Union/Family of Sets}
+Tags: Intersection Distributes over Union, Indexed Families
+
+\begin{theorem}
+Let $I$ be an [[Definition:Indexing Set|indexing set]].
+Let $\family {A_\alpha}_{\alpha \mathop \in I}$ be a [[Definition:Indexed Family of Subsets|indexed family of subsets]] of a [[Definition:Set|set]] $S$.
+Let $B \subseteq S$.
+Then:
+:$\displaystyle \map {\bigcup_{\alpha \mathop \in I} } {A_\alpha \cap B} = \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cap B$
+where $\displaystyle \bigcup_{\alpha \mathop \in I} A_\alpha$ denotes the [[Definition:Union of Family|union]] of $\family {A_\alpha}_{\alpha \mathop \in I}$.
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = x
+ | o = \in
+ | r = \bigcup_{\alpha \mathop \in I} \paren {A_\alpha \cap B}
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll= \exists \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha \cap B
+ | c = {{Defof|Union of Family}}
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = A_\alpha
+ | c = {{Defof|Set Intersection}}
+}}
+{{eqn | lo= \land
+ | l = x
+ | o = \in
+ | r = B
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \paren {\bigcup_{\alpha \mathop \in I} A_\alpha}
+ | c = [[Set is Subset of Union/Family of Sets|Set is Subset of Union]]
+}}
+{{eqn | lo= \land
+ | l = x
+ | o = \in
+ | r = B
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cap B
+ | c = {{Defof|Set Intersection}}
+}}
+{{end-eqn}}
+By definition of [[Definition:Subset|subset]]:
+:$\displaystyle \bigcup_{\alpha \mathop \in I} \paren {A_\alpha \cap B} \subseteq \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cap B$
+{{qed|lemma}}
+{{begin-eqn}}
+{{eqn | l = x
+ | o = \in
+ | r = \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cap B
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \paren {\bigcup_{\alpha \mathop \in I} A_\alpha}
+ | c = {{Defof|Set Intersection}}
+}}
+{{eqn | lo= \land
+ | l = x
+ | o = \in
+ | r = B
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll =\exists \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha
+ | c = {{Defof|Union of Family}}
+}}
+{{eqn | lo= \land
+ | l = x
+ | o = \in
+ | r = B
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll =\exists \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha \cap B
+ | c = {{Defof|Set Intersection}}
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \bigcup_{\alpha \mathop \in I} \paren {A_\alpha \cap B}
+ | c = [[Set is Subset of Union/Family of Sets|Set is Subset of Union]]
+}}
+{{end-eqn}}
+By definition of [[Definition:Subset|subset]]:
+:$\displaystyle \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cap B \subseteq \map {\bigcup_{\alpha \mathop \in I} } {A_\alpha \cap B}$
+{{qed|lemma}}
+By definition of [[Definition:Set Equality/Definition 2|set equality]]:
+:$\displaystyle \map {\bigcup_{\alpha \mathop \in I} } {A_\alpha \cap B} = \paren {\bigcup_{\alpha \mathop \in I} A_\alpha} \cap B$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Union Distributes over Intersection/Family of Sets}
+Tags: Union Distributes over Intersection, Indexed Families
+
+\begin{theorem}
+Let $I$ be an [[Definition:Indexing Set|indexing set]].
+Let $\family {A_\alpha}_{\alpha \mathop \in I}$ be an [[Definition:Indexed Family of Subsets|indexed family of subsets]] of a [[Definition:Set|set]] $S$.
+Let $B \subseteq S$.
+Then:
+:$\displaystyle \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cup B} = \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cup B$
+where $\displaystyle \bigcap_{\alpha \mathop \in I} A_\alpha$ denotes the [[Definition:Intersection of Family|intersection]] of $\family {A_\alpha}_{\alpha \mathop \in I}$.
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = x
+ | o = \in
+ | r = \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cup B}
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll =\forall \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha \cup B
+ | c = [[Intersection is Subset/Family of Sets|Intersection is Subset]]
+}}
+{{eqn | lll=\leadsto
+ | ll =\forall \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha
+ | c = {{Defof|Set Union}}
+}}
+{{eqn | lo= \lor
+ | l = x
+ | o = \in
+ | r = B
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \paren {\bigcap_{\alpha \mathop \in I} A_\alpha}
+ | c = {{Defof|Intersection of Family}}
+}}
+{{eqn | lo= \lor
+ | l = x
+ | o = \in
+ | r = B
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cup B
+ | c = {{Defof|Set Union}}
+}}
+{{end-eqn}}
+By definition of [[Definition:Subset|subset]]:
+:$\displaystyle \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cup B} \subseteq \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cup B$
+{{qed|lemma}}
+{{begin-eqn}}
+{{eqn | l = x
+ | o = \in
+ | r = \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cup B
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \paren {\bigcap_{\alpha \mathop \in I} A_\alpha}
+ | c = {{Defof|Set Union}}
+}}
+{{eqn | lo= \lor
+ | l = x
+ | o = \in
+ | r = B
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll =\forall \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha
+ | c = [[Intersection is Subset/Family of Sets|Intersection is Subset]]
+}}
+{{eqn | lo= \lor
+ | l = x
+ | o = \in
+ | r = B
+ | c =
+}}
+{{eqn | lll=\leadsto
+ | ll =\forall \alpha \in I:
+ | l = x
+ | o = \in
+ | r = A_\alpha \cup B
+ | c = {{Defof|Set Union}}
+}}
+{{eqn | lll=\leadsto
+ | l = x
+ | o = \in
+ | r = \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cup B}
+ | c = {{Defof|Intersection of Family}}
+}}
+{{end-eqn}}
+By definition of [[Definition:Subset|subset]]:
+:$\displaystyle \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cup B \subseteq \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cup B}$
+{{qed|lemma}}
+By definition of [[Definition:Set Equality/Definition 2|set equality]]:
+:$\displaystyle \map {\bigcap_{\alpha \mathop \in I} } {A_\alpha \cup B} = \paren {\bigcap_{\alpha \mathop \in I} A_\alpha} \cup B$
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Well-Ordering}
+Tags: Well-Orderings
+
+\begin{theorem}
+{{TFAE|def = Well-Ordering}}
+Let $\left({S, \preceq}\right)$ be an [[Definition:Ordered Set|ordered set]].
+\end{theorem}
+
+\begin{proof}
+=== [[Equivalence of Definitions of Well-Ordering/Definition 1 implies Definition 2|Definition 1 implies Definition 2]] ===
+{{:Equivalence of Definitions of Well-Ordering/Definition 1 implies Definition 2}}
+[[Definition:By Hypothesis|By hypothesis]], every [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] of $S$ has a [[Definition:Smallest Element|smallest element]].
+By [[Smallest Element is Minimal]] it follows that every [[Definition:Non-Empty Set|non-empty]] [[Definition:Subset|subset]] of $S$ has a [[Definition:Minimal Element|minimal element]].
+Thus it follows that $\preceq$ is a [[Definition:Well-Ordering|well-ordering]] on $S$ by [[Definition:Well-Ordering/Definition 2|definition 2]].
+{{qed|lemma}}
+=== Definition 2 implies Definition 1 ===
+Let $\preceq$ be a [[Definition:Well-Ordering/Definition 2|well-ordering on $S$ by definition 2]].
+That is:
+:$\preceq$ is a [[Definition:Well-Founded|well-founded]] [[Definition:Total Ordering|total ordering]].
+By definition of [[Definition:Well-Founded|well-founded]], every [[Definition:Non-Empty Set|non-empty]] $T \subseteq S$ has a [[Definition:Minimal Element|minimal element]].
+By [[Minimal Element in Toset is Unique and Smallest]], every [[Definition:Non-Empty Set|non-empty]] $T \subseteq S$ has a [[Definition:Smallest Element|smallest element]].
+The result follows.
+{{qed}}
+[[Category:Well-Orderings]]
+aexj9bpqs6iubuikzd3vbbt2y6mzbcv
+\end{proof}<|endoftext|>
+\section{Equivalence of Definitions of Transitive Closure (Relation Theory)/Finite Chain is Smallest}
+Tags: Equivalence of Definitions of Transitive Closure (Relation Theory)
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]] or [[Definition:Class (Class Theory)|class]].
+Let $\RR$ be a [[Definition:Endorelation|relation]] on $S$.
+Let $\RR^+$ be the transitive closure of $\RR$ by the [[Definition:Transitive Closure (Relation Theory)/Finite Chain|finite chain definition]].
+That is, for $x, y \in S$ let $x \mathrel {\RR^+} y$ {{iff}} for some [[Definition:Natural Number|natural number]] $n > 0$ there exist $s_0, s_1, \dots, s_n \in S$ such that $s_0 = x$, $s_n = y$, and:
+:$\forall k \in \N_n: s_k \mathrel \RR s_{k+1}$
+Then $\RR^+$ is [[Definition:Transitive Relation|transitive]] and if $\QQ$ is a transitive relation on $S$ such that $\RR \subseteq \QQ$ then $\RR^+ \subseteq \QQ$.
+\end{theorem}
+
+\begin{proof}
+==== $\RR^+$ is transitive ====
+Let $x,y,z \in S$.
+Let $x \mathrel {\RR^+} y$ and $y \mathrel {\RR^+} z$.
+Then for some $m, n \in \N_{>0}$ there are $s_0, s_1, \dots, s_m$ and $t_0, t_1, \dots, t_n$ such that $s_0 = x$, $s_m = y$, $t_0 = y$, $t_n = z$, and the following hold:
+:$\forall k \in \N_m: s_k \mathrel \RR s_{k + 1}$
+:$\forall k \in \N_n: t_k \mathrel \RR t_{k + 1}$
+Let $\sequence {u_k}_{k \mathop \in \N_{m + n} }$ be defined thus:
+:$u_k = \cases {s_k & \text{if $k \le m$} \\ t_{k - m} & \text {if $k > m$}}$
+Then clearly $u_k \mathrel \RR u_{k+1}$ whenever $k < m$ and whenever $k > m$.
+But $u_m = s_m = y = t_0 \mathrel \RR t_1 = u_{m+1}$, so this holds also for $k = m$.
+Furthermore, $u_0 = s_0 = x$ and $u_{m+n} = t_n = z$.
+Therefore $x \mathrel {\RR^+} z$.
+As this holds for all such $x$ and $z$, $\RR^+$ is [[Definition:Transitive Relation|transitive]].
+{{qed|lemma}}
+==== $\RR^+$ is smallest ====
+Let $\QQ$ be any [[Definition:Transitive Relation|transitive relation]] on $S$ such that $\RR \subseteq \QQ$.
+For any $x, y \in S$ such that $x \mathrel {\RR^+} y$, let $\map d {x, y}$ be the [[Definition:Smallest Element|smallest]] [[Definition:Natural Number|natural number]] $n > 0$ such that there exist $s_0, s_1, \dots, s_n \in S$ such that $s_0 = x$, $s_n = y$, and:
+:$\forall k \in \N_n: s_k \mathrel \RR s_{k + 1}$
+Such an $n$ always exists by the definition of $\RR^+$ and the fact that $\N$ is [[Definition:Well-Ordering|well-ordered]] by $\le$.
+We will show by [[Principle of Mathematical Induction|induction]] on $n$ that for every $x, y$ such that $x \mathrel {\RR^+} y$ and $\map d {x, y} = n$, $x \mathrel \QQ y$.
+This will show that $\RR^+ \subseteq \QQ$.
+If $\map d {x, y} = 1$ then $x \mathrel \RR y$, so $x \mathrel \QQ y$.
+Suppose that the result holds for $n$.
+Let $\map d {x, y} = n + 1$.
+Then there exist $s_0, s_1, \dots, s_{n + 1}$ such that $s_0 = x$, $s_{n + 1} = y$, and:
+:$\forall k \in \N_{n + 1}: s_k \mathrel \RR s_{k + 1}$
+Then dropping the last term:
+:$\forall k \in \N_n: s_k \mathrel \RR s_{k + 1}$
+so $x \mathrel {\RR^+} s_n$.
+{{explain|Explain better: either show that minimality implies this or replace standard induction with complete induction}}
+It should be clear, then, that $\map d {x, s_n} = n$.
+Thus by the inductive hypothesis, $x \mathrel \QQ s_n$.
+Since $\RR \subseteq \QQ$, $s_n \mathrel \QQ s_{n + 1} = y$.
+Since $x \mathrel \QQ s_n$, $s_n \mathrel \QQ y$, and $\QQ$ is transitive:
+:$x \mathrel \QQ y$
+As this holds for all such $x$ and $y$, $\RR^+ \subseteq \QQ$.
+{{qed}}
+[[Category:Equivalence of Definitions of Transitive Closure (Relation Theory)]]
+t1gd2ly0jh4kzcqqzcse4obdyhx8c6i
+\end{proof}<|endoftext|>
+\section{Order-Preserving Bijection on Wosets is Order Isomorphism}
+Tags: Well-Orderings, Order Isomorphisms
+
+\begin{theorem}
+Let $\struct {S, \preceq_1}$ and $\struct {T, \preceq_2}$ be [[Definition:Well-Ordered Set|well-ordered sets]].
+Let $\phi: S \to T$ be a [[Definition:Bijection|bijection]] such that $\phi: S \to T$ is [[Definition:Order-Preserving|order-preserving]]:
+:$\forall x, y \in S: x \preceq_1 y \implies \map \phi x \preceq_2 \map \phi y$
+Then:
+:$\forall x, y \in S: \map \phi x \preceq_2 \map \phi y \implies x \preceq_1 y$
+That is, $\phi: S \to T$ is an [[Definition:Order Isomorphism|order isomorphism]].
+\end{theorem}
+
+\begin{proof}
+A [[Definition:Well-Ordered Set|well-ordered set]] is a [[Definition:Totally Ordered Set|totally ordered set]] by definition.
+A [[Definition:Bijection|bijection]] is a [[Definition:Surjection|surjection]] by definition.
+The result follows from [[Order Isomorphism iff Strictly Increasing Surjection]].
+{{qed}}
+[[Category:Well-Orderings]]
+[[Category:Order Isomorphisms]]
+bd9m2xcftcxo5aowtn8z62ncldgdsm8
+\end{proof}<|endoftext|>
+\section{Inverse Image of Set under Set-Like Relation is Set}
+Tags: Set Theory
+
+\begin{theorem}
+Let $A$ be a [[Definition:Class (Class Theory)|class]].
+Let $\RR$ be a [[Definition:Set-Like Relation|set-like]] [[Definition:Endorelation|endorelation]] on $A$.
+Let $B \subseteq A$ be a [[Definition:set|set]].
+Then $\map {\RR^{-1} } B$, the [[Definition:Inverse Image|inverse image]] of $B$ under $\RR$, is also a [[Definition:set|set]].
+\end{theorem}
+
+\begin{proof}
+Since $\RR$ is [[Definition:Set-Like Relation|set-like]], $\map {\RR^{-1} } {\set x}$ is a [[Definition:Set|set]] for each $x$ in $A$.
+As $B \subseteq A$, this holds also for each $x \in B$.
+{{explain|Explain better.}}
+But then $\displaystyle \map {\RR^{-1} } B = \bigcup_{x \mathop \in B} \map {\RR^{-1} } {\set x}$, which is a [[Definition:Set|set]] by the [[Axiom:Axiom of Unions|Axiom of Unions]].
+{{qed}}
+[[Category:Set Theory]]
+a0lkqyt765j7yboaj8pvu4luf5ejni3
+\end{proof}<|endoftext|>
+\section{Reciprocal of Holomorphic Function}
+Tags: Complex Analysis, Reciprocals
+
+\begin{theorem}
+Let $f: \C \to \C$ be a [[Definition:Complex Function|complex function]].
+Let $U \subseteq \C$ be an [[Definition:Open Set (Complex Analysis)|open set]] such that $f$ has no [[Definition:Root of Function|zeros]] in $U$.
+Suppose further that $f$ is [[Definition:Holomorphic Function|holomorphic]] in $U$.
+Then the [[Definition:Complex Function|complex function]]
+:$\dfrac 1 {f_{\restriction U} } : U \to \C$
+is [[Definition:Holomorphic Function|holomorphic]].
+\end{theorem}
+
+\begin{proof}
+Let $g: U \to \C$ be such that $\map g x = 1 / \map f x$.
+Since $\map f x$ is nonzero for $x \in U$, $g$ is well-defined.
+By [[Quotient Rule for Continuous Functions]], $g$ is continuous.
+Let $z_0 \in U$.
+As $g$ is continuous:
+:$\displaystyle \lim_{h \mathop \to 0} \frac 1 {\map f {z_0 + h} } = \frac 1 {\map f {z_0} }$
+As $f$ is holomorphic:
+:$\displaystyle \lim_{h \mathop \to 0} \frac {\map f {z_0 + h} - \map f {z_0} } h = \map {f'} {z_0}$
+
+By the [[Combination Theorem for Limits of Functions]],
+{{begin-eqn}}
+{{eqn | l = \lim_{h \mathop \to 0} \frac {\map g {z_0 + h} - \map g {z_0} } h
+ | r = \lim_{h \mathop \to 0} \frac {\frac 1 {\map f {z_0 + h} } - \frac 1 {\map f {z_0} } } h
+}}
+{{eqn | r = -\frac 1 {\map f {z_0} } \lim_{h \mathop \to 0} \paren {\paren {\frac 1 {\map f {z_0 + h} } } \paren {\frac {\map f {z_0 + h} - \map f {z_0} } h} }
+}}
+{{eqn | r = -\frac 1 {\map f {z_0}^2} \cdot \map {f'} {z_0}
+}}
+{{end-eqn}}
+It follows that $g$ is holomorphic.
+{{qed}}
+[[Category:Complex Analysis]]
+[[Category:Reciprocals]]
+901s09w466k3i1akdzp6afbndcxrcez
+\end{proof}<|endoftext|>
+\section{Transitive Closure of Set-Like Relation is Set-Like}
+Tags: Set Theory, Transitive Closures
+
+\begin{theorem}
+Let $A$ be a [[Definition:Class (Class Theory)|class]].
+Let $\RR$ be a [[Definition:Set-Like Relation|set-like]] [[Definition:Endorelation|endorelation]] on $A$.
+Let $\RR^+$ be the [[Definition:Transitive Closure (Relation Theory)/Finite Chain|transitive closure]] of $\RR$.
+Then $\RR^+$ is also a [[Definition:Set-Like Relation|set-like relation]].
+\end{theorem}
+
+\begin{proof}
+Let $x \in A$.
+Let $A'$ be the [[Definition:Class (Class Theory)|class]] of all [[Definition:Subset|subsets]] of $A$.
+For each $s \in A'$, $\map {\RR^{-1} } s$ is a [[Definition:Subset|subset]] of $A$.
+Hence by [[Inverse Image of Set under Set-Like Relation is Set]] and the definition of [[Definition:Endorelation|endorelation]]:
+:$\map {\RR^{-1} } s \in A'$
+Define a [[Definition:Mapping|mapping]] $G: A' \to A'$ as:
+:$\forall s \in A': \map G s = \map {\RR^{-1} } s$
+[[Principle of Recursive Definition|Recursively define]] a [[Definition:Mapping|mapping]] $f: \N \to A'$ as follows:
+:$\map f n = \begin {cases} \set x & : n = 0 \\ \map G {\map f {n - 1} } & : n > 0 \end {cases}$
+By the [[Axiom:Axiom of Infinity|Axiom of Infinity]] and the [[Axiom:Axiom of Replacement|Axiom of Replacement]]:
+:$\map f \N$ is a set.
+Thus by the [[Axiom:Axiom of Unions|Axiom of Unions]]:
+:$\displaystyle \bigcup \map f \N$ is a [[Definition:Set|set]].
+Let $y \in \map {\paren {\RR^+}^{-1} } x$.
+By the definition of [[Definition:Transitive Closure (Relation Theory)/Finite Chain|transitive closure]]:
+:for some $n \in \N_{>0}$ there are $a_0, a_1, \dots, a_n$ such that $y = a_0 \mathrel \RR a_1 \mathrel \RR \cdots \mathrel \RR a_n = x$.
+{{explain|it's a finite sort of induction, and a simple and common pattern.}}
+Then by [[Principle of Mathematical Induction|induction]] (working from $n$ to $0$), $\displaystyle a_n, a_{n - 1}, \dots, a_0 \in \bigcup \map f \N$.
+As this holds for all such $y$:
+:$\displaystyle \map {\paren {\RR^+}^{-1} } x \subseteq \bigcup \map f \N$
+By the [[Axiom:Axiom of Specification|Axiom of Specification]]:
+:$\map {\paren {\RR^+}^{-1} } x$ is a [[Definition:Set|set]].
+As this holds for all $x \in A$:
+:$\RR^+$ is a [[Definition:Set-Like Relation|set-like relation]].
+{{qed}}
+[[Category:Set Theory]]
+[[Category:Transitive Closures]]
+8wsogx7xf6uebkx6imqtrrgno2wptdm
+\end{proof}<|endoftext|>
+\section{Relational Closure from Transitive Closure}
+Tags: Relational Closures
+
+\begin{theorem}
+Let $A$ be a [[Definition:Set|set]] or [[Definition:Class (Class Theory)|class]].
+Let $\RR$ be a [[Definition:Endorelation|relation]] on $A$.
+Let $\RR^+$ be the [[Definition:Transitive Closure (Relation Theory)|transitive closure]] of $\RR$.
+Let $B \subseteq A$.
+Let $B' = B \cup \map {\paren {\RR^+}^{-1} } B$.
+Let $C$ be an [[Definition:Transitive with Respect to a Relation|$\RR$-transitive]] [[Definition:Subset|subset]] or [[Definition:Subclass|subclass]] of $A$ such that $B \subseteq C$.
+Then:
+:$B'$ is [[Definition:Transitive with Respect to a Relation|$\RR$-transitive]]
+:$B' \subseteq C$
+:If $B$ is a [[Definition:Set|set]] and $\RR$ is [[Definition:Set-Like Relation|set-like]] then $B'$ is a [[Definition:Set|set]]. That is, $B'$ is the [[Definition:Relational Closure|relational closure]] of $B$ under $\RR$.
+\end{theorem}
+
+\begin{proof}
+=== $B'$ is $\RR$-transitive ===
+Let $x \in B'$ and $y \in A$, and let $y \mathrel \RR x$.
+If $x \in B$, then by the definition of [[Definition:Transitive Closure (Relation Theory)|transitive closure]]:
+:$y \mathrel {\RR^+} x$
+so:
+:$y \in B'$
+Let $x \in \map {\paren {\RR^+}^{-1} } B$.
+Then:
+:$x \mathrel {\RR^+} b$
+for some $b \in B$.
+Since $\RR \subseteq \RR^+$, it follows that:
+:$y \mathrel {\RR^+} x$
+Since $\RR^+$ is [[Definition:Transitive Relation|transitive]]:
+:$y \mathrel {\RR^+} b$
+That is:
+:$y \in \map {\paren {\RR^+}^{-1} } B$
+so $y \in B'$.
+As this holds for all such $x$ and $y$, $B'$ is [[Definition:Transitive with Respect to a Relation|$\RR$-transitive]].
+{{qed|lemma}}
+=== $B' \subseteq C$ ===
+Let $x \in B'$.
+Then $x \in B$ or $x \in \map {\paren {\RR^+}^{-1} } B$.
+Let $x \in B$.
+Then because $B \subseteq C$:
+:$x \in C$
+Suppose that $x \in \map {\paren {\RR^+}^{-1} } B$.
+Then for some $b \in B$:
+:$x \mathrel {\RR^+} b$
+By the definition of [[Definition:Transitive Closure (Relation Theory)/Finite Chain|transitive closure]]:
+:for some $n \in \N_{>0}$ there exist $a_0, a_1, \dots, a_n$ such that:
+::$x = a_0 \mathrel \RR a_1 \mathrel \RR \cdots \mathrel \RR a_n = b$
+{{explain|Expand to full argument.}}
+Thus by the [[Principle of Mathematical Induction]]:
+:$x \in C$
+{{qed|lemma}}
+=== Set-like implies set ===
+Let $B$ be a [[Definition:Set|set]].
+Let $\RR$ be [[Definition:Set-Like Relation|set-like]].
+Then by [[Transitive Closure of Set-Like Relation is Set-Like]] and [[Inverse Image of Set under Set-Like Relation is Set]]:
+:$\map {\paren {\RR^+}^{-1} } B$ is a [[Definition:Set|set]].
+Thus $B'$ is a [[Definition:Set|set]] by the [[Axiom:Axiom of Unions|Axiom of Unions]].
+{{qed}}
+[[Category:Relational Closures]]
+a96d1ujc5xam9piqm5vf9dw2yfd416c
+\end{proof}<|endoftext|>
+\section{Minimal WRT Restriction}
+Tags: Restrictions
+
+\begin{theorem}
+Let $A$ be a [[Definition:Set|set]] or [[Definition:Class (Class Theory)|class]].
+Let $\mathcal R$ be a [[Definition:Endorelation|relation]] on $A$.
+Let $B$ be a [[Definition:Subset|subset]] or [[Definition:Subclass|subclass]] of $A$.
+Let $\mathcal R'$ be the [[Definition:Restriction|restriction]] of $\mathcal R$ to $B$.
+Let $m \in B$.
+Then:
+:$m$ is [[Definition:Minimal Element under Relation|$\mathcal R$-minimal]] in $B$
+{{iff}}:
+:$m$ is [[Definition:Minimal Element under Relation|$\mathcal R'$-minimal]] in $B$.
+\end{theorem}
+
+\begin{proof}
+=== Sufficient Condition ===
+Let $m$ be [[Definition:Minimal Element under Relation|$\mathcal R$-minimal]] in $B$.
+Let $x$ be any [[Definition:Element|element]] of $B$.
+Suppose [[Proof by Contradiction|for the sake of contradiction]] that $x \mathrel {\mathcal R'} m$.
+Then since $\mathcal R' \subseteq \mathcal R$:
+:$x \mathrel{\mathcal R} m$
+contradicting the fact that $m$ is [[Definition:Minimal Element under Relation|$\mathcal R$-minimal]] in $B$.
+Thus:
+:$\lnot \left({x \mathrel{\mathcal R'} m}\right)$
+As this holds for all $x \in B$, $m$ is [[Definition:Minimal Element under Relation|$\mathcal R'$-minimal]] in $B$.
+{{qed|lemma}}
+=== Necessary Condition ===
+Let $m$ be [[Definition:Minimal Element under Relation|$\mathcal R'$-minimal]] in $B$.
+Let $x \in B$.
+Suppose [[Proof by Contradiction|for the sake of contradiction]] that $x \mathrel{\mathcal R} m$.
+Then $x, m \in B$.
+Therefore:
+:$\left({x, m}\right) \in B \times B$
+Thus:
+:$\left({x, m}\right) \in \mathcal R \cap \left({B \times B}\right) = \mathcal R'$
+so $x \mathrel{\mathcal R'} m$
+This contradicts the fact that $m$ is [[Definition:Minimal Element under Relation|$\mathcal R'$-minimal]] in $B$.
+Thus:
+:$\lnot \left({x \mathrel{\mathcal R} m}\right)$
+As this holds for all $x \in B$, it follows that $m$ is [[Definition:Minimal Element under Relation|$\mathcal R$-minimal]] in $B$.
+{{qed}}
+[[Category:Restrictions]]
+7apan44wwcbfxgntcgd88jt1jz9mpw0
+\end{proof}<|endoftext|>
+\section{Intersection of Ordinals is Ordinal}
+Tags: Ordinals
+
+\begin{theorem}
+Let $A$ be a non-empty [[Definition:Class (Class Theory)|class]] of [[Definition:Ordinal|ordinals]].
+Then $\bigcap A$ is an ordinal.
+\end{theorem}
+
+\begin{proof}
+Let $i = \bigcap A$.
+=== Set ===
+By [[Intersection of Non-Empty Class is Set]], $i$ is a [[Definition:set|set]].
+{{qed|lemma}}
+=== Transitive ===
+Let $n \in i$ and let $m \in n$.
+Let $a \in A$.
+By the definition of [[Definition:Intersection of Set of Sets|intersection]], $n \in a$.
+Since $a$ is an [[Definition:ordinal|ordinal]], it is [[Definition:Transitive Set|transitive]].
+Thus $m \in a$.
+As this holds for all $a \in A$, $m \in i$.
+Thus $i$ is transitive.
+{{qed|lemma}}
+=== $\in$-connected ===
+Let $x,y \in i$.
+Since $A$ is non-empty, it has an element $a$.
+By the definition of [[Definition:Intersection of Set of Sets|intersection]], $x, y \in a$.
+Since $a$ is an [[Definition:ordinal|ordinal]], it is [[Definition:Connected Relation|$\in$-connected]].
+Thus $x \in y$ or $y \in x$.
+As this holds for all $x, y \in i$, $i$ is $\in$-connected.
+{{qed|lemma}}
+=== Well-founded ===
+Let $b$ be a non-empty [[Definition:subset|subset]] of $i$.
+Let $a \in A$ (such exists because $A$ is non-empty).
+By [[Intersection is Largest Subset]], $b \subseteq a$.
+Since $a$ is an [[Definition:ordinal|ordinal]] it is [[Definition:Well-Founded Set|well-founded]].
+Thus $b$ has an element $x$ such that $x \cap b = \varnothing$.
+As this holds for all such $b$, $i$ is well-founded.
+{{qed}}
+[[Category:Ordinals]]
+i2frgc6mpmaln0n9etgjv69mtis7d0u
+\end{proof}<|endoftext|>
+\section{Meet with Complement is Bottom}
+Tags: Boolean Algebras
+
+\begin{theorem}
+Let $\struct {S, \vee, \wedge, \neg}$ be a [[Definition:Boolean Algebra/Definition 2|Boolean algebra, defined as in Definition 2]].
+Then:
+:$\exists \bot \in S: \forall a \in S: a \wedge \neg a = \bot$
+where $\wedge$ denotes the [[Definition:Boolean Algebra|meet operation in $S$]].
+This element $\bot$ is [[Definition:Unique|unique]] for any given $S$, and is named '''bottom'''.
+\end{theorem}
+
+\begin{proof}
+Let $r, s \in S$, and let $a = r \wedge \neg r$ and $b = s \wedge \neg s$.
+Then:
+{{begin-eqn}}
+{{eqn | l = a
+ | r = r \wedge \neg r
+ | c = [[Definition:By Hypothesis|by hypothesis]]
+}}
+{{eqn | r = \paren {s \wedge \neg s} \vee \paren {r \wedge \neg r}
+ | c = [[Definition:Boolean Algebra/Axioms/Definition 2|Boolean Algebra: Axiom $(BA_2 \ 5)$]]
+}}
+{{eqn | r = \paren {r \wedge \neg r} \vee \paren {s \wedge \neg s}
+ | c = [[Definition:Boolean Algebra/Axioms/Definition 2|Boolean Algebra: Axiom $(BA_2 \ 1)$]]
+}}
+{{eqn | r = s \wedge \neg s
+ | c = [[Definition:Boolean Algebra/Axioms/Definition 2|Boolean Algebra: Axiom $(BA_2 \ 5)$]]
+}}
+{{eqn | r = b
+ | c = [[Definition:By Hypothesis|by hypothesis]]
+}}
+{{end-eqn}}
+Thus, whatever $r$ and $s$ may be:
+: $r \wedge \neg r = s \wedge \neg s$
+This [[Definition:Unique|unique]] element can be assigned the [[Definition:Symbol|symbol]] $\bot$ and named '''bottom''' as required.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Join with Complement is Top}
+Tags: Boolean Algebras
+
+\begin{theorem}
+Let $\struct {S, \vee, \wedge, \neg}$ be a [[Definition:Boolean Algebra/Definition 2|Boolean algebra, defined as in Definition 2]].
+Then:
+:$\exists \top \in S: \forall a \in S: a \vee \neg a = \top$
+where $\vee$ denotes the [[Definition:Boolean Algebra|join operation in $S$]].
+This element $\top$ is [[Definition:Unique|unique]] for any given $S$, and is named '''top'''.
+\end{theorem}
+
+\begin{proof}
+Let $r, s \in S$, and let $a = r \vee \neg r$ and $b = s \vee \neg s$.
+Then:
+{{begin-eqn}}
+{{eqn | l = a
+ | r = r \vee \neg r
+ | c = [[Definition:By Hypothesis|by hypothesis]]
+}}
+{{eqn | r = \paren {s \vee \neg s} \wedge \paren {r \vee \neg r}
+ | c = [[Definition:Boolean Algebra/Axioms/Definition 2|Boolean Algebra: Axiom $(\text {BA}_2 \ 5)$]]
+}}
+{{eqn | r = \paren {r \vee \neg r} \wedge \paren {s \vee \neg s}
+ | c = [[Definition:Boolean Algebra/Axioms/Definition 2|Boolean Algebra: Axiom $(\text {BA}_2 \ 1)$]]
+}}
+{{eqn | r = s \vee \neg s
+ | c = [[Definition:Boolean Algebra/Axioms/Definition 2|Boolean Algebra: Axiom $(\text {BA}_2 \ 5)$]]
+}}
+{{eqn | r = b
+ | c = [[Definition:By Hypothesis|by hypothesis]]
+}}
+{{end-eqn}}
+Thus, whatever $r$ and $s$ may be:
+:$r \vee \neg r = s \vee \neg s$
+This [[Definition:Unique|unique]] element can be assigned the [[Definition:Symbol|symbol]] $\top$ and named '''top''' as required.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Ordering is Equivalent to Subset Relation/Lemma}
+Tags: Order Theory
+
+\begin{theorem}
+Let $\struct {S, \preceq}$ be an [[Definition:Ordered Set|ordered set]].
+Then:
+: $\forall a_1, a_2 \in S: \paren {a_1 \preceq a_2 \implies {a_1}^\preceq \subseteq {a_2}^\preceq}$
+where ${a_1}^\preceq$ denotes the [[Definition:Lower Closure of Element|lower closure]] of $a_1$.
+\end{theorem}
+
+\begin{proof}
+Let $a_1 \preceq a_2$.
+Then by the definition of [[Definition:Lower Closure of Element|lower closure]]:
+:$a_1 \in {a_2}^\preceq$
+Let $a_3 \in {a_1}^\preceq$.
+Then by definition:
+:$a_3 \preceq a_1$
+As an [[Definition:Ordering|ordering]] is [[Definition:Transitive Relation|transitive]], it follows that:
+:$a_3 \preceq a_2$
+and so:
+:$a_3 \in {a_2}^\preceq$
+This holds for all $a_3 \in {a_1}^\preceq$.
+Thus by definition of [[Definition:Subset|subset]]:
+:${a_1}^\preceq \subseteq {a_2}^\preceq$
+{{qed}}
+[[Category:Order Theory]]
+lut06i1dlqz5c71uz5dlnzd7qpxmw5f
+\end{proof}<|endoftext|>
+\section{Smallest Element WRT Restricted Ordering}
+Tags: Order Theory
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]] or [[Definition:Class (Class Theory)|class]].
+Let $\preceq$ be an [[Definition:ordering|ordering]] on $S$.
+Let $T$ be a [[Definition:subset|subset]] or [[Definition:subclass|subclass]] of $S$.
+Let $\preceq'$ be the [[Definition:Restriction|restriction]] of $\preceq$ to $T$.
+Let $m \in T$.
+Then $m$ is the [[Definition:Smallest Element|$\preceq$-smallest element]] of $T$ [[Definition:iff|iff]] $m$ is the $\preceq'$-smallest element of $T$.
+\end{theorem}
+
+\begin{proof}
+{{proof wanted|The same sort of utterly trivial thing as at Minimal WRT Restriction}}
+[[Category:Order Theory]]
+7cev1xljea73ba870wn6hnwi9wa5yg8
+\end{proof}<|endoftext|>
+\section{Restriction to Subset of Strict Total Ordering is Strict Total Ordering}
+Tags: Order Theory
+
+\begin{theorem}
+Let $S$ be a [[Definition:Set|set]] or [[Definition:Class (Class Theory)|class]].
+Let $\prec$ be a [[Definition:Strict Total Ordering|strict total ordering]] on $S$.
+Let $T$ be a [[Definition:Subset|subset]] or [[Definition:Subclass|subclass]] of $S$.
+Then the [[Definition:Restriction of Relation|restriction]] of $\prec$ to $T$ is a [[Definition:Strict Total Ordering|strict total ordering]] of $T$.
+\end{theorem}
+
+\begin{proof}
+Follows from:
+: [[Restriction of Transitive Relation is Transitive]]
+: [[Restriction of Antireflexive Relation is Antireflexive]]
+: [[Restriction of Connected Relation is Connected]]
+{{qed}}
+[[Category:Order Theory]]
+5xe55ko8u2svkmxnc3vzi33rnk4odei
+\end{proof}<|endoftext|>
+\section{Restriction of Well-Founded Ordering}
+Tags: Order Theory
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]] or [[Definition:Class (Class Theory)|class]].
+Let $T$ be a [[Definition:subset|subset]] or [[Definition:subclass|subclass]] of $S$.
+Let $\preceq$ be a [[Definition:Well-Founded|well-founded]] [[Definition:ordering|ordering]] of $S$.
+Let $\preceq'$ be the [[Definition:Restriction of Ordering|restriction]] of $\preceq$ to $T$.
+Then $\preceq'$ is a well-founded ordering of $T$.
+\end{theorem}
+
+\begin{proof}
+By [[Restriction of Ordering is Ordering]], $\preceq'$ is an [[Definition:Ordering|ordering]].
+Let $A$ be a non-empty [[Definition:subset|subset]] of $T$.
+{{explain}}
+Then $A$ is a non-empty subset of $S$.
+Since $\preceq$ is [[Definition:Well-Founded|well-founded]], $A$ has a [[Definition:Minimal Element|minimal element]] $m$ with respect to $\preceq$.
+Let $x \in A$ and suppose $x \preceq' m$.
+Then by the definition of [[Definition:Restriction of Ordering|restriction]], $x \preceq m$.
+Thus by the definition of a minimal element, $x = m$.
+As this holds for all $x \in A$, $m$ is minimal in $A$ with respect to $\preceq'$.
+As this holds for all subsets $A$ of $T$, $\preceq'$ is a well-founded ordering of $T$.
+{{qed}}
+[[Category:Order Theory]]
+gxz77g5woviyhcod6vl0y0nmzpwee3y
+\end{proof}<|endoftext|>
+\section{Restriction of Well-Ordering is Well-Ordering}
+Tags: Well-Orderings
+
+\begin{theorem}
+Let $S$ be a [[Definition:set|set]] or [[Definition:Class (Class Theory)|class]].
+Let $\preceq$ be a [[Definition:Well-Ordering/Definition 2|well-ordering]] of $S$.
+Let $T$ be a [[Definition:subset|subset]] or [[Definition:subclass|subclass]] of $S$.
+Let $\preceq'$ be the [[Definition:Restriction of Ordering|restriction]] of $\preceq$ to $T$.
+Then $\preceq'$ well-orders $T$.
+\end{theorem}
+
+\begin{proof}
+By the definition of [[Definition:Well-Ordering/Definition 2|well-ordering]], $\preceq$ is a [[Definition:Well-Founded|well-founded]] [[Definition:Total Ordering|total ordering]].
+By [[Restriction of Total Ordering is Total Ordering]], $\preceq'$ is a [[Definition:Total Ordering|total ordering]].
+By [[Restriction of Well-Founded Ordering]], $\preceq'$ is [[Definition:Well-Founded|well-founded]].
+Thus $\preceq'$ is a [[Definition:Well-Ordering/Definition 2|well-ordering]].
+{{qed}}
+[[Category:Well-Orderings]]
+2ao0noil1turi1fr4eb23cc72f8hj8v
+\end{proof}<|endoftext|>
+\section{Ordering is Equivalent to Subset Relation/Proof 2}
+Tags: Ordering is Equivalent to Subset Relation
+
+\begin{theorem}
+{{:Ordering is Equivalent to Subset Relation}}
+Specifically:
+Let
+:$\mathbb S := \set {a^\preceq: a \in S}$
+where $a^\preceq$ is the [[Definition:Lower Closure of Element|lower closure]] of $a$.
+That is:
+:$a^\preceq := \set {b \in S: b \preceq a}$
+Let the [[Definition:Mapping|mapping]] $\phi: S \to \mathbb S$ be defined as:
+:$\map \phi a = a^\preceq$
+Then $\phi$ is an [[Definition:Order Isomorphism|order isomorphism]] from $\struct {S, \preceq}$ to $\struct {\mathbb S, \subseteq}$.
+\end{theorem}
+
+\begin{proof}
+From [[Subset Relation is Ordering]], we have that $\struct {\mathbb S, \subseteq}$ is an [[Definition:Ordered Set|ordered set]].
+We are to show that $\phi$ is an [[Definition:Order Isomorphism/Definition 2|order isomorphism]].
+$\phi$ is clearly [[Definition:Surjection|surjective]], as every $a^\preceq$ is defined from some $a \in S$.
+By the [[Ordering is Equivalent to Subset Relation/Lemma|Lemma]], $\phi$ is [[Definition:Order-Preserving Mapping|order-preserving]].
+Suppose that ${a_1}^\preceq \subseteq {a_2}^\preceq$.
+We have that:
+:$a_1 \in {a_1}^\preceq$
+Thus by definition of [[Definition:Subset|subset]]:
+:$a_1 \in {a_2}^\preceq$
+By definition of ${a_2}^\preceq$:
+:$a_1 \preceq a_2$
+Thus $\phi$ is also [[Definition:Order-Reflecting Mapping|order-reflecting]].
+Thus it follows that $\phi$ is an [[Definition:Order Isomorphism|order isomorphism]] between $\struct {S, \preceq}$ and $\struct {\mathbb S, \subseteq}$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Event Space contains Sample Space}
+Tags: Event Spaces
+
+\begin{theorem}
+:$\Omega \in \Sigma$
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = \Sigma
+ | o = \ne
+ | r = \O
+ | c = {{Defof|Event Space|Event Space: Axiom $(\text {ES} 1)$}}
+}}
+{{eqn | ll= \leadsto
+ | lo= \exists A:
+ | l = A
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Empty Set}}
+}}
+{{eqn | ll= \leadsto
+ | l = \Omega \setminus A
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Event Space|Event Space: Axiom $(\text {ES} 2)$}}
+}}
+{{eqn | ll= \leadsto
+ | l = A \cup \paren {\Omega \setminus A}
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Event Space|Event Space: Axiom $(\text {ES} 3)$}}
+}}
+{{eqn | ll= \leadsto
+ | l = \Omega
+ | o = \in
+ | r = \Sigma
+ | c = [[Union with Relative Complement]]
+}}
+{{end-eqn}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Event Space contains Empty Set}
+Tags: Event Spaces
+
+\begin{theorem}
+:$\O \in \Sigma$
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = \Sigma
+ | o = \ne
+ | r = \O
+ | c = {{Defof|Event Space|Event Space: Axiom $(\text {ES} 1)$}}
+}}
+{{eqn | ll= \leadsto
+ | lo= \exists A:
+ | l = A
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Empty Set}}
+}}
+{{eqn | ll= \leadsto
+ | l = A \setminus A
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Event Space|Event Space: Axiom $(\text {ES} 2)$}}
+}}
+{{eqn | ll= \leadsto
+ | l = \O
+ | o = \in
+ | r = \Sigma
+ | c = [[Set Difference with Self is Empty Set]]
+}}
+{{end-eqn}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Power Set of Sample Space is Event Space}
+Tags: Event Spaces, Power Set of Sample Space is Event Space
+
+\begin{theorem}
+Let $\EE$ be an [[Definition:Experiment|experiment]] whose [[Definition:Sample Space|sample space]] is $\Omega$.
+Let $\powerset \Omega$ be the [[Definition:Power Set|power set]] of $\Omega$.
+Then $\powerset \Omega$ is an [[Definition:Event Space|event space]] of $\EE$.
+\end{theorem}
+
+\begin{proof}
+Let $\powerset \Omega := \Sigma$.
+;[[Definition:Event Space|Event Space Axiom $(\text {ES} 1)$]]:
+From [[Empty Set is Subset of All Sets]] we have that $\O \subseteq \Omega$.
+By the definition of [[Definition:Power Set|power set]]:
+:$\O \in \Sigma$
+thus fulfilling [[Definition:Event Space|axiom $(\text {ES} 1)$]].
+{{qed|lemma}}
+;[[Definition:Event Space|Event Space Axiom $(\text {ES} 2)$]]:
+Let $A \in \Sigma$.
+Then by the definition of [[Definition:Power Set|power set]]:
+:$A \subseteq \Omega$
+From [[Set with Relative Complement forms Partition]]:
+:$\Omega \setminus A \subseteq \Omega$
+and so by the definition of [[Definition:Power Set|power set]]:
+:$\Omega \setminus A \in \Sigma$
+thus fulfilling [[Definition:Event Space|axiom $(\text {ES} 2)$]].
+{{qed|lemma}}
+;[[Definition:Event Space|Event Space Axiom $(\text {ES} 3)$]]:
+Let $\sequence {A_i}$ be a [[Definition:Countable Set|countably infinite]] [[Definition:Sequence|sequence]] of [[Definition:Set|sets]] in $\Sigma$.
+Then from [[Power Set is Closed under Countable Unions]]:
+:$\ds \bigcup_{i \mathop \in \N} A_i \in \Sigma$
+thus fulfilling [[Definition:Event Space|axiom $(\text {ES} 3)$]].
+{{qed|lemma}}
+All the [[Definition:Event Space|event space axioms]] are seen to be fulfilled by $\powerset \Omega$.
+Hence the result.
+{{qed}}
+\end{proof}
+
+\begin{proof}
+For $\powerset \Omega$ to be an [[Definition:Event Space|event space]] of $\EE$, it needs to fulfil the following properties:
+:$(1): \quad \powerset \Omega \ne \O$, that is, an event space can not be [[Definition:Empty Set|empty]].
+:$(2): \quad$ If $A \in \powerset \Omega$, then $\relcomp \Omega A \in \powerset \Omega$, that is, the [[Definition:Relative Complement|complement of $A$ relative to $\Omega$]], is also in $\powerset \Omega$.
+:$(3): \quad$ If $A_1, A_2, \ldots \in \powerset \Omega$, then $\ds \bigcup_{i \mathop = 1}^\infty A_i \in \powerset \Omega$, that is, the [[Definition:Set Union|union]] of any [[Definition:Countable|countable]] collection of [[Definition:Element|elements]] of $\powerset \Omega$ is also in $\powerset \Omega$.
+These all follow directly from [[Power Set is Sigma-Algebra]].
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Event Space from Single Subset of Sample Space}
+Tags: Probability Theory
+
+\begin{theorem}
+Let $\EE$ be an [[Definition:Experiment|experiment]] whose [[Definition:Sample Space|sample space]] is $\Omega$.
+Let $\O \subsetneqq A \subsetneqq \Omega$.
+Then $\Sigma := \set {\O, A, \Omega \setminus A, \Omega}$ is an [[Definition:Event Space|event space]] of $\EE$.
+\end{theorem}
+
+\begin{proof}
+;[[Definition:Event Space|Event Space Axiom $(\text {ES} 1)$]]:
+From its definition:
+:$\Sigma \ne \O$
+thus fulfilling [[Definition:Event Space|axiom $(\text {ES} 1)$]].
+{{qed|lemma}}
+;[[Definition:Event Space|Event Space Axiom $(\text {ES} 2)$]]:
+From [[Set Difference with Empty Set is Self]]:
+:$\Omega \setminus \O = \Omega \in \Sigma$
+From [[Set Difference with Self is Empty Set]]:
+:$\Omega \setminus \Omega = \O \in \Sigma$
+By definition:
+:$\Omega \setminus A \in \Sigma$
+From [[Relative Complement of Relative Complement]]:
+:$\Omega \setminus \paren {\Omega \setminus A} = A \in \Sigma$
+Thus [[Definition:Event Space|axiom $(\text {ES} 2)$]] is fulfilled.
+{{qed|lemma}}
+;[[Definition:Event Space|Event Space Axiom $(\text {ES} 3)$]]:
+From [[Union with Empty Set]]:
+:$\forall X \in \Sigma: X \cup \O = X \in \Sigma$
+From [[Union with Superset is Superset]]:
+:$\forall X \in \Sigma: X \cup \Omega = \Omega \in \Sigma$
+From [[Union is Idempotent]]:
+:$\forall X \in \Sigma: X \cup X = X \in \Sigma$
+From [[Union with Relative Complement]]:
+:$A \cup \paren {\Omega \setminus A} = \Omega \in \Sigma$
+It follows that [[Definition:Event Space|axiom $(\text {ES} 3)$]] is fulfilled.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Intersection of Events is Event}
+Tags: Intersections of Events, Event Spaces
+
+\begin{theorem}
+:$A, B \in \Sigma \implies A \cap B \in \Sigma$
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = A, B
+ | o = \in
+ | r = \Sigma
+ | c=
+}}
+{{eqn | ll= \leadsto
+ | l = \Omega \setminus A, \ \Omega \setminus B
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Event Space|Event Space: Axiom $(\text {ES} 2)$}}
+}}
+{{eqn | ll= \leadsto
+ | l = \paren {\Omega \setminus A} \cup \paren {\Omega \setminus B}
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Event Space|Event Space: Axiom $(\text {ES} 3)$}}
+}}
+{{eqn | ll= \leadsto
+ | l = \Omega \setminus \paren {A \cap B}
+ | o = \in
+ | r = \Sigma
+ | c = [[De Morgan's Laws (Set Theory)/Set Difference/Difference with Intersection|De Morgan's Laws: Difference with Intersection]]
+}}
+{{eqn | ll= \leadsto
+ | l = \Omega \setminus \paren {\Omega \setminus \paren {A \cap B} }
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Event Space|Event Space: Axiom $(\text {ES} 2)$}}
+}}
+{{eqn | ll= \leadsto
+ | l = A \cap B
+ | o = \in
+ | r = \Sigma
+ | c = [[Relative Complement of Relative Complement]]
+}}
+{{end-eqn}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Set Difference of Events is Event}
+Tags: Event Spaces
+
+\begin{theorem}
+:$A, B \in \Sigma \implies A \setminus B \in \Sigma$
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = A, B
+ | o = \in
+ | r = \Sigma
+ | c=
+}}
+{{eqn | ll= \leadsto
+ | l = A, \Omega \setminus B
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Event Space|Event Space: Axiom $(\text {ES} 2)$}}
+}}
+{{eqn | ll= \leadsto
+ | l = A \cap \paren {\Omega \setminus B}
+ | o = \in
+ | r = \Sigma
+ | c = [[Intersection of Events is Event]]
+}}
+{{eqn | ll= \leadsto
+ | l = A \setminus B
+ | o = \in
+ | r = \Sigma
+ | c = [[Set Difference as Intersection with Relative Complement]]
+}}
+{{end-eqn}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Composition of Affine Transformations is Affine Transformation}
+Tags: Affine Geometry
+
+\begin{theorem}
+Let $\EE$, $\FF$ and $\GG$ be [[Definition:Affine Space|affine spaces]] with [[Definition:Difference Space|difference spaces]] $E$, $F$ and $G$ respectively.
+Let $\LL: \EE \to \FF$ and $\MM: \FF \to \GG$ be [[Definition:Affine Transformation|affine transformations]].
+Let $L$ and $M$ be the [[Definition:Tangent Map of Affine Transformation|tangent maps]] of $\LL$ and $\MM$ respectively.
+Then the [[Definition:Composition of Mappings|composition]] $\MM \circ \LL: \EE \to \GG$ is an [[Definition:Affine Transformation|affine transformation]] with [[Definition:Tangent Map of Affine Transformation|tangent map]] $M \circ L$.
+\end{theorem}
+
+\begin{proof}
+Let $\NN = \MM \circ \LL : \EE \to \GG$ be the [[Definition:Composition of Mappings|composition]].
+We want to show that for any $p, q \in \EE$:
+:$\map \NN q = \map \NN p + \map {M \circ L} {\vec {p q} }$
+We find that:
+{{begin-eqn}}
+{{eqn | l = \map \NN q
+ | r = \map {\MM \circ \LL} q
+}}
+{{eqn | r = \map \MM {\map \LL p + \map L {\vec {p q} } }
+ | c = $\LL$ is an [[Definition:Affine Transformation|Affine Transformation]]
+}}
+{{end-eqn}}
+Now let:
+:$p' = \map \LL p$
+and:
+:$q' = \map \LL p + \map L {\vec {p q} }$
+so:
+:$\vec {p' q'} = \map L {\vec {p q} }$
+Then:
+{{begin-eqn}}
+{{eqn | l = \map \NN q
+ | r = \map \MM {q'}
+}}
+{{eqn | r = \map \MM {p'} + \map M {\vec {p' q'} }
+ | c = $\MM$ is an [[Definition:Affine Transformation|Affine Transformation]]
+}}
+{{eqn | r = \map \MM {\map \LL p} + \map M {\map L {\vec {p q} } }
+ | c = Definitions of $p'$ and $q'$
+}}
+{{eqn | r = \map {\MM \circ \LL} p + \map {M \circ L} {\vec {p q} }
+ | c =
+}}
+{{end-eqn}}
+as required.
+{{Qed}}
+[[Category:Affine Geometry]]
+\end{proof}<|endoftext|>
+\section{Symmetric Difference of Events is Event}
+Tags: Event Spaces
+
+\begin{theorem}
+:$A, B \in \Sigma \implies A \ast B \in \Sigma$
+\end{theorem}
+
+\begin{proof}
+{{begin-eqn}}
+{{eqn | l = A, B
+ | o = \in
+ | r = \Sigma
+ | c=
+}}
+{{eqn | ll= \leadsto
+ | l = A \cup B
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Event Space|Event Space: Axiom $(\text {ES} 3)$}}
+}}
+{{eqn | lo= \land
+ | l = A \cap B
+ | o = \in
+ | r = \Sigma
+ | c = [[Intersection of Events is Event]]
+}}
+{{eqn | ll= \leadsto
+ | l = \paren {A \cup B} \setminus \paren {A \cap B}
+ | o = \in
+ | r = \Sigma
+ | c = [[Set Difference of Events is Event]]
+}}
+{{eqn | ll= \leadsto
+ | l = A \ast B
+ | o = \in
+ | r = \Sigma
+ | c = {{Defof|Symmetric Difference|index = 2}}
+}}
+{{end-eqn}}
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Characterization of Affine Transformations}
+Tags: Affine Geometry
+
+\begin{theorem}
+Let $\mathcal E$ and $\mathcal F$ be [[Definition:Affine Space|affine spaces]] over a [[Definition:Field (Abstract Algebra)|field]] $k$.
+Let $\mathcal L: \mathcal E \to \mathcal F$ be a [[Definition:Mapping|mapping]].
+Then $\mathcal L$ is an [[Definition:Affine Transformation|affine transformation]] {{iff}} for all points $p, q \in \mathcal E$ and all $\lambda \in k$:
+:$\mathcal L \left({\lambda p + \left({1 - \lambda}\right) q}\right) = \lambda \mathcal L \left({p}\right) + \left({1 - \lambda}\right) \mathcal L \left({q}\right)$
+where $\lambda p + \left({1 - \lambda}\right) q$ and $\lambda \mathcal L \left({p}\right) + \left({1 - \lambda}\right) \mathcal L \left({q}\right)$ denote [[Definition:Barycenter|barycenters]].
+\end{theorem}
+
+\begin{proof}
+=== Sufficient Condition ===
+Let $\mathcal L$ be an affine transformation.
+Let $L$ be the [[Definition:Tangent Map of Affine Transformation|tangent map]].
+Let $r \in \mathcal E$ be any point.
+Then by definition we have:
+:$\lambda p + \left({1 - \lambda}\right) q = r + \lambda \vec{r p} + \left({1 - \lambda}\right) \vec{r q}$
+Thus we find:
+{{begin-eqn}}
+{{eqn | l = \mathcal L \left({\lambda p + \left({1 - \lambda}\right) q}\right)
+ | r = \mathcal L \left({r}\right) + L \left({\lambda \vec {r p} + \left({1 - \lambda}\right) \vec {r q} }\right)
+ | c = Definition of [[Definition:Affine Transformation|Affine Transformation]]
+}}
+{{eqn | r = \mathcal L \left({r}\right) + \lambda L \left({\vec {r p} }\right) + \left({1 - \lambda}\right) L \left({\vec {r q} }\right)
+ | c = since $L$ is [[Definition:Linear Transformation|linear]]
+}}
+{{eqn | r = \lambda \mathcal L \left({p}\right) + \left({1 - \lambda}\right) \mathcal L \left({q}\right)
+ | c = Definition of [[Definition:Barycenter|Barycenter]]
+}}
+{{end-eqn}}
+{{qed|lemma}}
+=== Necessary Condition ===
+Suppose that for all points $p, q \in \mathcal E$ and all $\lambda \in k$:
+:$\mathcal L \left({\lambda p + \left({1 - \lambda}\right) q}\right) = \lambda \mathcal L \left({p}\right) + \left({1 - \lambda}\right) \mathcal L \left({q}\right)$
+Let $E$ be the [[Definition:Difference Space|difference space]] of $\mathcal E$.
+[[Definition:Fixed Point|Fix a point]] $p \in \mathcal E$, and define for all $u \in E$:
+:$L\left(u\right) = \mathcal L\left(p + u\right) - \mathcal L\left(p\right)$
+Let $q = p + u$.
+Then:
+:$\mathcal L \left({q}\right) = \mathcal L \left({p}\right) + L \left({u}\right)$
+So to show that $\mathcal L$ is affine, we are required to prove that $L$ is [[Definition:Linear Transformation|linear]].
+That is, we want to show that for all $\lambda \in k$ and all $u, v \in E$:
+:$L \left({\lambda u}\right) = \lambda L \left({u}\right)$
+and:
+:$L \left({u + v}\right) = L \left({u}\right) + L \left({v}\right)$
+First of all:
+{{begin-eqn}}
+{{eqn | l = L \left({\lambda u}\right)
+ | r = \mathcal L \left({p + \lambda u}\right) - \mathcal L \left({p}\right)
+ | c = Definition of $L$
+}}
+{{eqn | r = \mathcal L \left({\left({1 - \lambda}\right) p + \lambda \left({p + u}\right)}\right) - \mathcal L \left({p}\right)
+ | c = Definition of [[Definition:Barycenter|Barycenter]]
+}}
+{{eqn | r = \left({1 - \lambda}\right) \mathcal L \left({p}\right) + \lambda \mathcal L \left({p + u}\right) - \mathcal L \left({p}\right)
+ | c = [[Definition:By Hypothesis|By Hypothesis]] on $\mathcal L$
+}}
+{{eqn | r = \lambda \left({\mathcal L \left({p + u}\right) - \mathcal L \left({p}\right)}\right)
+ | c =
+}}
+{{eqn | r = \lambda L \left({u}\right)
+ | c = Definition of $L$
+}}
+{{end-eqn}}
+Now it is to be shown that
+:$L \left({u + v}\right) = L \left({u}\right) + L \left({v}\right)$
+First:
+:$p + u + v = \dfrac 1 2 \left({p + 2 u}\right) + \dfrac 1 2 \left({p + 2 v}\right)$
+Now:
+{{begin-eqn}}
+{{eqn | l = \mathcal L \left({p + u + v}\right)
+ | r = \mathcal L \left({\frac 1 2 \left({p + 2 u}\right) + \frac 1 2 \left({p + 2 v}\right)}\right)
+ | c =
+}}
+{{eqn | r = \frac 1 2 \mathcal L \left({p + 2 u}\right) + \frac 1 2 \mathcal L \left({p + 2 v}\right)
+ | c = [[Definition:By Hypothesis|By Hypothesis]] on $\mathcal L$
+}}
+{{eqn | r = \frac 1 2 \left({\mathcal L \left({p + 2 u}\right) - \mathcal L \left({p}\right)}\right) + \frac 1 2 \left({ \mathcal L \left({p + 2 v}\right) - \mathcal L \left({p}\right)}\right) + \mathcal L \left({p}\right)
+ | c =
+}}
+{{eqn | r = \frac 1 2 L \left({2 u}\right) + \frac 1 2 L \left({2 v}\right) + \mathcal L \left({p}\right)
+ | c = Definition of $L$
+}}
+{{eqn | r = L \left({u}\right) + L \left({v}\right) + \mathcal L \left({p}\right)
+ | c = as $L$ preserves scalar multiples
+}}
+{{end-eqn}}
+From the above calculation:
+:$L \left({u + v}\right) = \mathcal L \left({p + u + v}\right) - \mathcal L \left({p}\right) = L \left({u}\right) + L \left({v}\right)$
+This shows that $L$ is linear, and therefore concludes the proof.
+{{Qed}}
+[[Category:Affine Geometry]]
+\end{proof}<|endoftext|>
+\section{Probability of Empty Event is Zero}
+Tags: Probability Theory
+
+\begin{theorem}
+:$\map \Pr \O = 0$
+\end{theorem}
+
+\begin{proof}
+From the conditions for $\Pr$ to be a [[Definition:Probability Measure|probability measure]], we have:
+:$(1): \quad \forall A \in \Sigma: 0 \le \map \Pr A$
+:$(2): \quad \map \Pr \Omega = 1$
+:$(3): \quad \displaystyle \map \Pr {\bigcup_{i \mathop \ge 1} A_i} = \sum_{i \mathop \ge 1} \map \Pr {A_i}$ where all $A_i$ are [[Definition:Pairwise Disjoint|pairwise disjoint]].
+From the definition of [[Definition:Event Space|event space]], we have:
+:$\Omega \in \Sigma$
+:$A \in \Sigma \implies \relcomp \Omega A \in \Sigma$
+From [[Intersection with Empty Set]]:
+:$\O \cap \Omega = \O$
+Therefore $\O$ and $\Omega$ are [[Definition:Pairwise Disjoint|pairwise disjoint]].
+From [[Union with Empty Set]]:
+:$\O \cup \Omega = \Omega$
+Therefore we have:
+{{begin-eqn}}
+{{eqn | l = \map \Pr \Omega
+ | r = \map \Pr {\O \cup \Omega}
+ | c =
+}}
+{{eqn | r = \map \Pr \O + \map \Pr \Omega
+ | c =
+}}
+{{end-eqn}}
+As $\map \Pr \Omega = 1$, it follows that $\map \Pr \O = 0$.
+{{qed}}
+\end{proof}<|endoftext|>
+\section{Ordinal Membership is Asymmetric}
+Tags: Ordinals
+
+\begin{theorem}
+Let $m$ and $n$ be [[Definition:Ordinal|ordinals]].
+Then it is not the case that $m \in n$ and $n \in m$.
+\end{theorem}
+
+\begin{proof}
+Suppose [[Proof by Contradiction|for the sake of contradiction]] that $m \in n$ and $n \in m$.
+Since $m$ is an [[Definition:Ordinal/Definition 1|ordinal]], it is [[Definition:Transitive Set|transitive]].
+Thus since $m \in n$ and $n \in m$, it follows that $m \in m$.
+But this contradicts [[Ordinal is not Element of Itself]].
+{{qed}}
+[[Category:Ordinals]]
+\end{proof}
\ No newline at end of file