% bookshelf/Bookshelf/Enderton/Logic.tex
\documentclass{report}
\input{../../preamble}
\makecode{../..}
\externaldocument[S:]{Set}
% Truth table start and final color
\definecolor{TTStart}{gray}{0.95}
\definecolor{TTEnd}{rgb}{1,1,0}
\colorlet{TTInvalid}{Salmon}
\newcolumntype{s}{>{\columncolor{TTStart}}c}
\newcolumntype{e}{>{\columncolor{TTEnd}}c}
\begin{document}
\header{A Mathematical Introduction to Logic}{Herbert B. Enderton}
\tableofcontents
\begingroup
\renewcommand\thechapter{R}
\chapter{Reference}%
\hyperlabel{chap:reference}
\section{\defined{Construction Sequence}}%
\hyperlabel{ref:construction-sequence}
A \textbf{construction sequence} is a \nameref{ref:finite-sequence}
$\ltuple{\epsilon_1}{\epsilon_n}$ of \nameref{ref:expression}s such that for
each $i \leq n$ we have at least one of
\begin{align*}
& \epsilon_i \text{ is a sentence symbol} \\
& \epsilon_i = \mathcal{E}_\neg(\epsilon_j) \text{ for some } j < i \\
& \epsilon_i = \mathcal{E}_\square(\epsilon_j, \epsilon_k)
\text{ for some } j < i, k < i
\end{align*}
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
\section{\defined{Expression}}%
\hyperlabel{ref:expression}
An \textbf{expression} is a \nameref{ref:finite-sequence} of symbols.
\section{\defined{Finite Sequence}}%
\hyperlabel{ref:finite-sequence}
$S$ is a \textbf{finite sequence} (or \textbf{string}) of members of set $A$
if and only if, for some positive integer $n$, we have
$S = \ltuple{x_1}{x_n}$, where each $x_i \in A$.
\section{\defined{Formula-Building Operations}}%
\hyperlabel{ref:formula-building-operations}
The \textbf{formula-building operations} (on expressions) are defined by the
equations:
\begin{align*}
\mathcal{E}_{\neg}(\alpha)
& = (\neg \alpha) \\
\mathcal{E}_{\land}(\alpha, \beta)
& = (\alpha \land \beta) \\
\mathcal{E}_{\lor}(\alpha, \beta)
& = (\alpha \lor \beta) \\
\mathcal{E}_{\Rightarrow}(\alpha, \beta)
& = (\alpha \Rightarrow \beta) \\
\mathcal{E}_{\Leftrightarrow}(\alpha, \beta)
& = (\alpha \Leftrightarrow \beta)
\end{align*}
\code*{Bookshelf/Enderton/Logic/Chapter\_1}
{Enderton.Logic.Chapter\_1.Wff}
\lean{Init/Prelude}
{Not}
\lean{Init/Prelude}
{And}
\lean{Init/Prelude}
{Or}
\lean{Init/Core}
{Iff}
\section{\defined{\texorpdfstring{$n$}{n}-tuple}}%
\hyperlabel{ref:n-tuple}
An \textbf{$n$-tuple} is recursively defined as
$$\ltuple{x_1}{x_{n+1}} = \tuple{\ltuple{x_1}{x_n}, x_{n+1}}$$
for $n > 1$.
We also define $\tuple{x} = x$.
\lean*{Init/Prelude}
{Prod}
\section{\defined{Tautological Implication}}%
\hyperlabel{ref:tautological-implication}
Consider a set $\Sigma$ of \nameref{ref:well-formed-formula}s and another wff
$\tau$.
$\Sigma$ \textbf{tautologically implies} $\tau$ (written $\Sigma \vDash \tau$)
if and only if every \nameref{ref:truth-assignment} for the sentence symbols
in $\Sigma$ and $\tau$ that satisfies every member of $\Sigma$ also
satisfies $\tau$.
If $\Sigma$ is singleton $\{\sigma\}$, then we write ``$\sigma \vDash \tau$'' in
place of ``$\{\sigma\} \vDash \tau$.''
If both $\sigma \vDash \tau$ and $\tau \vDash \sigma$, then $\sigma$ and
$\tau$ are said to be \textbf{tautologically equivalent} (written
$\sigma \vDash \Dashv \tau$).
\section{\defined{Truth Assignment}}%
\hyperlabel{ref:truth-assignment}
A \textbf{truth assignment} $v$ for a set $\mathcal{S}$ of sentence symbols is
a function $$v \colon \mathcal{S} \rightarrow \{F, T\}$$ assigning either
$T$ or $F$ to each symbol in $\mathcal{S}$.
\suitdivider
\noindent
Let $\bar{\mathcal{S}}$ be the set of \nameref{ref:well-formed-formula}s that
can be built up from $\mathcal{S}$ by the five
\nameref{ref:formula-building-operations}.
We define \textbf{extension} $\bar{v}$ of $v$,
$$\bar{v} \colon \bar{\mathcal{S}} \rightarrow \{F, T\},$$
as the function that satisfies the following conditions for any
$\alpha, \beta \in \bar{\mathcal{S}}$:
\begin{enumerate}[(1)]
\setcounter{enumi}{-1}
\item For any $A \in \mathcal{S}$, $\bar{v}(A) = v(A)$.
(Thus $\bar{v}$ is indeed an extension of $v$.)
\item $\bar{v}((\neg\alpha)) = \begin{cases}
T & \text{if } \bar{v}(\alpha) = F, \\
F & \text{otherwise}.
\end{cases}$
\item $\bar{v}((\alpha \land \beta)) = \begin{cases}
T & \text{if } \bar{v}(\alpha) = T \text{ and } \bar{v}(\beta) = T, \\
F & \text{otherwise}.
\end{cases}$
\item $\bar{v}((\alpha \lor \beta)) = \begin{cases}
T & \text{if } \bar{v}(\alpha) = T \text{ or }
\bar{v}(\beta) = T \text{ (or both)}, \\
F & \text{otherwise}.
\end{cases}$
\item $\bar{v}((\alpha \Rightarrow \beta)) = \begin{cases}
F & \text{if } \bar{v}(\alpha) = T \text{ and } \bar{v}(\beta) = F, \\
T & \text{otherwise}.
\end{cases}$
\item $\bar{v}((\alpha \Leftrightarrow \beta)) = \begin{cases}
T & \text{if } \bar{v}(\alpha) = \bar{v}(\beta), \\
F & \text{otherwise}.
\end{cases}$
\end{enumerate}
We say that truth assignment $v$ \textbf{satisfies} wff $\phi$ if and only if
$\bar{v}(\phi) = T$.
\lean*{Init/Prelude}
{True}
\lean{Init/Prelude}
{False}
\section{\defined{Well-Formed Formula}}%
\hyperlabel{ref:well-formed-formula}
A \textbf{well-formed formula} (wff) is an \nameref{ref:expression} that can
be built up from the sentence symbols by applying some finite number of
times the \nameref{ref:formula-building-operations}.
\code*{Bookshelf/Enderton/Logic/Chapter\_1}
{Enderton.Logic.Chapter\_1.Wff}
\endgroup
% Reset counter to mirror Enderton's book.
\setcounter{chapter}{0}
\addtocounter{chapter}{-1}
\chapter{Useful Facts About Sets}%
\hyperlabel{chap:useful-facts-about-sets}
\section{\unverified{Lemma 0A}}%
\hyperlabel{sec:lemma-0a}
%
\begin{lemma}[0A]
Assume that $\ltuple{x_1}{x_m} = \ltuple{y_1, \ldots, y_m}{y_{m+k}}$.
Then $x_1 = \ltuple{y_1}{y_{k+1}}$.
\end{lemma}
\begin{proof}
For natural number $m$, let $P(m)$ be the statement:
\begin{induction}
\hyperlabel{sec:lemma-0a-ih}
If $\ltuple{x_1}{x_m} = \ltuple{y_1, \ldots, y_m}{y_{m+k}}$
then $x_1 = \ltuple{y_1}{y_{k+1}}$.
\end{induction}
\noindent
We proceed by induction on $m$.
\paragraph{Base Case}%
Suppose $\tuple{x_1} = \ltuple{y_1}{y_{1 + k}}$.
By definition of an \nameref{ref:n-tuple}, $\tuple{x_1} = x_1$.
Thus $x_1 = \ltuple{y_1}{y_{k + 1}}$.
Hence $P(1)$ holds true.
\paragraph{Inductive Step}%
Suppose for $m \geq 1$ that $P(m)$ is true and assume
\begin{equation}
\hyperlabel{sec:lemma-0a-eq1}
\ltuple{x_1}{x_{m+1}} = \ltuple{y_1, \ldots, y_{m+1}}{y_{m+1+k}}.
\end{equation}
By definition of an \nameref{ref:n-tuple}, we can decompose
\eqref{sec:lemma-0a-eq1} into the following two identities
\begin{align*}
x_{m+1} & = y_{m+1+k} \\
\ltuple{x_1}{x_m} & = \ltuple{y_1}{y_{m+k}}.
\end{align*}
By \ihref{sec:lemma-0a-ih}, $P(m)$ implies $x_1 = \ltuple{y_1}{y_{k+1}}$.
Hence $P(m+1)$ holds true.
\paragraph{Conclusion}%
By induction, $P(m)$ holds true for all $m \geq 1$.
\end{proof}
\chapter{Sentential Logic}%
\hyperlabel{chap:sentential-logic}
\section{The Language of Sentential Logic}%
\hyperlabel{sec:language-sentential-logic}
\subsection{\unverified{Induction Principle}}%
\hyperlabel{sub:induction-principle-1}
%
\begin{theorem}
If $S$ is a set of \nameref{ref:well-formed-formula}s containing all the
sentence symbols and closed under all five
\nameref{ref:formula-building-operations}, then $S$ is the set of
\textit{all} wffs.
\end{theorem}
\code{Bookshelf/Enderton/Logic/Chapter\_1}
{Enderton.Logic.Chapter\_1.Wff.rec}
\begin{proof}
We note every well-formed formula can be characterized by a
\nameref{ref:construction-sequence}.
For natural number $m$, let $P(m)$ be the statement:
\begin{induction}
\hyperlabel{sub:induction-principle-1-ih}
Every wff characterized by a construction sequence of length $m$ is in
$S$.
\end{induction}
\noindent
We proceed by strong induction on $m$.
\paragraph{Base Case}%
Let $\phi$ denote a wff characterized by a construction sequence of length
$1$.
Then it must be that $\phi$ is a single sentence symbol.
By hypothesis, $S$ contains all the sentence symbols.
Thus $P(1)$ holds true.
\paragraph{Inductive Step}%
Suppose $P(0)$, $P(1)$, $\ldots$, $P(m)$ hold true and let $\phi$ denote
a wff characterized by a construction sequence of length $m + 1$.
By definition of a construction sequence, one of the following holds:
\begin{align}
& \phi \text{ is a sentence symbol}
& \hyperlabel{sub:induction-principle-1-eq1} \\
& \phi = \mathcal{E}_\neg(\epsilon_j)
\text{ for some } j < m + 1
& \hyperlabel{sub:induction-principle-1-eq2} \\
& \phi = \mathcal{E}_\square(\epsilon_j, \epsilon_k)
\text{ for some } j < m + 1, k < m + 1
& \hyperlabel{sub:induction-principle-1-eq3}
\end{align}
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
We consider each case in turn.
\subparagraph{\eqref{sub:induction-principle-1-eq1}}%
By hypothesis, all sentence symbols are in $S$.
Thus $\phi \in S$.
\subparagraph{\eqref{sub:induction-principle-1-eq2}}%
Suppose $\phi = \mathcal{E}_\neg(\epsilon_j)$ for some $j < m + 1$.
By \ihref{sub:induction-principle-1-ih}, $\epsilon_j$ is in $S$.
By hypothesis, $S$ is closed under $\mathcal{E}_\neg$.
Thus $\phi \in S$.
\subparagraph{\eqref{sub:induction-principle-1-eq3}}%
Suppose $\phi = \mathcal{E}_\square(\epsilon_j, \epsilon_k)$ for some
$j < m + 1, k < m + 1$.
By \ihref{sub:induction-principle-1-ih}, $\epsilon_j$ and $\epsilon_k$
are in $S$.
By hypothesis, $S$ is closed under $\mathcal{E}_\square$ for all
possible candidates of $\square$.
Thus $\phi \in S$.
\subparagraph{Subconclusion}%
Since the above three cases are exhaustive, $P(m + 1)$ holds.
\paragraph{Conclusion}%
By strong induction, $P(m)$ holds true for all natural numbers $m \geq 1$.
Since every well-formed formula is characterized by a construction
sequence, the set of all wffs is a subset of $S$.
Likewise, it obviously holds that $S$ is a subset of all wffs.
Thus $S$ is precisely the set of all wffs.
\end{proof}
\subsection{\unverified{Balanced Parentheses}}%
\hyperlabel{sub:balanced-parentheses}
\begin{lemma}
All \nameref{ref:well-formed-formula}s have an equal number of left and
right parentheses.
\end{lemma}
\begin{proof}
Define $$S = \{ \phi \mid
\phi \text{ is a wff with a balanced number of parentheses} \}.$$
We prove that (i) all the sentence symbols are members of $S$ and (ii)
$S$ is closed under the five \nameref{ref:formula-building-operations}.
We then conclude with (iii) the proof of the theorem statement.
\paragraph{(i)}%
\hyperlabel{par:balanced-parentheses-i}
By definition, well-formed formulas comprising a single sentence symbol
do not have any parentheses.
Thus all sentence symbols are members of $S$.
\paragraph{(ii)}%
\hyperlabel{par:balanced-parentheses-ii}
Let $\alpha, \beta \in S$.
By definition, $\mathcal{E}_{\neg}(\alpha) = (\neg\alpha)$.
Thus one additional left and right parenthesis is introduced.
Since $\alpha$ is assumed to have an equal number of left and right
parentheses, $\mathcal{E}_{\neg}(\alpha) \in S$.
Likewise,
$\mathcal{E}_{\square}(\alpha, \beta) = (\alpha \mathop{\square} \beta)$
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
Again, an additional left and right parenthesis is introduced.
Since $\alpha$ and $\beta$ are assumed to have a balanced number of
parentheses, $\mathcal{E}_{\square}(\alpha, \beta) \in S$.
Hence $S$ is closed under the five formula-building operations.
\paragraph{(iii)}%
By \nameref{par:balanced-parentheses-i} and
\nameref{par:balanced-parentheses-ii}, the
\nameref{sub:induction-principle-1} implies $S$ is the set of all wffs.
Thus all well-formed formulas have an equal number of left and right
parentheses.
\end{proof}
\subsection{\verified{Parentheses Count}}%
\hyperlabel{sub:parentheses-count}
\begin{lemma}
Let $\phi$ be a \nameref{ref:well-formed-formula} and $c$ be the number of
places at which a sentential connective symbol exists.
Then there are $2c$ parentheses in $\phi$.
\end{lemma}
\code{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.paren\_count\_double\_sentential\_count}
\begin{proof}
Define $$S = \{ \phi \mid
\phi \text{ is a wff with } 2c \text{ parentheses} \}.$$
We prove that (i) all the sentence symbols are members of $S$ and (ii)
$S$ is closed under the five \nameref{ref:formula-building-operations}.
We then conclude with (iii) the proof of the theorem statement.
\paragraph{(i)}%
\hyperlabel{par:parentheses-count-i}
A sentence symbol, by itself, has no sentential connectives.
Likewise, it has 0 parentheses.
Thus $S$ contains every sentence symbol.
\paragraph{(ii)}%
\hyperlabel{par:parentheses-count-ii}
Let $\alpha, \beta \in S$.
By definition, $\mathcal{E}_{\neg}(\alpha) = (\neg \alpha)$.
Then $\mathcal{E}_{\neg}(\alpha)$ introduces two additional parentheses
and one additional sentential connective symbol.
Thus $\mathcal{E}_{\neg}(\alpha) \in S$.
Likewise,
$\mathcal{E}_{\square}(\alpha, \beta) = (\alpha \mathop{\square} \beta)$
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
$\mathcal{E}_{\square}(\alpha, \beta)$ also introduces two additional
parentheses and one additional connective symbol.
Thus $\mathcal{E}_{\square}(\alpha, \beta) \in S$.
Hence $S$ is closed under the five formula-building operations.
\paragraph{(iii)}%
By \nameref{par:parentheses-count-i} and
\nameref{par:parentheses-count-ii}, the
\nameref{sub:induction-principle-1} implies $S$ is the set of all wffs.
Thus every wff $\phi$ has $2c$ parentheses, where $c$ denotes the
number of places at which a sentential connective symbol exists.
\end{proof}
\section{Truth Assignments}%
\hyperlabel{sec:truth-assignments}
\subsection{\sorry{Theorem 12A}}%
\hyperlabel{sub:theorem-12a}
\begin{theorem}[12A]
For any \nameref{ref:truth-assignment} $v$ for a set $\mathcal{S}$ there is
a unique extension
$\bar{v} \colon \bar{\mathcal{S}} \rightarrow \{F, T\}$.
\end{theorem}
\begin{proof}
TODO
\end{proof}
\subsection{\sorry{Compactness Theorem}}%
\hyperlabel{sub:compactness-theorem}
\begin{theorem}
Let $\Sigma$ be an infinite set of \nameref{ref:well-formed-formula}s such
that for any finite subset $\Sigma_0$ of $\Sigma$, there is a
\nameref{ref:truth-assignment} that satisfies every member of $\Sigma_0$.
Then there is a truth assignment that satisfies every member of $\Sigma$.
\end{theorem}
\begin{proof}
TODO
\end{proof}
\section{A Parsing Algorithm}%
\hyperlabel{sec:parsing-algorithmm}
\subsection{\unverified{Lemma 13A}}%
\hyperlabel{sub:lemma-13a}
\begin{lemma}[13A]
Every wff has the same number of left as right parentheses.
\end{lemma}
\begin{proof}
Refer to \nameref{sub:balanced-parentheses}.
\end{proof}
\subsection{\sorry{Lemma 13B}}%
\hyperlabel{sub:lemma-13b}
\begin{lemma}[13B]
Any proper initial segment of a wff contains an excess of left parentheses.
Thus no proper initial segment of a wff can itself be a wff.
\end{lemma}
\begin{proof}
TODO
\end{proof}
\section{Exercises 1}%
\hyperlabel{sec:exercises-1}
\subsection{\unverified{Exercise 1.1.1}}%
\hyperlabel{sub:exercise-1.1.1}
%
Give three sentences in English together with translations into our formal
language.
The sentences should be chosen so as to have an interesting structure, and the
translations should each contain 15 or more symbols.
\begin{answer}
We begin first with the English sentences:
\begin{enumerate}[(i)]
\item He can juggle beach balls, bowling pins, and hackysacks unless
he is tired, in which case he can only juggle beach balls.
\item
If Lauren goes to the movies with Sam, he will watch Barbie and
eat popcorn, but if Lauren does not, he will watch Oppenheimer and
eat gummy worms.
\item
Trees produce oxygen if they are alive and well, able to pull
nutrients from the earth, and receive ample water.
\end{enumerate}
\paragraph{(i)}%
We use the following translation: ``To juggle beach balls'' (B),
``to juggle bowling pins'' (P), ``to juggle hackysacks'' (H), and
``he is tired'' (T).
This yields the following translation:
$$(B \land ((\neg T) \Rightarrow (P \land H))).$$
\paragraph{(ii)}%
We use the following translation: ``Lauren goes to the movies'' (L),
``Sam will watch Oppenheimer'' (O), ``Sam will watch Barbie'' (B),
``Sam will eat popcorn'' (P), and ``Sam will eat gummy worms'' (G).
This yields the following translation:
$$(((L \land B) \land P) \lor (((\neg L) \land O) \land G)).$$
\paragraph{(iii)}%
We use the following translation: ``Trees produce oxygen'' (O),
``the tree is alive'' (A), ``the tree is well'' (W), ``can pull nutrients
from the earth'' (N), and ``receives ample water'' (R).
This yields the following translation:
$$(O \Leftrightarrow (((A \land W) \land N) \land R)).$$
\end{answer}
\subsection{\pending{Exercise 1.1.2}}%
\hyperlabel{sub:exercise-1.1.2}
%
Show that there are no wffs of length 2, 3, or 6, but that any other positive
length is possible.
\code*{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_1\_2\_i}
\code{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_1\_2\_ii}
%
\begin{proof}
Define $$S = \{ \phi \mid
\phi \text{ is a wff and the length of } \phi
\text{ is not } 2, 3, \text{ or } 6 \}.$$
We prove that (i) all the sentence symbols are members of $S$ and (ii)
$S$ is closed under the five \nameref{ref:formula-building-operations}.
We then conclude with (iii) the proof of the theorem statement.
\paragraph{(i)}%
\hyperlabel{par:exercise-1.1.2-i}
Sentence symbols, by definition, have length 1.
Thus every sentence symbol is a member of $S$.
\paragraph{(ii)}%
\hyperlabel{par:exercise-1.1.2-ii}
Define $L$ to be the length function mapping arbitrary wff to its length.
Let $\alpha, \beta \in S$.
Then $L(\alpha)$ and $L(\beta)$ each evaluate to 1, 4, 5, or a value
larger than 6.
By definition, $\mathcal{E}_{\neg}(\alpha) = (\neg \alpha)$.
Thus $L(\mathcal{E}_{\neg}(\alpha)) = L(\alpha) + 3$.
Enumerating through the possible values of $L(\alpha)$ shows
$\mathcal{E}_{\neg}(\alpha) \in S$.
Likewise,
$\mathcal{E}_{\square}(\alpha, \beta) = (\alpha \mathop{\square} \beta)$
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
Thus $L(\mathcal{E}_{\square}(\alpha, \beta)) = L(\alpha) + L(\beta) + 3$.
Again, enumerating through the possible values of $L(\alpha)$ and
$L(\beta)$ shows $\mathcal{E}_{\square}(\alpha, \beta) \in S$.
Hence $S$ is closed under the five formula-building operations.
\paragraph{(iii)}%
By \nameref{par:exercise-1.1.2-i} and \nameref{par:exercise-1.1.2-ii}, the
\nameref{sub:induction-principle-1} implies $S$ is the set of all wffs.
It remains to be shown that a wff of any positive length excluding 2, 3,
and 6 is possible.
Let $\phi_1 = A_1$, $\phi_2 = (A_1 \land A_2)$, and
$\phi_3 = ((A_1 \land A_2) \land A_3)$.
Note these are wffs of lengths 1, 5, and 9 respectively.
Then $n$ repeated applications of $\mathcal{E}_{\neg}$ yields wffs of
length $1 + 3n$, $5 + 3n$, and $9 + 3n$ respectively.
But
\begin{align*}
& \{ 1 + 3n \mid n \in \mathbb{N} \}, \\
& \{ 5 + 3n \mid n \in \mathbb{N} \}, \text{ and } \\
& \{ 9 + 3n \mid n \in \mathbb{N} \}
\end{align*}
form a \nameref{S:ref:partition} of set $\mathbb{N} - \{ 2, 3, 6 \}$.
Thus a wff of any other positive length besides 2, 3, and 6 is possible.
\end{proof}
\subsection{\verified{Exercise 1.1.3}}%
\hyperlabel{sub:exercise-1.1.3}
%
Let $\alpha$ be a wff; let $c$ be the number of places at which binary
connective symbols $(\land, \lor, \Rightarrow, \Leftrightarrow)$ occur in
$\alpha$; let $s$ be the number of places at which sentence symbols occur in
$\alpha$.
(For example, if $\alpha$ is $(A \Rightarrow (\neg A))$ then $c = 1$ and
$s = 2$.)
Show by using the induction principle that $s = c + 1$.
\code*{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_1\_3}
%
\begin{proof}
Define
\begin{equation}
\hyperlabel{sub:exercise-1.1.3-eq1}
S = \{\phi \mid \phi \text{ is a wff such that } s = c + 1\}.
\end{equation}
We prove that (i) all the sentence symbols are members of $S$ and (ii)
$S$ is closed under the five \nameref{ref:formula-building-operations}.
We then conclude with (iii) the proof of the theorem statement.
\paragraph{(i)}%
\hyperlabel{par:exercise-1.1.3-i}
Let $\phi = A_n$ be an arbitrary sentence symbol.
The number of places at which sentence symbols occur in $\phi$ is 1.
The number of places at which binary connective symbols occur in $\phi$ is
0.
Hence $\phi \in S$.
\paragraph{(ii)}%
\hyperlabel{par:exercise-1.1.3-ii}
Let $\alpha, \beta \in S$.
Denote the number of places at which sentence symbols occur in each as
$s_\alpha$ and $s_\beta$ respectively.
Likewise, denote the number of places at which binary connective symbols
occur as $c_\alpha$ and $c_\beta$.
By definition, $\mathcal{E}_{\neg}(\alpha) = (\neg\alpha)$.
The number of sentence and binary connective symbols in
$\mathcal{E}_{\neg}(\alpha)$ does not change.
Thus $\mathcal{E}_{\neg}(\alpha) \in S$.
Likewise,
$\mathcal{E}_{\square}(\alpha, \beta) = (\alpha \mathop{\square} \beta)$
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
Therefore $\mathcal{E}_{\square}(\alpha, \beta)$ has $s_\alpha + s_\beta$
sentence symbols and $c_\alpha + c_\beta + 1$ binary connective symbols.
But \eqref{sub:exercise-1.1.3-eq1} implies
\begin{align*}
s_\alpha + s_\beta
& = (c_\alpha + 1) + (c_\beta + 1) \\
& = (c_\alpha + c_\beta + 1) + 1,
\end{align*}
meaning $\mathcal{E}_{\square}(\alpha, \beta) \in S$.
Hence $S$ is closed under the five formula-building operations.
\paragraph{(iii)}%
\hyperlabel{par:exercise-1.1.3-iii}
By \nameref{par:exercise-1.1.3-i} and \nameref{par:exercise-1.1.3-ii}, the
\nameref{sub:induction-principle-1} indicates $S$ is the set of all
wffs.
\end{proof}
\subsection{\unverified{Exercise 1.1.4}}%
\hyperlabel{sub:exercise-1.1.4}
%
Assume we have a construction sequence ending in $\phi$, where $\phi$ does not
contain the symbol $A_4$.
Suppose we delete all the expressions in the construction sequence that
contain $A_4$.
Show that the result is still a legal construction sequence.
%
\begin{proof}
Let $S$ denote a \nameref{ref:construction-sequence}
$\ltuple{\epsilon_1}{\epsilon_n}$ such that $\epsilon_n = \phi$.
Let $S' = \ltuple{\epsilon_{i_1}}{\epsilon_{i_m}}$ denote the construction
sequence resulting from deleting all expressions in $S$ containing $A_4$.
Fix $1 \leq j \leq m$.
Then there exists some $1 \leq k \leq n$ such that
$\epsilon_{i_j} = \epsilon_k$.
By definition of a construction sequence, there are three cases to consider:
\paragraph{Case 1}%
Suppose $\epsilon_k$ is a sentence symbol.
Then $\epsilon_{i_j}$ is also a sentence symbol.
\paragraph{Case 2}%
Suppose $\epsilon_k = \mathcal{E}_{\neg}(\epsilon_a)$ for some $a < k$.
It must be that $A_4$ is not found in $\epsilon_a$, else an immediate
contradiction is raised.
Therefore $\epsilon_a$ is a member of $S'$ that precedes $\epsilon_{i_j}$.
Hence $\epsilon_{i_j} = \mathcal{E}_{\neg}(\epsilon_{i_a})$ for some
$a < j$.
\paragraph{Case 3}%
Suppose $\epsilon_k = \mathcal{E}_{\square}(\epsilon_a, \epsilon_b)$ for
some $a, b < k$ where $\square$ is one of the binary connectives
$\land$, $\lor$, $\Rightarrow$, $\Leftrightarrow$.
It must be that $A_4$ is found in neither $\epsilon_a$ nor $\epsilon_b$,
else an immediate contradiction is raised.
Therefore $\epsilon_a$ and $\epsilon_b$ are members of $S'$, both of which
precede $\epsilon_{i_j}$.
Hence
$\epsilon_{i_j} = \mathcal{E}_{\square}(\epsilon_{i_a}, \epsilon_{i_b})$
for some $a, b < j$.
\paragraph{Conclusion}%
Since the above cases are exhaustive and apply to an arbitrary member of
$S'$, it must be that every member of $S'$ is valid.
Hence $S'$ is still a legal construction sequence.
\end{proof}
\subsection{\verified{Exercise 1.1.5}}%
\hyperlabel{sub:exercise-1.1.5}
%
Suppose that $\alpha$ is a wff not containing the negation symbol $\neg$.
\subsubsection{\verified{Exercise 1.1.5a}}%
\hyperlabel{ssub:exercise-1.1.5a}
Show that the length of $\alpha$ (i.e., the number of symbols in the string)
is odd.
%
\textit{Suggestion}: Apply induction to show that the length is of the form
$4k + 1$.
\code*{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_1\_5a}
\begin{proof}
Define $L$ to be the length function mapping arbitrary
\nameref{ref:well-formed-formula} to its length and let
\begin{equation}
\hyperlabel{ssub:exercise-1.1.5a-eq1}
S = \{\phi \mid
\phi \text{ is a wff containing } \neg \text{ or }
\exists k \in \mathbb{N}, L(\phi) = 4k + 1\}.
\end{equation}
We prove that (i) all the sentence symbols are members of $S$ and (ii)
$S$ is closed under the five \nameref{ref:formula-building-operations}.
We then conclude with (iii) the proof of the theorem statement.
\paragraph{(i)}%
\hyperlabel{par:exercise-1.1.5a-i}
Every sentence symbol has length 1 by definition.
That is, every sentence symbol has length $(4)(0) + 1$.
Hence $S$ contains every sentence symbol.
\paragraph{(ii)}%
\hyperlabel{par:exercise-1.1.5a-ii}
Let $\alpha, \beta \in S$.
Then there exists some $k_\alpha$ and $k_\beta$ such that
$L(\alpha) = 4k_\alpha + 1$ and $L(\beta) = 4k_\beta + 1$.
Clearly $S$ is closed under $\mathcal{E}_{\neg}$.
Next consider
$\mathcal{E}_{\square}(\alpha, \beta) = (\alpha \mathop{\square} \beta)$
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
Then
\begin{align*}
L(\mathcal{E}_{\square}(\alpha, \beta))
& = L(\alpha) + L(\beta) + 3 \\
& = (4k_\alpha + 1) + (4k_\beta + 1) + 3 \\
& = 4k_\alpha + 4k_\beta + 4 + 1 \\
& = 4(k_\alpha + k_\beta + 1) + 1.
\end{align*}
Therefore, there exists a $k \in \mathbb{N}$, namely
$k = k_\alpha + k_\beta + 1$, such that
$L(\mathcal{E}_{\square}(\alpha, \beta)) = 4k + 1$.
Hence $S$ is closed under the five formula-building operations.
\paragraph{(iii)}%
By \nameref{par:exercise-1.1.5a-i} and \nameref{par:exercise-1.1.5a-ii},
the \nameref{sub:induction-principle-1} indicates $S$ is the set of all
wffs.
Thus all well-formed formulas not containing symbol $\neg$ have length
$4k + 1$ for some $k \in \mathbb{N}$.
Therefore these well-formed formulas have odd length.
\end{proof}
\subsubsection{\verified{Exercise 1.1.5b}}%
\hyperlabel{ssub:exercise-1.1.5b}
Show that more than a quarter of the symbols are sentence symbols.
\textit{Suggestion}: Apply induction to show that the number of sentence
symbols is of the form $k + 1$.
\code*{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_1\_5b}
\begin{proof}
Let $\phi$ be a \nameref{ref:well-formed-formula}.
By \nameref{sub:exercise-1.1.3}, the number of sentence symbols of $\phi$ is
$k + 1$, where $k$ is the number of places at which binary connective
symbols occur in $\phi$.
By \nameref{sub:parentheses-count}, the number of parentheses in $\phi$ is
$2k$.
Thus $\phi$ has length $(k + 1) + k + 2k = 4k + 1$.
But $$\frac{k + 1}{4k + 1} > \frac{k + 1}{4k + 4} = \frac{1}{4}.$$
Hence more than a quarter of the symbols of $\phi$ are sentence symbols.
\end{proof}
\subsection{\verified{Exercise 1.2.1}}%
\hyperlabel{sub:exercise-1.2.1}
Show that neither of the following two formulas tautologically implies the
other:
\begin{gather}
(A \Leftrightarrow (B \Leftrightarrow C)),
\hyperlabel{sub:exercise-1.2.1-eq1} \\
((A \land (B \land C)) \lor ((\neg A) \land ((\neg B) \land (\neg C)))).
\hyperlabel{sub:exercise-1.2.1-eq2}
\end{gather}
\textit{Suggestion}: Only two \nameref{ref:truth-assignment}s are needed, not
eight.
\code*{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_1\_i}
\code{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_1\_ii}
\begin{proof}
First, suppose $A = T$, $B = F$, and $C = F$.
Then \eqref{sub:exercise-1.2.1-eq1} evaluates to $T$ but
\eqref{sub:exercise-1.2.1-eq2} evaluates to $F$.
Therefore $\eqref{sub:exercise-1.2.1-eq1} \not\vDash
\eqref{sub:exercise-1.2.1-eq2}$.
Next, suppose $A = F$, $B = F$, and $C = F$.
Then \eqref{sub:exercise-1.2.1-eq2} evaluates to $T$ but
\eqref{sub:exercise-1.2.1-eq1} evaluates to $F$.
Therefore $\eqref{sub:exercise-1.2.1-eq2} \not\vDash
\eqref{sub:exercise-1.2.1-eq1}$.
\end{proof}
\subsection{\verified{Exercise 1.2.2a}}%
\hyperlabel{sub:exercise-1.2.2a}
Is $(((P \Rightarrow Q) \Rightarrow P) \Rightarrow P)$ a tautology?
\code*{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_2a}
\begin{proof}
Yes.
To prove, consider the following truth table:
$$\begin{array}{s|c|s|c|s|e|s}
(((P & \Rightarrow & Q) & \Rightarrow & P) & \Rightarrow & P) \\
\hline
T & T & T & T & T & T & T \\
T & F & F & T & T & T & T \\
F & T & T & F & F & T & F \\
F & T & F & F & F & T & F
\end{array}$$
\end{proof}
\subsection{\verified{Exercise 1.2.2b}}%
\hyperlabel{sub:exercise-1.2.2b}
Define $\sigma_k$ recursively as follows: $\sigma_0 = (P \Rightarrow Q)$ and
$\sigma_{k + 1} = (\sigma_k \Rightarrow P)$. For which values of $k$ is
$\sigma_k$ a tautology? (Part (a) corresponds to $k = 2$.)
\code*{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_2b\_i}
\code{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_2b\_ii}
\code{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_2b\_iii}
\begin{proof}
We prove that $\sigma_k$ is a tautology if and only if $k$ is an even
integer greater than zero.
To do so, we show (i) that $\sigma_k$ is a tautology for all even $k > 0$,
(ii) $\sigma_0$ is not a tautology, and (iii) $\sigma_k$ is not a
tautology for all odd $k$.
\paragraph{(i)}%
\hyperlabel{par:exercise-1.2.2b-i}
Let $P(k)$ be the predicate, "$\sigma_k$ is a tautology."
We prove $P(k)$ holds true for all even $k > 0$ via induction.
\subparagraph{Base Case}%
Let $k = 2$.
By definition,
$$\sigma_2 = (((P \Rightarrow Q) \Rightarrow P) \Rightarrow P).$$
\nameref{sub:exercise-1.2.2a} indicates $\sigma_2$ is a tautology.
Hence $P(2)$ is true.
\subparagraph{Inductive Step}%
Suppose $P(k)$ holds for some even $k > 0$.
By definition,
$$\sigma_{k + 2} = ((\sigma_{k} \Rightarrow P) \Rightarrow P).$$
Consider the truth table of the above:
$$\begin{array}{c|c|s|e|s}
((\sigma_k & \Rightarrow & P) & \Rightarrow & P) \\
\hline
T & T & T & T & T \\
T & T & T & T & T \\
T & F & F & T & F \\
T & F & F & T & F
\end{array}$$
This shows $\sigma_{k+2}$ is a tautology.
Hence $P(k + 2)$ is true.
\subparagraph{Subconclusion}%
By induction, $P(k)$ is true for all even $k > 0$.
\paragraph{(ii)}%
By definition, $$\sigma_0 = (P \Rightarrow Q).$$
This is clearly not a tautology since $\sigma_0$ evaluates to $F$ when
$P = T$ and $Q = F$.
\paragraph{(iii)}%
Let $k > 0$ be an odd natural number.
There are two cases to consider:
\subparagraph{Case 1}%
Suppose $k = 1$.
Then $\sigma_k = \sigma_1 = ((P \Rightarrow Q) \Rightarrow P)$.
The following truth table shows $\sigma_1$ is not a tautology:
$$\begin{array}{s|c|s|e|s}
((P & \Rightarrow & Q) & \Rightarrow & P) \\
\hline
T & T & T & T & T \\
T & F & F & T & T \\
F & T & T & F & F \\
F & T & F & F & F
\end{array}$$
\subparagraph{Case 2}%
Suppose $k > 1$.
Then $k - 1 > 0$ is an even number.
By definition, $$\sigma_k = (\sigma_{k-1} \Rightarrow P).$$
By \nameref{par:exercise-1.2.2b-i}, $\sigma_{k-1}$ is a tautology.
The following truth table shows $\sigma_k$ is not:
$$\begin{array}{c|e|s}
(\sigma_{k-1} & \Rightarrow & P) \\
\hline
T & T & T \\
T & T & T \\
T & F & F \\
T & F & F
\end{array}$$
\end{proof}
\subsection{\verified{Exercise 1.2.3a}}%
\hyperlabel{sub:exercise-1.2.3a}
Determine whether or not $((P \Rightarrow Q) \lor (Q \Rightarrow P))$ is a
tautology.
\code*{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_3a}
\begin{proof}
Consider the following truth table:
$$\begin{array}{s|c|s|e|s|c|s}
((P & \Rightarrow & Q) & \lor & (Q & \Rightarrow & P)) \\
\hline
T & T & T & T & T & T & T \\
T & F & F & T & F & T & T \\
F & T & T & T & T & F & F \\
F & T & F & T & F & T & F
\end{array}$$
The above makes it immediately evident that
$((P \Rightarrow Q) \lor (Q \Rightarrow P))$ is a tautology.
\end{proof}
\subsection{\verified{Exercise 1.2.3b}}%
\hyperlabel{sub:exercise-1.2.3b}
Determine whether or not $((P \land Q) \Rightarrow R)$ tautologically implies
$((P \Rightarrow R) \lor (Q \Rightarrow R))$.
\code*{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_3b}
\begin{proof}
Consider the following truth table:
$$\begin{array}{s|s|s|e|e}
P & Q & R &
((P \land Q) \Rightarrow R) &
((P \Rightarrow R) \lor (Q \Rightarrow R)) \\
\hline
T & T & T & T & T \\
T & T & F & F & F \\
T & F & T & T & T \\
T & F & F & T & T \\
F & T & T & T & T \\
F & T & F & T & T \\
F & F & T & T & T \\
F & F & F & T & T
\end{array}$$
The above makes it immediately evident that
$((P \land Q) \Rightarrow R)$ tautologically implies
$((P \Rightarrow R) \lor (Q \Rightarrow R))$.
\end{proof}
\subsection{\unverified{Exercise 1.2.4}}%
\hyperlabel{sub:exercise-1.2.4}
Show that the following hold:
\begin{enumerate}[(a)]
\item $\Sigma; \alpha \vDash \beta$ iff
$\Sigma \vDash (\alpha \Rightarrow \beta)$.
\item $\alpha \vDash \Dashv \beta$ iff
$\vDash (\alpha \Leftrightarrow \beta)$.
\end{enumerate}
(Recall that $\Sigma; \alpha = \Sigma \cup \{\alpha\}$, the set $\Sigma$
together with the one possibly new member $\alpha$.)
\begin{proof}
\paragraph{(a)}%
We prove each direction of the biconditional.
\subparagraph{($\Rightarrow$)}%
Assume $\Sigma; \alpha \vDash \beta$.
Let $v$ be a truth assignment for the sentence symbols in
$\Sigma; \alpha$ and $\beta$.
Then if $v$ satisfies every member of $\Sigma$ and $\alpha$, it must
also satisfy $\beta$.
Denote $\bar{v}(\Sigma)$ as the proposition that $v$ satisfies every
member of $\Sigma$ and consider the following truth table:
$$\begin{array}{s|s|s|e}
\bar{v}(\Sigma) & \bar{v}(\alpha) & \bar{v}(\beta) &
\bar{v}((\alpha \Rightarrow \beta)) \\
\hline
T & T & T & T \\
\rowcolor{TTInvalid}
T & T & F & F \\
T & F & T & T \\
T & F & F & T \\
F & T & T & T \\
F & T & F & F \\
F & F & T & T \\
F & F & F & T
\end{array}$$
The red row denotes a contradiction: it is not possible for
$\bar{v}(\Sigma)$ and $\bar{v}(\alpha)$ to be true but
$\bar{v}(\beta)$ to be false.
All remaining rows show that when $\bar{v}(\Sigma)$ is true, so is
$\bar{v}((\alpha \Rightarrow \beta))$.
Thus $\Sigma \vDash (\alpha \Rightarrow \beta)$.
\subparagraph{($\Leftarrow$)}%
Assume $\Sigma \vDash (\alpha \Rightarrow \beta)$.
Let $v$ be a \nameref{ref:truth-assignment} for the sentence symbols in
$\Sigma$, $\alpha$, and $\beta$.
Then if $v$ satisfies every member of $\Sigma$, it must also satisfy
$(\alpha \Rightarrow \beta)$.
Denote $\bar{v}(\Sigma)$ as the proposition that $v$ satisfies every
member of $\Sigma$.
By definition, $\bar{v}((\alpha \Rightarrow \beta)) = T$ if and only if
$\bar{v}(\alpha) = F$ or $\bar{v}(\alpha)$ and $\bar{v}(\beta)$ are
both true.
Thus the only situation in which both $\bar{v}(\Sigma) = T$ and
$\bar{v}(\alpha) = T$ corresponds to when $\bar{v}(\beta) = T$.
Hence $\Sigma; \alpha \vDash \beta$.
\paragraph{(b)}%
We prove each direction of the biconditional.
\subparagraph{($\Rightarrow$)}%
Suppose $\alpha \vDash \Dashv \beta$.
Let $v$ be a \nameref{ref:truth-assignment} for the sentence symbols in
$\alpha$ and $\beta$.
Consider the following truth table:
$$\begin{array}{s|e|s}
(\alpha & \Leftrightarrow & \beta) \\
\hline
T & T & T \\
\rowcolor{TTInvalid}
T & F & F \\
\rowcolor{TTInvalid}
F & F & T \\
F & T & F
\end{array}$$
The red rows indicate possibilities that cannot occur, for
$\alpha \vDash \beta$ and $\beta \vDash \alpha$ by hypothesis.
Of the remaining rows, $(\alpha \Leftrightarrow \beta)$ is true.
Hence $\vDash (\alpha \Leftrightarrow \beta)$.
\subparagraph{($\Leftarrow$)}%
Assume $\vDash (\alpha \Leftrightarrow \beta)$.
Let $v$ be a \nameref{ref:truth-assignment} for the sentence symbols in
$\alpha$ and $\beta$.
By definition, $\bar{v}((\alpha \Leftrightarrow \beta)) = T$ if and only
if $\bar{v}(\alpha) = \bar{v}(\beta)$.
Thus if $\bar{v}(\alpha)$ is true, so must $\bar{v}(\beta)$.
That is, $\alpha \vDash \beta$.
Likewise, if $\bar{v}(\beta)$ is true, so must $\bar{v}(\alpha)$.
Therefore $\beta \vDash \alpha$.
Hence $\alpha \vDash \Dashv \beta$.
\end{proof}
\subsection{\verified{Exercise 1.2.5}}%
\hyperlabel{sub:exercise-1.2.5}
Prove or refute each of the following assertions:
\begin{enumerate}[(a)]
\item If either $\Sigma \vDash \alpha$ or $\Sigma \vDash \beta$, then
$\Sigma \vDash (\alpha \lor \beta)$.
\item If $\Sigma \vDash (\alpha \lor \beta)$, then either
$\Sigma \vDash \alpha$ or $\Sigma \vDash \beta$.
\end{enumerate}
\code{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_5a}
\code{Enderton.Logic.Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_5b}
\begin{proof}
\paragraph{(a)}%
WLOG, suppose $\Sigma \vDash \alpha$.
That is, every truth assignment for sentence symbols found in $\Sigma$ and
$\alpha$ that satisfies every member of $\Sigma$ also satisfies
$\alpha$.
Let $v$ be one of these truth assignments.
Denote $\bar{v}(\Sigma)$ as the proposition that $v$ satisfies every
member of $\Sigma$ and consider the following truth table:
$$\begin{array}{s|s|s|e}
\bar{v}(\Sigma) & \bar{v}(\alpha) & \bar{v}(\beta) &
\bar{v}((\alpha \lor \beta)) \\
\hline
T & T & T & T \\
T & T & F & T \\
\rowcolor{TTInvalid}
T & F & T & T \\
\rowcolor{TTInvalid}
T & F & F & F \\
F & T & T & T \\
F & T & F & T \\
F & F & T & T \\
F & F & F & F
\end{array}$$
The red rows indicate possibilities that cannot occur since
$\Sigma \vDash \alpha$ by hypothesis.
All remaining rows show that when $\bar{v}(\Sigma)$ is true, so is
$\bar{v}((\alpha \lor \beta))$.
Hence $\Sigma \vDash (\alpha \lor \beta)$.
\paragraph{(b)}%
We proceed by counterexample.
Suppose $\Sigma = \emptyset$.
That is, assume $(\alpha \lor \beta)$ is a tautology, i.e.
$\vDash (\alpha \lor \beta)$.
Consider the following truth table:
$$\begin{array}{s|e|s}
(\alpha & \lor & \beta) \\
\hline
T & T & T \\
T & T & F \\
F & T & T \\
\rowcolor{TTInvalid}
F & F & F
\end{array}$$
The red row indicates an impossibility, since $(\alpha \lor \beta)$ should
always be true by hypothesis.
But this table also clearly demonstrates that $\not\vDash \alpha$ and
$\not\vDash \beta$.
For a concrete counterexample, take $\alpha = A$ and $\beta = (\neg A)$:
then $\vDash (A \lor (\neg A))$, yet $\not\vDash A$ and
$\not\vDash (\neg A)$.
Thus the conditional statement proposed must not be generally true.
\end{proof}
\subsection{\unverified{Exercise 1.2.6a}}%
\hyperlabel{sub:exercise-1.2.6a}
Show that if $v_1$ and $v_2$ are \nameref{ref:truth-assignment}s which agree
on all the sentence symbols in the wff $\alpha$, then
$\bar{v}_1(\alpha) = \bar{v}_2(\alpha)$.
Use the \nameref{sub:induction-principle-1}.
\begin{proof}
Let $\sigma$ map a \nameref{ref:well-formed-formula} $\phi$ to the set of
sentence symbols found in $\phi$.
Define
\begin{equation}
\hyperlabel{sub:exercise-1.2.6a-eq1}
S = \{\phi \mid ((\sigma(\phi) = \sigma(\alpha)) \Rightarrow
(\bar{v}_1(\phi) = \bar{v}_2(\phi)))\}.
\end{equation}
We prove that (i) $S$ contains the set of sentence symbols and (ii)
$S$ is closed under the five \nameref{ref:formula-building-operations}.
Afterward we show that (iii) our theorem statement holds.
\paragraph{(i)}%
\hyperlabel{par:exercise-1.2.6a-i}
Let $A_n$ denote an arbitrary sentence symbol.
Suppose $\sigma(A_n) = \{A_n\} = \sigma(\alpha)$.
But then $\bar{v}_1(A_n) = \bar{v}_2(A_n)$ since, by hypothesis, $v_1$
and $v_2$ agree on all the sentence symbols found in $\alpha$.
Hence $S$ contains all the sentence symbols.
\paragraph{(ii)}%
\hyperlabel{par:exercise-1.2.6a-ii}
Let $\beta, \gamma \in S$.
There are three cases to consider:
\subparagraph{Case 1}%
\hyperlabel{spar:exercise-1.2.6a-ii-1}
Suppose $\sigma(\beta) \neq \sigma(\alpha)$.
By definition, $\mathcal{E}_{\neg}(\beta) = (\neg\beta)$.
Then clearly $\sigma(\mathcal{E}_{\neg}(\beta)) \neq \sigma(\alpha)$.
Therefore $\mathcal{E}_{\neg}(\beta) \in S$.
Likewise,
$\mathcal{E}_{\square}(\beta, \gamma) =
(\beta \mathop{\square} \gamma)$
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
Again, it clearly follows that
$\sigma(\mathcal{E}_{\square}(\beta, \gamma)) \neq \sigma(\alpha)$.
Thus $\mathcal{E}_{\square}(\beta, \gamma) \in S$.
\subparagraph{Case 2}%
Suppose $\sigma(\gamma) \neq \sigma(\alpha)$.
This case mirrors \nameref{spar:exercise-1.2.6a-ii-1}.
\subparagraph{Case 3}%
Suppose $\sigma(\beta) = \sigma(\gamma) = \sigma(\alpha)$.
By definition, $\mathcal{E}_{\neg}(\beta) = (\neg\beta)$.
Then clearly $\sigma(\mathcal{E}_{\neg}(\beta)) = \sigma(\alpha)$.
Since
\begin{align*}
\bar{v}_1((\neg\beta))
& = (\neg\bar{v}_1(\beta)) \\
& = (\neg\bar{v}_2(\beta)) & \eqref{sub:exercise-1.2.6a-eq1} \\
& = \bar{v}_2((\neg\beta)),
\end{align*}
it follows that $\mathcal{E}_{\neg}(\beta) \in S$.
Likewise,
$\mathcal{E}_{\square}(\beta, \gamma) =
(\beta \mathop{\square} \gamma)$
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
Again, it clearly follows that
$\sigma(\mathcal{E}_{\square}(\beta, \gamma)) = \sigma(\alpha)$.
Since
\begin{align*}
\bar{v}_1((\beta \mathop{\square} \gamma))
& = (\bar{v}_1(\beta) \mathop{\square} \bar{v}_1(\gamma)) \\
& = (\bar{v}_2(\beta) \mathop{\square} \bar{v}_2(\gamma))
& \eqref{sub:exercise-1.2.6a-eq1} \\
& = \bar{v}_2((\beta \mathop{\square} \gamma)),
\end{align*}
it follows that $\mathcal{E}_{\square}(\beta, \gamma) \in S$.
\subparagraph{Subconclusion}%
The above three cases are exhaustive.
Thus it follows $S$ is closed under the five formula-building
operations.
\paragraph{(iii)}%
By \nameref{par:exercise-1.2.6a-i} and \nameref{par:exercise-1.2.6a-ii},
the \nameref{sub:induction-principle-1} indicates $S$ is the set of all
wffs.
Since $\alpha$ is a well-formed formula, it follows $\alpha \in S$.
Therefore
$$((\sigma(\alpha) = \sigma(\alpha)) \Rightarrow
\bar{v}_1(\alpha) = \bar{v}_2(\alpha)).$$
The antecedent clearly holds true.
Hence $\bar{v}_1(\alpha) = \bar{v}_2(\alpha)$ as expected.
\end{proof}
2023-08-21 20:51:49 +00:00
\subsection{\unverified{Exercise 1.2.6b}}%
\hyperlabel{sub:exercise-1.2.6b}
Let $\mathcal{S}$ be a set of sentence symbols that includes those in $\Sigma$
and $\tau$ (and possibly more).
Show that $\Sigma \vDash \tau$ iff every truth assignment for $\mathcal{S}$
which satisfies every member of $\Sigma$ also satisfies $\tau$.
(This is an easy consequence of part (a). The point of part (b) is that we do
not need to worry about getting the domain of a truth assignment
\textit{exactly} perfect, as long as it is big enough. For example, one
option would be always to use truth assignments on the set of \textit{all}
sentence symbols. The drawback is that these are infinite objects, and there
are a great many -- uncountably many -- of them.)
\begin{proof}
2023-08-21 20:51:49 +00:00
Let $\mathcal{S}$ be a set of sentence symbols that includes those in
$\Sigma$ and $\tau$ (and possibly more).
Let $\mathcal{S}' \subseteq \mathcal{S}$ be the set containing precisely
the sentence symbols found in $\Sigma$ and $\tau$.
Let $v$ be a truth assignment for $\mathcal{S}'$ that satisfies every
member of $\Sigma$ and $w$ be an arbitrary extension of $v$ for
$\mathcal{S}$.
By construction, $v$ and $w$ agree on all the sentence symbols found in both
$\Sigma$ and $\tau$.
\nameref{sub:exercise-1.2.6a} then implies $\bar{v}(\tau) = \bar{w}(\tau)$
and $\bar{v}(\sigma) = \bar{w}(\sigma)$ for all $\sigma \in \Sigma$.
Thus $v$ satisfies every member of $\Sigma$ if and only if $w$ satisfies
every member of $\Sigma$.
Likewise, $v$ satisfies $\tau$ if and only if $w$ satisfies $\tau$.
Hence, by definition of \nameref{ref:tautological-implication},
$\Sigma \vDash \tau$ if and only if every truth assignment for the
sentence symbols in $\mathcal{S}'$ that satisfies every member of
$\Sigma$ also satisfies $\tau$ if and only if every truth assignment for
the sentence symbols in $\mathcal{S}$ that satisfies every member of
$\Sigma$ also satisfies $\tau$.
\end{proof}
2023-08-21 20:51:49 +00:00
\subsection{\unverified{Exercise 1.2.7}}%
\hyperlabel{sub:exercise-1.2.7}
You are in a land inhabited by people who either always tell the truth or
always tell falsehoods.
You come to a fork in the road and you need to know which fork leads to the
capital.
There is a local resident there, but he has time only to reply to one
yes-or-no question.
What one question should you ask so as to learn which fork to take?
\textit{Suggestion}: Make a table.
\begin{proof}
2023-08-21 20:51:49 +00:00
Consider the self-referential question,
``Would you respond `yes' to the question,
`Should I take the left road to get to the capital?'\,''
Let $I$ denote whether the inhabitant is truthful, and $L$ denote whether
the left road actually goes to the capital.
We let $R$ denote the answer given by the inhabitant in the following
``truth table'':
$$\begin{array}{s|s|e}
I & L & R \\
\hline
T & T & \text{Yes} \\
T & F & \text{No} \\
F & T & \text{Yes} \\
F & F & \text{No}
\end{array}$$
Regardless of the inhabitant's honesty, we receive the answer ``Yes'' if and
only if the left road actually goes to the capital.
\end{proof}
2023-08-21 20:51:49 +00:00
\subsection{\unverified{Exercise 1.2.8}}%
\hyperlabel{sub:exercise-1.2.8}
2023-08-21 20:51:49 +00:00
(Substitution) Consider a sequence $\alpha_1, \alpha_2, \ldots$ of wffs.
For each wff $\phi$ let $\phi^*$ be the result of replacing the sentence
symbol $A_n$ by $\alpha_n$ for each $n$.
2023-08-21 20:51:49 +00:00
\subsubsection{\unverified{Exercise 1.2.8a}}%
\hyperlabel{ssub:exercise-1.2.8a}
Let $v$ be a truth assignment for the set of all sentence symbols; define $u$
to be the truth assignment for which $u(A_n) = \bar{v}(\alpha_n)$.
Show that $\bar{u}(\phi) = \bar{v}(\phi^*)$.
2023-08-21 20:51:49 +00:00
Use the \nameref{sub:induction-principle-1}.
\begin{proof}
2023-08-21 20:51:49 +00:00
Let $$S = \{\phi \mid
\phi \text{ is a wff such that } \bar{u}(\phi) = \bar{v}(\phi^*)\}.$$
We prove that (i) $S$ contains the set of all sentence symbols and (ii) $S$
is closed under the five \nameref{ref:formula-building-operations}.
Afterward we prove that (iii) our theorem statement holds.
\paragraph{(i)}%
\hyperlabel{par:exercise-1.2.8-i}
Let $\phi = A_n$ be an arbitrary sentence symbol.
By definition, $u(A_n) = \bar{v}(\alpha_n)$.
Then
\begin{align*}
\bar{u}(\phi)
& = \bar{u}(A_n) \\
& = u(A_n) \\
& = \bar{v}(\alpha_n) \\
& = \bar{v}(\phi^*).
\end{align*}
Hence every sentence symbol is in $S$.
\paragraph{(ii)}%
\hyperlabel{par:exercise-1.2.8-ii}
Let $\beta, \gamma \in S$.
That is, $\bar{u}(\beta) = \bar{v}(\beta^*)$ and
$\bar{u}(\gamma) = \bar{v}(\gamma^*)$.
By definition, $\mathcal{E}_{\neg}(\beta) = (\neg\beta)$.
Therefore
\begin{align*}
\bar{u}(\mathcal{E}_{\neg}(\beta))
& = (\neg\bar{u}(\beta)) \\
& = (\neg\bar{v}(\beta^*)) \\
& = \bar{v}((\neg\beta^*)) \\
& = \bar{v}((\neg\beta)^*) \\
& = \bar{v}(\mathcal{E}_{\neg}(\beta)^*).
\end{align*}
Likewise,
$\mathcal{E}_{\square}(\beta, \gamma) =
(\beta \mathop{\square} \gamma)$
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
Therefore
\begin{align*}
\bar{u}(\mathcal{E}_{\square}(\beta, \gamma))
& = \bar{u}(\beta) \mathop{\square} \bar{u}(\gamma) \\
& = \bar{v}(\beta^*) \mathop{\square} \bar{v}(\gamma^*) \\
& = \bar{v}((\beta^* \mathop{\square} \gamma^*)) \\
& = \bar{v}((\beta \mathop{\square} \gamma)^*) \\
& = \bar{v}(\mathcal{E}_{\square}(\beta, \gamma)^*).
\end{align*}
Hence $S$ is closed under the five formula-building operations.
\paragraph{(iii)}%
By \nameref{par:exercise-1.2.8-i} and \nameref{par:exercise-1.2.8-ii},
the \nameref{sub:induction-principle-1} implies $S$ is the set of all
wffs.
Thus for any well-formed formula $\phi$,
$\bar{u}(\phi) = \bar{v}(\phi^*)$.
\end{proof}
2023-08-21 20:51:49 +00:00
\subsubsection{\unverified{Exercise 1.2.8b}}%
\hyperlabel{ssub:exercise-1.2.8b}
Show that if $\phi$ is a tautology, then so is $\phi^*$.
(For example, one of our selected tautologies is
$((A \land B) \Leftrightarrow (B \land A))$. From this we can conclude, by
substitution, that
$((\alpha \land \beta) \Leftrightarrow (\beta \land \alpha))$ is a
tautology, for any wffs $\alpha$ and $\beta$.)
\begin{proof}
2023-08-21 20:51:49 +00:00
Suppose $\phi$ is a tautology and let $S$ be the set of all sentence
symbols.
Let $v$ be a truth assignment for $S$ and define $u$ to be the truth
assignment for which $u(A_n) = \bar{v}(\alpha_n)$.
By \nameref{ssub:exercise-1.2.8a}, $\bar{u}(\phi) = \bar{v}(\phi^*)$.
Since $\phi$ is a tautology, $\bar{u}(\phi)$ is true meaning
$\bar{v}(\phi^*)$ is also true.
Since $v$ is an arbitrary truth assignment, it follows that every truth
assignment for $S$ satisfies $\phi^*$.
By \nameref{sub:exercise-1.2.6b}, $\vDash \phi^*$, i.e. $\phi^*$ is a
tautology.
\end{proof}
2023-08-21 20:51:49 +00:00
\subsection{\unverified{Exercise 1.2.9}}%
\hyperlabel{sub:exercise-1.2.9}
(Duality) Let $\alpha$ be a wff whose only connective symbols are $\land$,
$\lor$, and $\neg$.
Let $\alpha^*$ be the result of interchanging $\land$ and $\lor$ and replacing
each sentence symbol by its negation.
2023-08-21 20:51:49 +00:00
Show that $\alpha^*$ is tautologically equivalent to $(\neg\alpha)$.
Use the \nameref{sub:induction-principle-1}.
2023-08-21 20:51:49 +00:00
\code*{Common/Logic/Basic}
{not\_and\_de\_morgan}
\code{Common/Logic/Basic}
{not\_or\_de\_morgan}
\begin{proof}
2023-08-21 20:51:49 +00:00
Let
\begin{align*}
S = \{ \alpha \mid
& \alpha \text{ is a wff containing a } \Rightarrow \text{ or } \\
& \alpha \text{ is a wff containing a } \Leftrightarrow \text{ or } \\
& \alpha^* \vDash \Dashv (\neg\alpha) \}.
\end{align*}
We prove that (i) $S$ contains the set of all sentence symbols and (ii) $S$
is closed under the five \nameref{ref:formula-building-operations}.
Afterward we prove that (iii) our theorem statement holds.
\paragraph{(i)}%
\hyperlabel{par:exercise-1.2.9-i}
Let $\alpha = A_n$ be an arbitrary sentence symbol.
By definition, $$\alpha^* = A_n^* = (\neg A_n) = (\neg\alpha).$$
Hence every sentence symbol is in $S$.
\paragraph{(ii)}%
\hyperlabel{par:exercise-1.2.9-ii}
Let $\alpha, \beta \in S$.
Suppose $\alpha$ contains a $\Rightarrow$ or $\Leftrightarrow$ symbol.
Then $\mathcal{E}_{\neg}(\alpha)$ also does.
The same holds for $\beta$.
Furthermore, if either $\alpha$ or $\beta$ contains a $\Rightarrow$ or
$\Leftrightarrow$ symbol, then so does
$\mathcal{E}_{\square}(\alpha, \beta)$
where $\square$ is one of the binary connectives $\land$, $\lor$,
$\Rightarrow$, $\Leftrightarrow$.
In any of these above cases, it is trivial to see each of the five
formula-building operations take a wff from $S$ and produce another wff
in $S$.
Now, suppose neither $\alpha$ nor $\beta$ contain a $\Rightarrow$ or
$\Leftrightarrow$ symbol.
Then it must be that $\alpha^* \vDash\Dashv (\neg\alpha)$ and
$\beta^* \vDash\Dashv (\neg\beta)$.
Consider first $\mathcal{E}_{\neg}(\alpha) = (\neg\alpha)$.
By definition,
$$\mathcal{E}_{\neg}(\alpha)^*
= (\neg\alpha^*)
\vDash\Dashv (\neg(\neg\alpha))
= (\neg(\mathcal{E}_{\neg}(\alpha))).$$
Therefore $\mathcal{E}_{\neg}(\alpha) \in S$.
Likewise, consider $\mathcal{E}_{\square}(\alpha, \beta)$ where $\square$
is one of the binary connectives $\land$, $\lor$, $\Rightarrow$,
$\Leftrightarrow$.
It trivially follows that $\mathcal{E}_{\Rightarrow}(\alpha, \beta) \in S$
and $\mathcal{E}_{\Leftrightarrow}(\alpha, \beta) \in S$.
We cover the remaining two cases in turn:
\subparagraph{Case 1}%
Suppose $\square = \land$.
Then
\begin{align*}
\mathcal{E}_{\land}(\alpha, \beta)^*
& = (\alpha \land \beta)^* \\
& = (\alpha^* \lor \beta^*) \\
& \vDash\Dashv ((\neg\alpha) \lor (\neg\beta)) \\
& \vDash\Dashv \neg(\alpha \land \beta) \\
& = (\neg(\mathcal{E}_{\land}(\alpha, \beta))),
\end{align*}
where the last tautological equivalence follows from De Morgan's laws.
\subparagraph{Case 2}%
Suppose $\square = \lor$.
Then
\begin{align*}
\mathcal{E}_{\lor}(\alpha, \beta)^*
& = (\alpha \lor \beta)^* \\
& = (\alpha^* \land \beta^*) \\
& \vDash\Dashv ((\neg\alpha) \land (\neg\beta)) \\
& \vDash\Dashv \neg(\alpha \lor \beta) \\
& = (\neg(\mathcal{E}_{\lor}(\alpha, \beta))),
\end{align*}
where the last tautological equivalence follows from De Morgan's laws.
\subparagraph{Subconclusion}%
The foregoing analysis shows that $S$ is indeed closed under the five
formula-building operations.
\paragraph{(iii)}%
By \nameref{par:exercise-1.2.9-i} and \nameref{par:exercise-1.2.9-ii},
the \nameref{sub:induction-principle-1} implies $S$ is the set of all
wffs.
Thus for any well-formed formula $\alpha$ whose only connective symbols
are $\land$, $\lor$, and $\neg$, $\alpha^* \vDash\Dashv (\neg\alpha)$.
\end{proof}
\subsection{\unverified{Exercise 1.2.10}}%
\hyperlabel{sub:exercise-1.2.10}
Say that a set $\Sigma_1$ of wffs is \textit{equivalent} to a set $\Sigma_2$
of wffs iff for any wff $\alpha$, we have $\Sigma_1 \vDash \alpha$ iff
$\Sigma_2 \vDash \alpha$.
A set $\Sigma$ is \textit{independent} iff no member of $\Sigma$ is
tautologically implied by the remaining members in $\Sigma$.
Show that the following hold.
\subsubsection{\unverified{Exercise 1.2.10a}}%
\hyperlabel{ssub:exercise-1.2.10a}
A finite set of wffs has an independent equivalent subset.
\begin{proof}
For natural number $n$, let $P(n)$ be the statement:
\begin{induction}
\hyperlabel{sub:exercise-1.2.10a-ih}
A set of wffs \nameref{S:ref:equinumerous} to $n$ has an independent
equivalent subset.
\end{induction}
\noindent
We proceed by induction on $n$.
\paragraph{Base Case}%
Consider a finite set of wffs equinumerous to $0$.
This is simply the empty set.
It is vacuously true that $\emptyset$ is independent.
Thus $\emptyset \subseteq \emptyset$ is an independent equivalent subset
meaning $P(0)$ is true.
\paragraph{Inductive Step}%
Suppose $P(n)$ holds true for some $n \geq 0$.
That is, every finite set of wffs equinumerous to $n$ has an independent
equivalent subset.
Consider now set $\Sigma$ of wffs equinumerous to $n + 1$.
There are two possibilities to consider:
\subparagraph{Case 1}%
Suppose $\Sigma$ is independent.
Then $\Sigma \subseteq \Sigma$ is an independent equivalent subset.
\subparagraph{Case 2}%
Suppose $\Sigma$ is not independent.
Then there exists some $\sigma \in \Sigma$ such that $\sigma$ is
tautologically implied by the remaining members of $\Sigma$.
Let $\Sigma_1 = \Sigma - \{\sigma\}$.
By \ihref{sub:exercise-1.2.10a-ih}, $\Sigma_1$ has an independent
equivalent subset $\Sigma_2$.
Now let $\phi$ be an arbitrary wff.
Then
\begin{align*}
\Sigma_2 \vDash \phi
& \Rightarrow \Sigma_1 \vDash \phi
& \text{def'n of equivalent} \\
& \Rightarrow \Sigma_1; \sigma \vDash \phi
& \text{monotonicity} \\
& \Rightarrow \Sigma \vDash \phi.
\end{align*}
Likewise,
\begin{align*}
\Sigma \vDash \phi
& \Rightarrow \Sigma_1; \sigma \vDash \phi \\
& \Rightarrow \Sigma_1 \vDash \phi
& \sigma \text{ is redundant} \\
& \Rightarrow \Sigma_2 \vDash \phi.
& \text{def'n of equivalent}
\end{align*}
Thus $\Sigma_2$ is an independent equivalent subset of $\Sigma$.
\subparagraph{Subconclusion}%
The above two cases are exhaustive.
Hence $P(n + 1)$ holds true.
\paragraph{Conclusion}%
By induction, it follows $P(n)$ holds true for all $n \geq 0$.
That is, every set of wffs equinumerous to a natural number has an
independent equivalent subset.
In other words, every finite set of wffs has an independent equivalent
subset.
\end{proof}
\subsubsection{\unverified{Exercise 1.2.10b}}%
\hyperlabel{ssub:exercise-1.2.10b}
An infinite set need not have an independent equivalent subset.
\begin{proof}
Let $$S = \{A_1 \land \cdots \land A_n \mid n \in \omega\}$$ be an infinite
set of wffs.
For the sake of contradiction, suppose $S$ has an independent equivalent
subset $S'$.
There are two cases to consider:
\paragraph{Case 1}%
Suppose $S' = \emptyset$.
Then it trivially follows $S'$ is not equivalent to $S$, a contradiction.
\paragraph{Case 2}%
Suppose $S' \neq \emptyset$.
By the \nameref{S:sub:well-ordering-natural-numbers}, there exists a least
$n \in \mathbb{N}$ such that $\phi = A_1 \land \cdots \land A_n$ is in
$S'$.
It cannot be that another element of $S'$ exists since such an element
would tautologically imply $\phi$, contradicting independence.
Thus $S' = \{\phi\}$.
But $\{\phi\}$ cannot be equivalent to $S$ since it has no information
about sentence symbols, e.g. $A_{n+1}$, another contradiction.
\paragraph{Conclusion}%
The above two cases are exhaustive and both yield contradictions.
It must be that $S$ does not have an independent equivalent subset.
\end{proof}
\subsection{\unverified{Exercise 1.2.11}}%
\hyperlabel{sub:exercise-1.2.11}
Show that a truth assignment $v$ satisfies the wff
$$(\cdots (A_1 \Leftrightarrow A_2)
\Leftrightarrow \cdots \Leftrightarrow A_n)$$
iff $v(A_i) = F$ for an even number of $i$'s, $1 \leq i \leq n$.
(By the associative law for $\Leftrightarrow$, the placement of the
parentheses is not crucial.)
\begin{proof}
Define $\sigma_n$ recursively as follows:
$\sigma_0 = (A_1 \Leftrightarrow A_2)$ and
$\sigma_{n+1} = (\sigma_n \Leftrightarrow A_{n+3})$.
For natural number $n$, let $P(n)$ be the statement:
\begin{induction}
\hyperlabel{sub:exercise-1.2.11-ih}
Truth assignment $v$ satisfies $\sigma_n$ if and only if $v(A_i) = F$
for an even number of $i$'s, $1 \leq i \leq n + 2$.
\end{induction}
\noindent
We proceed by induction on $n$.
\paragraph{Base Case}%
Let $n = 0$.
Then $\sigma_n = \sigma_0 = (A_1 \Leftrightarrow A_2)$.
We proceed by truth table:
$$\begin{array}{s|e|s}
(A_1 & \Leftrightarrow & A_2) \\
\hline
T & T & T \\
T & F & F \\
F & F & T \\
F & T & F
\end{array}$$
Here we see $A_1 \Leftrightarrow A_2$ is true if and only if both $A_1$
and $A_2$ are true or neither $A_1$ nor $A_2$ are true.
Thus $P(0)$ holds true.
\paragraph{Inductive Step}%
Suppose $P(n)$ holds true for some $n \geq 0$.
Consider now $$\sigma_{n+1} = (\sigma_n \Leftrightarrow A_{n+3}).$$
Let $v$ be a truth assignment for $A_1, \ldots, A_{n+3}$.
There are two cases to consider:
\subparagraph{Case 1}%
Suppose $v(A_i) = F$ for an even number of $i$'s, $1 \leq i \leq n + 2$.
By \ihref{sub:exercise-1.2.11-ih}, $v$ satisfies $\sigma_n$.
We now have the following truth table:
$$\begin{array}{s|e|s}
(\sigma_n & \Leftrightarrow & A_{n+3}) \\
\hline
T & T & T \\
T & F & F \\
\end{array}$$
In this case, it follows $v$ satisfies $\sigma_{n+1}$ if and only if
$v(A_i) = F$ for an even number of $i$'s, $1 \leq i \leq n + 3$.
\subparagraph{Case 2}%
Suppose $v(A_i) = F$ for an odd number of $i$'s, $1 \leq i \leq n + 2$.
By \ihref{sub:exercise-1.2.11-ih}, $v$ does not satisfy $\sigma_n$.
We now have the following truth table:
$$\begin{array}{s|e|s}
(\sigma_n & \Leftrightarrow & A_{n+3}) \\
\hline
F & F & T \\
F & T & F \\
\end{array}$$
In this case, it follows $v$ satisfies $\sigma_{n+1}$ if and only if
$v(A_i) = F$ for an even number of $i$'s, $1 \leq i \leq n + 3$.
\subparagraph{Subconclusion}%
The above two cases are exhaustive.
Hence $P(n + 1)$ holds true.
\paragraph{Conclusion}%
By induction, it follows $P(n)$ holds true for all $n \geq 0$.
That is, truth assignment $v$ satisfies
$$(\cdots (A_1 \Leftrightarrow A_2)
\Leftrightarrow \cdots \Leftrightarrow A_n)$$
if and only if $v(A_i) = F$ for an even number of $i$'s,
$1 \leq i \leq n$.
\end{proof}
\subsection{\unverified{Exercise 1.2.12}}%
\hyperlabel{sub:exercise-1.2.12}
There are three suspects for a murder: Adams, Brown, and Clark.
Adams says ``I didn't do it. The victim was an old acquaintance of Brown's.
But Clark hated him.''
Brown states ``I didn't do it. I didn't even know the guy. Besides I was out
of town all that week.''
Clark says ``I didn't do it. I saw both Adams and Brown downtown with the
victim that day; one of them must have done it.''
Assume that the two innocent men are telling the truth, but that the guilty
man might not be.
Who did it?
\begin{proof}
It must be that Brown is the guilty one.
Adam claims the victim was an old acquaintance of Brown's.
Clark claims Brown was downtown with the victim that day.
Brown's testimony conflicts with both of these statements.
\end{proof}
\subsection{\unverified{Exercise 1.2.13}}%
\hyperlabel{sub:exercise-1.2.13}
An advertisement for a tennis magazine states, ``If I'm not playing tennis,
I'm watching tennis. And if I'm not watching tennis, I'm reading about
tennis.''
We can assume that the speaker cannot do more than one of these activities at
a time.
What is the speaker doing?
(Translate the given sentences into our formal language; consider the possible
truth assignments.)
\begin{proof}
Let $P$ denote playing tennis, $W$ denote watching tennis, and $R$ denote
reading about tennis.
These statements can be translated as:
\begin{enumerate}[(a)]
\item $\neg P \Rightarrow W$.
\item $\neg W \Rightarrow R$.
\end{enumerate}
Thus either the speaker is playing tennis, or, if not, he is watching
tennis.
Since we assume the speaker cannot do more than one of these activities at
a time, reading is never a possibility.
\end{proof}
\subsection{\sorry{Exercise 1.2.14}}%
\hyperlabel{sub:exercise-1.2.14}
Let $\mathcal{S}$ be the set of all sentence symbols, and assume that
$v \colon \mathcal{S} \rightarrow \{F, T\}$ is a truth assignment.
Show there is \textit{at most} one extension $\bar{v}$ meeting conditions 0-5
listed at the beginning of this section.
(Suppose that $\bar{v}_1$ and $\bar{v}_2$ are both such extensions. Use the
\nameref{sub:induction-principle-1} to show that $\bar{v}_1 = \bar{v}_2$.)
\begin{proof}
The conditions 0-5 can be found at \nameref{ref:truth-assignment}.
TODO
\end{proof}
\subsection{\verified{Exercise 1.2.15}}%
\hyperlabel{sub:exercise-1.2.15}
Of the following three formulas, which tautologically implies which?
\begin{enumerate}[(a)]
\item $(A \Leftrightarrow B)$
\item $(\neg((A \Rightarrow B) \Rightarrow (\neg(B \Rightarrow A))))$
\item $(((\neg A) \lor B) \land (A \lor (\neg B)))$
\end{enumerate}
\code{Bookshelf/Enderton/Logic/Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_15\_i}
\code{Bookshelf/Enderton/Logic/Chapter\_1}
{Enderton.Logic.Chapter\_1.exercise\_1\_2\_15\_ii}
\begin{proof}
All three are tautologically equivalent.
We prove that (i) (a) is tautologically equivalent to (b) and (ii) (a) is
tautologically equivalent to (c). It then immediately follows that (b) is
tautologically equivalent to (c).
\paragraph{(i)}%
By \nameref{sub:exercise-1.2.4}, $(a) \vDash\Dashv (b)$ if and only if
$\vDash ((a) \Leftrightarrow (b))$.
We now construct the corresponding truth table:
$$\begin{array}{s|c|s|e|c|s|c|s|c|c|s|c|s}
(A & \Leftrightarrow & B) & \Leftrightarrow &
(\neg & ((A & \Rightarrow & B) &
\Rightarrow & (\neg & (B & \Rightarrow & A)))) \\
\hline
T & T & T & T & T & T & T & T & F & F & T & T & T \\
T & F & F & T & F & T & F & F & T & F & F & T & T \\
F & F & T & T & F & F & T & T & T & T & T & F & F \\
F & T & F & T & T & F & T & F & F & F & F & T & F
\end{array}$$
Therefore (a) and (b) are tautologically equivalent.
\paragraph{(ii)}%
By \nameref{sub:exercise-1.2.4}, $(a) \vDash\Dashv (c)$ if and only if
$\vDash ((a) \Leftrightarrow (c))$.
We now construct the corresponding truth table:
$$\begin{array}{s|c|s|e|c|s|c|s|c|s|c|c|s}
(A & \Leftrightarrow & B) & \Leftrightarrow &
(((\neg & A) & \lor & B) & \land & (A & \lor & (\neg & B))) \\
\hline
T & T & T & T & F & T & T & T & T & T & T & F & T \\
T & F & F & T & F & T & F & F & F & T & T & T & F \\
F & F & T & T & T & F & T & T & F & F & F & F & T \\
F & T & F & T & T & F & T & F & T & F & T & T & F
\end{array}$$
Therefore (a) and (c) are tautologically equivalent.
\end{proof}
\subsection{\sorry{Exercise 1.3.1}}%
\hyperlabel{sub:exercise-1.3.1}
Rewrite the tautologies in the ``selected list'' at the end of Section 1.2, but
using the conventions of the present section to minimize the number of
parentheses.
\begin{answer}
TODO
\end{answer}
\subsection{\sorry{Exercise 1.3.2}}%
\hyperlabel{sub:exercise-1.3.2}
Give an example of wffs $\alpha$ and $\beta$ and expressions $\gamma$ and
$\delta$ such that $(\alpha \land \beta) = (\gamma \land \delta)$ but
$\alpha \neq \gamma$.
\begin{answer}
TODO
\end{answer}
\subsection{\sorry{Exercise 1.3.3}}%
\hyperlabel{sub:exercise-1.3.3}
Carry out the argument for \nameref{sub:lemma-13b} for the case of the
operation $\mathcal{E}_{\neg}$.
\begin{answer}
TODO
\end{answer}
\subsection{\sorry{Exercise 1.3.4}}%
\hyperlabel{sub:exercise-1.3.4}
Suppose that we modify our definition of wff by omitting all \textit{right}
parentheses.
Thus instead of
$$((A \land (\neg B)) \Rightarrow (C \lor D))$$
we use $$((A \land (\neg B \Rightarrow (C \lor D.$$
Show that we still have unique readability (i.e., each wff still has only one
possible decomposition).
\textit{Suggestion}: These expressions have the same number of parentheses as
connective symbols.
\begin{answer}
TODO
\end{answer}
\subsection{\sorry{Exercise 1.3.5}}%
\hyperlabel{sub:exercise-1.3.5}
The English language has a tendency to use two-part connectives: ``both
$\ldots$ and $\ldots$,'' ``either $\ldots$ or $\ldots$,'' ``if $\ldots$, then
$\ldots$.''
How does this affect unique readability in English?
\begin{answer}
TODO
\end{answer}
\subsection{\sorry{Exercise 1.3.6}}%
\hyperlabel{sub:exercise-1.3.6}
We have given an algorithm for analyzing a wff by constructing its tree from
the top down.
There are also ways of constructing the tree from the bottom up.
This can be done by looking through the formula for innermost pairs of
parentheses.
Give a complete description of an algorithm of this sort.
\begin{answer}
TODO
\end{answer}
\subsection{\sorry{Exercise 1.3.7}}%
\hyperlabel{sub:exercise-1.3.7}
Suppose that left and right parentheses are indistinguishable.
Thus, instead of $(\alpha \lor (\beta \land \gamma))$ we have
$|\alpha \lor |\beta \land \gamma||$.
Do formulas still have unique decomposition?
\begin{answer}
TODO
\end{answer}
\end{document}