Compare commits: finite-set...main (23 commits)

| Author | SHA1 | Date |
| --- | --- | --- |
| Joshua Potter | bf9888c050 | |
| Joshua Potter | 9a36a65c0e | |
| Joshua Potter | fbe1e685d3 | |
| Joshua Potter | 48bd97ef3f | |
| Joshua Potter | aeb3cafa5d | |
| Joshua Potter | f76457cd6f | |
| Joshua Potter | 9864ffd7a0 | |
| Joshua Potter | 889281ae98 | |
| Joshua Potter | 4c9f07634f | |
| Joshua Potter | 4f8c3383f1 | |
| Joshua Potter | cdba12f161 | |
| Joshua Potter | b596478a36 | |
| Joshua Potter | f215a3180a | |
| Joshua Potter | b97b8fbbca | |
| Joshua Potter | 857d0ea83e | |
| Joshua Potter | 6ffa7f94fd | |
| Joshua Potter | 4f371ac9b8 | |
| Joshua Potter | 05639fd07e | |
| Joshua Potter | 9dca45a997 | |
| Joshua Potter | 7907803093 | |
| Joshua Potter | c985f9f8a5 | |
| Joshua Potter | edef7e9b58 | |
| Joshua Potter | e29795c55e | |
@@ -1,8 +1,10 @@
# Lean
build
lakefile.olean
lake-packages
_target
leanpkg.path
.lake/

# TeX
*.aux

@@ -20,3 +22,9 @@ leanpkg.path
*.synctex.gz
*.toc
.*.lb

# direnv
.direnv/

# nix
result
@@ -1,2 +1,16 @@
import Bookshelf.Apostol.Chapter_I_03
import Bookshelf.Apostol.Chapter_1_11

/-! # Calculus, Vol. 1: One-Variable Calculus, with an Introduction to Linear Algebra

## Apostol, Tom M.

### LaTeX

Full set of [proofs and exercises](Bookshelf/Apostol.pdf).

### Lean

* [Chapter I.03: A Set of Axioms for the Real-Number System](Bookshelf/Apostol/Chapter_I_03.html)
* [Chapter 1.11: Exercises](Bookshelf/Apostol/Chapter_1_11.html)
-/
@@ -8,21 +8,21 @@ namespace Apostol.Chapter_1_11

open BigOperators

-/-- #### Exercise 4a
+/-- ### Exercise 4a

`⌊x + n⌋ = ⌊x⌋ + n` for every integer `n`.
-/
theorem exercise_4a (x : ℝ) (n : ℤ) : ⌊x + n⌋ = ⌊x⌋ + n :=
  Int.floor_add_int x n
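A quick numeric check of the identity (an editor's illustration, not part of the changeset): with `x = 2.6` and `n = 3`,
$$\lfloor 2.6 + 3 \rfloor = \lfloor 5.6 \rfloor = 5 = 2 + 3 = \lfloor 2.6 \rfloor + 3.$$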

-/-- #### Exercise 4b.1
+/-- ### Exercise 4b.1

`⌊-x⌋ = -⌊x⌋` if `x` is an integer.
-/
theorem exercise_4b_1 (x : ℤ) : ⌊-x⌋ = -⌊x⌋ := by
  simp only [Int.floor_int, id_eq]

-/-- #### Exercise 4b.2
+/-- ### Exercise 4b.2

`⌊-x⌋ = -⌊x⌋ - 1` otherwise.
-/
@@ -42,7 +42,7 @@ theorem exercise_4b_2 (x : ℝ) (h : ∃ n : ℤ, x ∈ Set.Ioo ↑n (↑n + (1
  · exact (Set.mem_Ioo.mp hn).left
  · exact le_of_lt (Set.mem_Ico.mp hn').right

-/-- #### Exercise 4c
+/-- ### Exercise 4c

`⌊x + y⌋ = ⌊x⌋ + ⌊y⌋` or `⌊x⌋ + ⌊y⌋ + 1`.
-/

@@ -50,7 +50,7 @@ theorem exercise_4c (x y : ℝ)
  : ⌊x + y⌋ = ⌊x⌋ + ⌊y⌋ ∨ ⌊x + y⌋ = ⌊x⌋ + ⌊y⌋ + 1 := by
  have hx : x = Int.floor x + Int.fract x := Eq.symm (add_eq_of_eq_sub' rfl)
  have hy : y = Int.floor y + Int.fract y := Eq.symm (add_eq_of_eq_sub' rfl)
-  by_cases Int.fract x + Int.fract y < 1
+  by_cases h : Int.fract x + Int.fract y < 1
  · refine Or.inl ?_
    rw [Int.floor_eq_iff]
    simp only [Int.cast_add]

@@ -72,7 +72,7 @@ theorem exercise_4c (x y : ℝ)
    rw [← sub_lt_iff_lt_add', ← sub_sub, add_sub_cancel, add_sub_cancel]
    exact add_lt_add (Int.fract_lt_one x) (Int.fract_lt_one y)
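The two branches of `exercise_4c` are governed by whether the fractional parts sum to less than `1` (an illustration, not from the source): with $x = 0.2$, $y = 0.3$, $\lfloor 0.5 \rfloor = 0 = \lfloor x \rfloor + \lfloor y \rfloor$; with $x = y = 0.6$, $\lfloor 1.2 \rfloor = 1 = \lfloor x \rfloor + \lfloor y \rfloor + 1$.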

-/-- #### Exercise 5
+/-- ### Exercise 5

The formulas in Exercises 4(d) and 4(e) suggest a generalization for `⌊nx⌋`.
State and prove such a generalization.

@@ -81,7 +81,7 @@ theorem exercise_5 (n : ℕ) (x : ℝ)
  : ⌊n * x⌋ = Finset.sum (Finset.range n) (fun i => ⌊x + i/n⌋) :=
  Real.Floor.floor_mul_eq_sum_range_floor_add_index_div n x
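The generalization proved here is Hermite's identity; stated explicitly (editor's note, not part of the diff):
$$\lfloor n x \rfloor = \sum_{i=0}^{n-1} \left\lfloor x + \frac{i}{n} \right\rfloor.$$
For example, with $n = 3$ and $x = 0.5$ the left side is $\lfloor 1.5 \rfloor = 1$, and the right side is $\lfloor 0.5 \rfloor + \lfloor 0.5 + \tfrac{1}{3} \rfloor + \lfloor 0.5 + \tfrac{2}{3} \rfloor = 0 + 0 + 1 = 1$.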

-/-- #### Exercise 4d
+/-- ### Exercise 4d

`⌊2x⌋ = ⌊x⌋ + ⌊x + 1/2⌋`
-/

@@ -94,7 +94,7 @@ theorem exercise_4d (x : ℝ)
    simp
    rw [add_comm]

-/-- #### Exercise 4e
+/-- ### Exercise 4e

`⌊3x⌋ = ⌊x⌋ + ⌊x + 1/3⌋ + ⌊x + 2/3⌋`
-/

@@ -108,7 +108,7 @@ theorem exercise_4e (x : ℝ)
  conv => rhs; rw [← add_rotate']; arg 2; rw [add_comm]
  rw [← add_assoc]

-/-- #### Exercise 7b
+/-- ### Exercise 7b

If `a` and `b` are positive integers with no common factor, we have the formula
`∑_{n=1}^{b-1} ⌊na / b⌋ = ((a - 1)(b - 1)) / 2`. When `b = 1`, the sum on the

@@ -118,14 +118,14 @@ Derive the result analytically as follows: By changing the index of summation,
note that `Σ_{n=1}^{b-1} ⌊na / b⌋ = Σ_{n=1}^{b-1} ⌊a(b - n) / b⌋`. Now apply
Exercises 4(a) and (b) to the bracket on the right.
-/
-theorem exercise_7b (ha : a > 0) (hb : b > 0) (hp : Nat.coprime a b)
+theorem exercise_7b (ha : a > 0) (hb : b > 0) (hp : Nat.Coprime a b)
  : ∑ n in (Finset.range b).filter (· > 0), ⌊n * ((a : ℕ) : ℝ) / b⌋ =
    ((a - 1) * (b - 1)) / 2 := by
  sorry
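A numeric check of the Exercise 7b formula (illustration only, not from the source): take $a = 3$ and $b = 5$, which are coprime. Then
$$\sum_{n=1}^{4} \left\lfloor \frac{3n}{5} \right\rfloor = 0 + 1 + 1 + 2 = 4 = \frac{(3 - 1)(5 - 1)}{2}.$$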

section

-/-- #### Exercise 8
+/-- ### Exercise 8

Let `S` be a set of points on the real line. The *characteristic function* of
`S` is, by definition, the function `Χ` such that `Χₛ(x) = 1` for every `x` in
@@ -1,5 +1,5 @@
import Common.Set
-import Mathlib.Data.Real.Basic
+import Mathlib.Data.Real.Archimedean

/-! # Apostol.Chapter_I_03

@ -100,7 +100,7 @@ theorem is_lub_neg_set_iff_is_glb_set_neg (S : Set ℝ)
|
|||
_ = IsGreatest (lowerBounds S) (-x) := by rw [is_least_neg_set_eq_is_greatest_set_neq]
|
||||
_ = IsGLB S (-x) := rfl
|
||||
|
||||
/-- #### Theorem I.27
|
||||
/-- ### Theorem I.27
|
||||
|
||||
Every nonempty set `S` that is bounded below has a greatest lower bound; that
|
||||
is, there is a real number `L` such that `L = inf S`.
|
||||
|
@ -147,7 +147,7 @@ lemma leq_nat_abs_ceil_self (x : ℝ) : x ≤ Int.natAbs ⌈x⌉ := by
|
|||
|
||||
/-! ## The Archimedean property of the real-number system -/
|
||||
|
||||
/-- #### Theorem I.29
|
||||
/-- ### Theorem I.29
|
||||
|
||||
For every real `x` there exists a positive integer `n` such that `n > x`.
|
||||
-/
|
||||
|
@ -159,7 +159,7 @@ theorem exists_pnat_geq_self (x : ℝ) : ∃ n : ℕ+, ↑n > x := by
|
|||
_ = n := rfl
|
||||
exact ⟨n, this⟩
|
||||
|
||||
/-- #### Theorem I.30
|
||||
/-- ### Theorem I.30
|
||||
|
||||
If `x > 0` and if `y` is an arbitrary real number, there exists a positive
|
||||
integer `n` such that `nx > y`.
|
||||
|
@ -174,7 +174,7 @@ theorem exists_pnat_mul_self_geq_of_pos {x y : ℝ}
|
|||
rw [div_mul, div_self (show x ≠ 0 from LT.lt.ne' hx), div_one] at p'
|
||||
exact ⟨n, p'⟩
|
||||
|
||||
/-- #### Theorem I.31
|
||||
/-- ### Theorem I.31
|
||||
|
||||
If three real numbers `a`, `x`, and `y` satisfy the inequalities
|
||||
`a ≤ x ≤ a + y / n` for every integer `n ≥ 1`, then `x = a`.
|
||||
|
@ -270,7 +270,7 @@ lemma mem_imp_ge_lub {x : ℝ} (h : IsLUB S s) : x ∈ upperBounds S → x ≥ s
|
|||
intro hx
|
||||
exact h.right hx
|
||||
|
||||
/-- #### Theorem I.32a
|
||||
/-- ### Theorem I.32a
|
||||
|
||||
Let `h` be a given positive number and let `S` be a set of real numbers. If `S`
|
||||
has a supremum, then for some `x` in `S` we have `x > sup S - h`.
|
||||
|
@ -321,7 +321,7 @@ lemma mem_imp_le_glb {x : ℝ} (h : IsGLB S s) : x ∈ lowerBounds S → x ≤ s
|
|||
intro hx
|
||||
exact h.right hx
|
||||
|
||||
/-- #### Theorem I.32b
|
||||
/-- ### Theorem I.32b
|
||||
|
||||
Let `h` be a given positive number and let `S` be a set of real numbers. If `S`
|
||||
has an infimum, then for some `x` in `S` we have `x < inf S + h`.
|
||||
|
@ -343,7 +343,7 @@ theorem inf_imp_exists_lt_inf_add_delta {S : Set ℝ} {s h : ℝ} (hp : h > 0)
|
|||
exact le_of_not_gt (not_and.mp (nb x) hx)
|
||||
rwa [← mem_lower_bounds_iff_forall_ge] at nb'
|
||||
|
||||
/-- #### Theorem I.33a (Additive Property)
|
||||
/-- ### Theorem I.33a (Additive Property)
|
||||
|
||||
Given nonempty subsets `A` and `B` of `ℝ`, let `C` denote the set
|
||||
`C = {a + b : a ∈ A, b ∈ B}`. If each of `A` and `B` has a supremum, then `C`
|
||||
|
@ -393,7 +393,7 @@ theorem sup_minkowski_sum_eq_sup_add_sup (A B : Set ℝ) (a b : ℝ)
|
|||
_ ≤ a' + b' + 1 / n := le_of_lt hab'
|
||||
_ ≤ c + 1 / n := add_le_add_right hc' (1 / n)
|
||||
|
||||
/-- #### Theorem I.33b (Additive Property)
|
||||
/-- ### Theorem I.33b (Additive Property)
|
||||
|
||||
Given nonempty subsets `A` and `B` of `ℝ`, let `C` denote the set
|
||||
`C = {a + b : a ∈ A, b ∈ B}`. If each of `A` and `B` has an infimum, then `C`
|
||||
|
@ -443,7 +443,7 @@ theorem inf_minkowski_sum_eq_inf_add_inf (A B : Set ℝ)
|
|||
_ ≤ a + b := le_of_lt hab'
|
||||
· exact hc.right hlb
|
||||
|
||||
/-- #### Theorem I.34
|
||||
/-- ### Theorem I.34
|
||||
|
||||
Given two nonempty subsets `S` and `T` of `ℝ` such that `s ≤ t` for every `s` in
|
||||
`S` and every `t` in `T`. Then `S` has a supremum, and `T` has an infimum, and
|
||||
|
@ -489,7 +489,7 @@ theorem forall_mem_le_forall_mem_imp_sup_le_inf (S T : Set ℝ)
|
|||
_ < x := hx.right
|
||||
simp at this
|
||||
|
||||
/-- #### Exercise 1
|
||||
/-- ### Exercise 1
|
||||
|
||||
If `x` and `y` are arbitrary real numbers with `x < y`, prove that there is at
|
||||
least one real `z` satisfying `x < z < y`.
|
||||
|
@ -506,7 +506,7 @@ theorem exercise_1 (x y : ℝ) (h : x < y) : ∃ z, x < z ∧ z < y := by
|
|||
_ < x + z := (add_lt_add_iff_left x).mpr hz'
|
||||
_ = y := hz.right
|
||||
|
||||
/-- #### Exercise 2
|
||||
/-- ### Exercise 2
|
||||
|
||||
If `x` is an arbitrary real number, prove that there are integers `m` and `n`
|
||||
such that `m < x < n`.
|
||||
|
@ -514,7 +514,7 @@ such that `m < x < n`.
|
|||
theorem exercise_2 (x : ℝ) : ∃ m n : ℝ, m < x ∧ x < n := by
|
||||
refine ⟨x - 1, ⟨x + 1, ⟨?_, ?_⟩⟩⟩ <;> norm_num
|
||||
|
||||
/-- #### Exercise 3
|
||||
/-- ### Exercise 3
|
||||
|
||||
If `x > 0`, prove that there is a positive integer `n` such that `1 / n < x`.
|
||||
-/
|
||||
|
@ -525,7 +525,7 @@ theorem exercise_3 (x : ℝ) (h : x > 0) : ∃ n : ℕ+, 1 / n < x := by
|
|||
conv at hr => arg 2; rw [mul_comm, ← mul_assoc]; simp
|
||||
rwa [one_mul] at hr
|
||||
|
||||
/-- #### Exercise 4
|
||||
/-- ### Exercise 4
|
||||
|
||||
If `x` is an arbitrary real number, prove that there is exactly one integer `n`
|
||||
which satisfies the inequalities `n ≤ x < n + 1`. This `n` is called the
|
||||
|
@ -540,7 +540,7 @@ theorem exercise_4 (x : ℝ) : ∃! n : ℤ, n ≤ x ∧ x < n + 1 := by
|
|||
rw [← Int.floor_eq_iff] at hy
|
||||
exact Eq.symm hy
|
||||
|
||||
/-- #### Exercise 5
|
||||
/-- ### Exercise 5
|
||||
|
||||
If `x` is an arbitrary real number, prove that there is exactly one integer `n`
|
||||
which satisfies `x ≤ n < x + 1`.
|
||||
|
@ -559,7 +559,7 @@ theorem exercise_5 (x : ℝ) : ∃! n : ℤ, x ≤ n ∧ n < x + 1 := by
|
|||
rwa [add_sub_cancel] at this
|
||||
· exact hy.left
|
||||
|
||||
/-! #### Exercise 6
|
||||
/-! ### Exercise 6
|
||||
|
||||
If `x` and `y` are arbitrary real numbers, `x < y`, prove that there exists at
|
||||
least one rational number `r` satisfying `x < r < y`, and hence infinitely many.
|
||||
|
@ -569,7 +569,7 @@ in the real-number system.
|
|||
###### TODO
|
||||
-/
|
||||
|
||||
/-! #### Exercise 7
|
||||
/-! ### Exercise 7
|
||||
|
||||
If `x` is rational, `x ≠ 0`, and `y` irrational, prove that `x + y`, `x - y`,
|
||||
`xy`, `x / y`, and `y / x` are all irrational.
|
||||
|
@ -577,14 +577,14 @@ If `x` is rational, `x ≠ 0`, and `y` irrational, prove that `x + y`, `x - y`,
|
|||
###### TODO
|
||||
-/
|
||||
|
||||
/-! #### Exercise 8
|
||||
/-! ### Exercise 8
|
||||
|
||||
Is the sum or product of two irrational numbers always irrational?
|
||||
|
||||
###### TODO
|
||||
-/
|
||||
|
||||
/-! #### Exercise 9
|
||||
/-! ### Exercise 9
|
||||
|
||||
If `x` and `y` are arbitrary real numbers, `x < y`, prove that there exists at
|
||||
least one irrational number `z` satisfying `x < z < y`, and hence infinitely
|
||||
|
@ -593,7 +593,7 @@ many.
|
|||
###### TODO
|
||||
-/
|
||||
|
||||
/-! #### Exercise 10
|
||||
/-! ### Exercise 10
|
||||
|
||||
An integer `n` is called *even* if `n = 2m` for some integer `m`, and *odd* if
|
||||
`n + 1` is even. Prove the following statements:
|
||||
|
@@ -618,7 +618,7 @@ def isEven (n : ℤ) := ∃ m : ℤ, n = 2 * m

def isOdd (n : ℤ) := isEven (n + 1)
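A minimal usage sketch of these two definitions (an added example, not part of the changeset; it assumes `norm_num` is available, as it already is elsewhere in this file via Mathlib):

```lean
-- 4 is even: the witness is m = 2, and `norm_num` closes the goal (4 : ℤ) = 2 * 2.
example : isEven 4 := ⟨2, by norm_num⟩

-- 3 is odd: `isOdd 3` unfolds to `isEven (3 + 1)`, so the witness is again 2.
example : isOdd 3 := ⟨2, by norm_num⟩
```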

-/-! #### Exercise 11
+/-! ### Exercise 11

Prove that there is no rational number whose square is `2`.

@ -629,7 +629,7 @@ contradiction.]
|
|||
###### TODO
|
||||
-/
|
||||
|
||||
/-! #### Exercise 12
|
||||
/-! ### Exercise 12
|
||||
|
||||
The Archimedean property of the real-number system was deduced as a consequence
|
||||
of the least-upper-bound axiom. Prove that the set of rational numbers satisfies
|
||||
|
|
|
@@ -4,3 +4,17 @@ import Bookshelf.Avigad.Chapter_4
import Bookshelf.Avigad.Chapter_5
import Bookshelf.Avigad.Chapter_7
import Bookshelf.Avigad.Chapter_8

/-! # Theorem Proving in Lean

## Avigad, Jeremy.

### Lean

* [Chapter 2: Dependent Type Theory](Bookshelf/Avigad/Chapter_2.html)
* [Chapter 3: Propositions and Proofs](Bookshelf/Avigad/Chapter_3.html)
* [Chapter 4: Quantifiers and Equality](Bookshelf/Avigad/Chapter_4.html)
* [Chapter 5: Tactics](Bookshelf/Avigad/Chapter_5.html)
* [Chapter 7: Inductive Types](Bookshelf/Avigad/Chapter_7.html)
* [Chapter 8: Induction and Recursion](Bookshelf/Avigad/Chapter_8.html)
-/
@ -3,7 +3,7 @@
|
|||
Dependent Type Theory
|
||||
-/
|
||||
|
||||
/-! #### Exercise 1
|
||||
/-! ### Exercise 1
|
||||
|
||||
Define the function `Do_Twice`, as described in Section 2.4.
|
||||
-/
|
||||
|
@ -20,7 +20,7 @@ def doTwiceTwice (f : (Nat → Nat) → (Nat → Nat)) (x : Nat → Nat) := f (f
|
|||
|
||||
end ex1
|
||||
|
||||
/-! #### Exercise 2
|
||||
/-! ### Exercise 2
|
||||
|
||||
Define the functions `curry` and `uncurry`, as described in Section 2.4.
|
||||
-/
|
||||
|
@ -35,7 +35,7 @@ def uncurry (f : α → β → γ) : (α × β → γ) :=
|
|||
|
||||
end ex2
|
||||
|
||||
/-! #### Exercise 3
|
||||
/-! ### Exercise 3
|
||||
|
||||
Above, we used the example `vec α n` for vectors of elements of type `α` of
|
||||
length `n`. Declare a constant `vec_add` that could represent a function that
|
||||
|
@ -70,7 +70,7 @@ variable (c d : vec Prop 2)
|
|||
|
||||
end ex3
|
||||
|
||||
/-! #### Exercise 4
|
||||
/-! ### Exercise 4
|
||||
|
||||
Similarly, declare a constant `matrix` so that `matrix α m n` could represent
|
||||
the type of `m` by `n` matrices. Declare some constants to represent functions
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
Propositions and Proofs
|
||||
-/
|
||||
|
||||
/-! #### Exercise 1
|
||||
/-! ### Exercise 1
|
||||
|
||||
Prove the following identities.
|
||||
-/
|
||||
|
@ -104,7 +104,7 @@ theorem imp_imp_not_imp_not : (p → q) → (¬q → ¬p) :=
|
|||
|
||||
end ex1
|
||||
|
||||
/-! #### Exercise 2
|
||||
/-! ### Exercise 2
|
||||
|
||||
Prove the following identities. These require classical reasoning.
|
||||
-/
|
||||
|
@ -150,7 +150,7 @@ theorem imp_imp_imp : (((p → q) → p) → p) :=
|
|||
|
||||
end ex2
|
||||
|
||||
/-! #### Exercise 3
|
||||
/-! ### Exercise 3
|
||||
|
||||
Prove `¬(p ↔ ¬p)` without using classical logic.
|
||||
-/
|
||||
|
|
|
@ -40,7 +40,7 @@ theorem forall_or_distrib
|
|||
|
||||
end ex1
|
||||
|
||||
/-! #### Exercise 2
|
||||
/-! ### Exercise 2
|
||||
|
||||
It is often possible to bring a component of a formula outside a universal
|
||||
quantifier, when it does not depend on the quantified variable. Try proving
|
||||
|
@ -78,7 +78,7 @@ theorem forall_swap : (∀ x, r → p x) ↔ (r → ∀ x, p x) :=
|
|||
|
||||
end ex2
|
||||
|
||||
/-! #### Exercise 3
|
||||
/-! ### Exercise 3
|
||||
|
||||
Consider the "barber paradox," that is, the claim that in a certain town there
|
||||
is a (male) barber that shaves all and only the men who do not shave themselves.
|
||||
|
@ -101,7 +101,7 @@ theorem barber_paradox (h : ∀ x : men, shaves barber x ↔ ¬shaves x x) : Fal
|
|||
|
||||
end ex3
|
||||
|
||||
/-! #### Exercise 4
|
||||
/-! ### Exercise 4
|
||||
|
||||
Remember that, without any parameters, an expression of type `Prop` is just an
|
||||
assertion. Fill in the definitions of `prime` and `Fermat_prime` below, and
|
||||
|
@ -143,7 +143,7 @@ def Fermat'sLastTheorem : Prop :=
|
|||
|
||||
end ex4
|
||||
|
||||
/-! #### Exercise 5
|
||||
/-! ### Exercise 5
|
||||
|
||||
Prove as many of the identities listed in Section 4.4 as you can.
|
||||
-/
|
||||
|
@ -228,7 +228,7 @@ theorem exists_self_iff_self_exists (a : α) : (∃ x, r → p x) ↔ (r → ∃
|
|||
|
||||
end ex5
|
||||
|
||||
/-! #### Exercise 6
|
||||
/-! ### Exercise 6
|
||||
|
||||
Give a calculational proof of the theorem `log_mul` below.
|
||||
-/
|
||||
|
|
|
@ -13,7 +13,7 @@ namespace Avigad.Chapter5
|
|||
|
||||
namespace ex1
|
||||
|
||||
/-! ##### Exercises 3.1 -/
|
||||
/-! #### Exercises 3.1 -/
|
||||
|
||||
section ex3_1
|
||||
|
||||
|
@ -154,7 +154,7 @@ theorem imp_imp_not_imp_not : (p → q) → (¬q → ¬p) := by
|
|||
|
||||
end ex3_1
|
||||
|
||||
/-! ##### Exercises 3.2 -/
|
||||
/-! #### Exercises 3.2 -/
|
||||
|
||||
section ex3_2
|
||||
|
||||
|
@ -223,7 +223,7 @@ theorem imp_imp_imp : (((p → q) → p) → p) := by
|
|||
|
||||
end ex3_2
|
||||
|
||||
/-! ##### Exercises 3.3 -/
|
||||
/-! #### Exercises 3.3 -/
|
||||
|
||||
section ex3_3
|
||||
|
||||
|
@ -235,7 +235,7 @@ theorem iff_not_self (hp : p) : ¬(p ↔ ¬p) := by
|
|||
|
||||
end ex3_3
|
||||
|
||||
/-! ##### Exercises 4.1 -/
|
||||
/-! #### Exercises 4.1 -/
|
||||
|
||||
section ex4_1
|
||||
|
||||
|
@ -264,7 +264,7 @@ theorem forall_or_distrib : (∀ x, p x) ∨ (∀ x, q x) → ∀ x, p x ∨ q x
|
|||
|
||||
end ex4_1
|
||||
|
||||
/-! ##### Exercises 4.2 -/
|
||||
/-! #### Exercises 4.2 -/
|
||||
|
||||
section ex4_2
|
||||
|
||||
|
@ -316,7 +316,7 @@ theorem forall_swap : (∀ x, r → p x) ↔ (r → ∀ x, p x) := by
|
|||
|
||||
end ex4_2
|
||||
|
||||
/-! ##### Exercises 4.3 -/
|
||||
/-! #### Exercises 4.3 -/
|
||||
|
||||
section ex4_3
|
||||
|
||||
|
@ -336,7 +336,7 @@ theorem barber_paradox (h : ∀ x : men, shaves barber x ↔ ¬ shaves x x)
|
|||
|
||||
end ex4_3
|
||||
|
||||
/-! ##### Exercises 4.5 -/
|
||||
/-! #### Exercises 4.5 -/
|
||||
|
||||
section ex4_5
|
||||
|
||||
|
@ -448,7 +448,7 @@ end ex4_5
|
|||
|
||||
end ex1
|
||||
|
||||
/-! #### Exercise 2
|
||||
/-! ### Exercise 2
|
||||
|
||||
Use tactic combinators to obtain a one line proof of the following:
|
||||
-/
|
||||
|
|
|
@ -5,7 +5,7 @@ Inductive Types
|
|||
|
||||
namespace Avigad.Chapter7
|
||||
|
||||
/-! #### Exercise 1
|
||||
/-! ### Exercise 1
|
||||
|
||||
Try defining other operations on the natural numbers, such as multiplication,
|
||||
the predecessor function (with `pred 0 = 0`), truncated subtraction (with
|
||||
|
@ -77,7 +77,7 @@ end Nat
|
|||
|
||||
end ex1
|
||||
|
||||
/-! #### Exercise 2
|
||||
/-! ### Exercise 2
|
||||
|
||||
Define some operations on lists, like a `length` function or the `reverse`
|
||||
function. Prove some properties, such as the following:
|
||||
|
@ -178,7 +178,7 @@ theorem reverse_reverse (t : List α)
|
|||
|
||||
end ex2
|
||||
|
||||
/-! #### Exercise 3
|
||||
/-! ### Exercise 3
|
||||
|
||||
Define an inductive data type consisting of terms built up from the following
|
||||
constructors:
|
||||
|
|
|
@ -5,7 +5,7 @@ Induction and Recursion
|
|||
|
||||
namespace Avigad.Chapter8
|
||||
|
||||
/-! #### Exercise 1
|
||||
/-! ### Exercise 1
|
||||
|
||||
Open a namespace `Hidden` to avoid naming conflicts, and use the equation
|
||||
compiler to define addition, multiplication, and exponentiation on the natural
|
||||
|
@ -29,7 +29,7 @@ def exp : Nat → Nat → Nat
|
|||
|
||||
end ex1
|
||||
|
||||
/-! #### Exercise 2
|
||||
/-! ### Exercise 2
|
||||
|
||||
Similarly, use the equation compiler to define some basic operations on lists
|
||||
(like the reverse function) and prove theorems about lists by induction (such as
|
||||
|
@ -48,7 +48,7 @@ def reverse : List α → List α
|
|||
|
||||
end ex2
|
||||
|
||||
/-! #### Exercise 3
|
||||
/-! ### Exercise 3
|
||||
|
||||
Define your own function to carry out course-of-value recursion on the natural
|
||||
numbers. Similarly, see if you can figure out how to define `WellFounded.fix` on
|
||||
|
@ -86,7 +86,7 @@ noncomputable def brecOn {motive : Nat → Sort u}
|
|||
|
||||
end ex3
|
||||
|
||||
/-! #### Exercise 4
|
||||
/-! ### Exercise 4
|
||||
|
||||
Following the examples in Section Dependent Pattern Matching, define a function
|
||||
that will append two vectors. This is tricky; you will have to define an
|
||||
|
@ -113,7 +113,7 @@ end Vector
|
|||
|
||||
end ex4
|
||||
|
||||
/-! #### Exercise 5
|
||||
/-! ### Exercise 5
|
||||
|
||||
Consider the following type of arithmetic expressions.
|
||||
-/
|
||||
|
@ -149,7 +149,7 @@ def sampleVal : Nat → Nat
|
|||
-- Try it out. You should get 47 here.
|
||||
#eval eval sampleVal sampleExpr
|
||||
|
||||
/-! ##### Constant Fusion
|
||||
/-! ### Constant Fusion
|
||||
|
||||
Implement "constant fusion," a procedure that simplifies subterms like `5 + 7
|
||||
to `12`. Using the auxiliary function `simpConst`, define a function "fuse": to
|
||||
|
|
|
@@ -1 +1,14 @@
import Bookshelf.Enderton.Logic.Chapter_1

/-! # A Mathematical Introduction to Logic

## Enderton, Herbert B.

### LaTeX

Full set of [proofs and exercises](Bookshelf/Enderton/Logic.pdf).

### Lean

* [Chapter 1: Sentential Logic](Bookshelf/Enderton/Logic/Chapter_1.html)
-/
@@ -131,7 +131,7 @@ lemma no_neg_sentential_count_eq_binary_count {φ : Wff} (h : ¬φ.hasNotSymbol)
  unfold sententialSymbolCount binarySymbolCount
  rw [ih₁ h.left, ih₂ h.right]

-/-- #### Parentheses Count
+/-- ### Parentheses Count

Let `φ` be a well-formed formula and `c` be the number of places at which a
sentential connective symbol exists. Then there are `2c` parentheses in `φ`.

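For example (an editor's illustration): the wff `((A₁ ∧ A₂) → A₃)` has `c = 2` sentential connective occurrences (`∧` and `→`) and exactly `2c = 4` parentheses.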
@ -180,7 +180,7 @@ theorem length_eq_sum_symbol_count (φ : Wff)
|
|||
|
||||
end Wff
|
||||
|
||||
/-! #### Exercise 1.1.2
|
||||
/-! ### Exercise 1.1.2
|
||||
|
||||
Show that there are no wffs of length `2`, `3`, or `6`, but that any other
|
||||
positive length is possible.
|
||||
|
@ -296,7 +296,7 @@ theorem exercise_1_1_2_ii (n : ℕ) (hn : n ≠ 2 ∧ n ≠ 3 ∧ n ≠ 6)
|
|||
|
||||
end Exercise_1_1_2
|
||||
|
||||
/-- #### Exercise 1.1.3
|
||||
/-- ### Exercise 1.1.3
|
||||
|
||||
Let `α` be a wff; let `c` be the number of places at which binary connective
|
||||
symbols (`∧`, `∨`, `→`, `↔`) occur in `α`; let `s` be the number of places at
|
||||
|
@ -320,7 +320,7 @@ theorem exercise_1_1_3 (φ : Wff)
|
|||
rw [ih₁, ih₂]
|
||||
ring
|
||||
|
||||
/-- #### Exercise 1.1.5 (a)
|
||||
/-- ### Exercise 1.1.5 (a)
|
||||
|
||||
Suppose that `α` is a wff not containing the negation symbol `¬`. Show that the
|
||||
length of `α` (i.e., the number of symbols in the string) is odd.
|
||||
|
@ -355,7 +355,7 @@ theorem exercise_1_1_5a (α : Wff) (hα : ¬α.hasNotSymbol)
|
|||
rw [hk₁, hk₂]
|
||||
ring
|
||||
|
||||
/-- #### Exercise 1.1.5 (b)
|
||||
/-- ### Exercise 1.1.5 (b)
|
||||
|
||||
Suppose that `α` is a wff not containing the negation symbol `¬`. Show that more
|
||||
than a quarter of the symbols are sentence symbols.
|
||||
|
@ -390,7 +390,7 @@ theorem exercise_1_1_5b (α : Wff) (hα : ¬α.hasNotSymbol)
|
|||
]
|
||||
exact inv_lt_one (by norm_num)
|
||||
|
||||
/-! #### Exercise 1.2.1
|
||||
/-! ### Exercise 1.2.1
|
||||
|
||||
Show that neither of the following two formulas tautologically implies the
|
||||
other:
|
||||
|
@ -430,7 +430,7 @@ theorem exercise_1_2_2a (P Q : Prop)
|
|||
: (((P → Q) → P) → P) := by
|
||||
tauto
|
||||
|
||||
/-! #### Exercise 1.2.2 (b)
|
||||
/-! ### Exercise 1.2.2 (b)
|
||||
|
||||
Define `σₖ` recursively as follows: `σ₀ = (P → Q)` and `σₖ₊₁ = (σₖ → P)`. For
|
||||
which values of `k` is `σₖ` a tautology? (Part (a) corresponds to `k = 2`.)
|
||||
|
@ -475,18 +475,18 @@ theorem exercise_1_2_2b_iii {k : ℕ} (h : Odd k)
|
|||
have ⟨r, hr⟩ := h
|
||||
refine ⟨r, hr, ?_⟩
|
||||
by_contra nr
|
||||
have : r = 0 := Nat.eq_zero_of_nonpos r nr
|
||||
have : r = 0 := Nat.eq_zero_of_not_pos nr
|
||||
rw [this] at hr
|
||||
simp only [mul_zero, zero_add] at hr
|
||||
exact absurd hr hk
|
||||
unfold σ
|
||||
rw [hn₁]
|
||||
simp only [Nat.add_eq, add_zero, not_forall, exists_prop, and_true]
|
||||
simp only [Nat.add_eq, add_zero, imp_false, not_not]
|
||||
exact exercise_1_2_2b_i False Q hn₂
|
||||
|
||||
end Exercise_1_2_2
|
||||
|
||||
/-- #### Exercise 1.2.3 (a)
|
||||
/-- ### Exercise 1.2.3 (a)
|
||||
|
||||
Determine whether or not `((P → Q) ∨ (Q → P))` is a tautology.
|
||||
-/
|
||||
|
@ -494,7 +494,7 @@ theorem exercise_1_2_3a (P Q : Prop)
|
|||
: ((P → Q) ∨ (Q → P)) := by
|
||||
tauto
|
||||
|
||||
/-- #### Exercise 1.2.3 (b)
|
||||
/-- ### Exercise 1.2.3 (b)
|
||||
|
||||
Determine whether or not `((P ∧ Q) → R)` tautologically implies
|
||||
`((P → R) ∨ (Q → R))`.
|
||||
|
@ -503,7 +503,7 @@ theorem exercise_1_2_3b (P Q R : Prop)
|
|||
: ((P ∧ Q) → R) ↔ ((P → R) ∨ (Q → R)) := by
|
||||
tauto
|
||||
|
||||
/-! #### Exercise 1.2.5
|
||||
/-! ### Exercise 1.2.5
|
||||
|
||||
Prove or refute each of the following assertions:
|
||||
|
||||
|
@ -519,7 +519,7 @@ theorem exercise_1_2_6b
|
|||
: (False ∨ True) ∧ ¬ False := by
|
||||
simp
|
||||
|
||||
/-! #### Exercise 1.2.15
|
||||
/-! ### Exercise 1.2.15
|
||||
|
||||
Of the following three formulas, which tautologically implies which?
|
||||
(a) `(A ↔ B)`
|
||||
|
|
|
@@ -5,3 +5,20 @@ import Bookshelf.Enderton.Set.Chapter_4
import Bookshelf.Enderton.Set.Chapter_6
import Bookshelf.Enderton.Set.OrderedPair
import Bookshelf.Enderton.Set.Relation

/-! # Elements of Set Theory

## Enderton, Herbert B.

### LaTeX

Full set of [proofs and exercises](Bookshelf/Enderton/Set.pdf).

### Lean

* [Chapter 1: Introduction](Bookshelf/Enderton/Set/Chapter_1.html)
* [Chapter 2: Axioms and Operations](Bookshelf/Enderton/Set/Chapter_2.html)
* [Chapter 3: Relations and Functions](Bookshelf/Enderton/Set/Chapter_3.html)
* [Chapter 4: Natural Numbers](Bookshelf/Enderton/Set/Chapter_4.html)
* [Chapter 6: Cardinal Numbers and the Axiom of Choice](Bookshelf/Enderton/Set/Chapter_6.html)
-/
@@ -64,6 +64,47 @@
  A \textbf{binary operation} on a set $A$ is a \nameref{ref:function} from
  $A \times A$ into $A$.

\section{\defined{Cardinal Arithmetic}}%
\hyperlabel{sec:cardinal-arithmetic}

  Let $\kappa$ and $\lambda$ be any cardinal numbers.
  \begin{enumerate}[(a)]
    \item $\kappa + \lambda = \card{(K \cup L)}$, where $K$ and $L$ are any
      disjoint sets of cardinality $\kappa$ and $\lambda$, respectively.
    \item $\kappa \cdot \lambda = \card{(K \times L)}$, where $K$ and $L$ are
      any sets of cardinality $\kappa$ and $\lambda$, respectively.
    \item $\kappa^\lambda = \card{(^L{K})}$, where $K$ and $L$ are any sets of
      cardinality $\kappa$ and $\lambda$, respectively.
  \end{enumerate}

  \lean{Mathlib/SetTheory/Cardinal/Basic}
    {Cardinal.add\_def}

  \lean{Mathlib/SetTheory/Cardinal/Basic}
    {Cardinal.mul\_def}

  \lean{Mathlib/SetTheory/Cardinal/Basic}
    {Cardinal.power\_def}
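A small worked instance of these definitions (an added illustration using the document's $\card{\cdot}$ and $^L{K}$ notation, not part of the changeset): take $K = \{0, 1\}$ and $L = \{a, b, c\}$, which are disjoint, so $\card{K} = 2$ and $\card{L} = 3$. Then
$$2 + 3 = \card{(K \cup L)} = 5, \qquad 2 \cdot 3 = \card{(K \times L)} = 6, \qquad 2^3 = \card{(^L{K})} = 8,$$
the last because there are exactly $2^3$ functions from the three-element set $L$ into the two-element set $K$.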
|
||||
\section{\defined{Cardinal Number}}%
|
||||
\hyperlabel{ref:cardinal-number}
|
||||
|
||||
For any set $C$, the \textbf{cardinal number} of set $C$ is denoted as
|
||||
$\card{C}$.
|
||||
Furthermore,
|
||||
\begin{enumerate}[(a)]
|
||||
\item For any sets $A$ and $B$,
|
||||
$$\card{A} = \card{B} \quad\text{iff}\quad A \equin B.$$
|
||||
\item For a finite set $A$, $\card{A}$ is the \nameref{ref:natural-number}
|
||||
$n$ for which $A \equin n$.
|
||||
\end{enumerate}
|
||||
|
||||
\lean{Mathlib/Data/Finset/Card}
|
||||
{Finset.card}
|
||||
|
||||
\lean{Mathlib/SetTheory/Cardinal/Basic}
|
||||
{Cardinal}
|
||||
|
||||
\section{\defined{Cartesian Product}}%
|
||||
\hyperlabel{ref:cartesian-product}
|
||||
|
||||
|
@ -77,19 +118,6 @@
|
|||
|
||||
\lean{Mathlib/Data/Set/Prod}{Set.prod}
|
||||
|
||||
\section{\defined{Cardinal Arithmetic}}%
|
||||
\hyperlabel{sec:cardinal-arithmetic}
|
||||
|
||||
Let $\kappa$ and $\lambda$ be any cardinal numbers.
|
||||
\begin{enumerate}[(a)]
|
||||
\item $\kappa + \lambda = \card{(K \cup L)}$, where $K$ and $L$ are any
|
||||
disjoint sets of cardinality $\kappa$ and $\lambda$, respectively.
|
||||
\item $\kappa \cdot \lambda = \card{(K \times L)}$, where $K$ and $L$ are
|
||||
any sets of cardinality $\kappa$ and $\lambda$, respectively.
|
||||
\item $\kappa^\lambda = \card{^L{K}}$, where $K$ and $L$ are any sets of
|
||||
cardinality $\kappa$ and $\lambda$, respectively.
|
||||
\end{enumerate}
|
||||
|
||||
\section{\defined{Compatible}}%
|
||||
\hyperlabel{ref:compatible}
|
||||
|
||||
|
@ -142,9 +170,8 @@
|
|||
\section{\defined{Equinumerous}}%
|
||||
\hyperlabel{ref:equinumerous}
|
||||
|
||||
A set $A$ is \textbf{equinumerous} to a set $B$ (written
|
||||
$\equinumerous{A}{B}$) if and only if there is a one-to-one
|
||||
\nameref{ref:function} from $A$ onto $B$.
|
||||
A set $A$ is \textbf{equinumerous} to a set $B$ (written $A \equin B$) if and
|
||||
only if there is a one-to-one \nameref{ref:function} from $A$ onto $B$.
|
||||
In other words, there exists a one-to-one correspondence between $A$ and $B$.
|
||||
|
||||
\lean*{Mathlib/Init/Function}
|
||||
|
@ -394,6 +421,17 @@
|
|||
\lean{Init/Prelude}
|
||||
{Mul.mul}
|
||||
|
||||
\section{\defined{Natural Map}}%
|
||||
\hyperlabel{ref:natural-map}
|
||||
|
||||
Let $R$ be an \nameref{ref:equivalence-relation} on $A$.
|
||||
Then the \textbf{natural map} (or \textbf{canonical map})
|
||||
$\phi \colon A \rightarrow A / R$ is defined as $$\phi(x) = [x]_R$$ for
|
||||
$x \in A$.
|
||||
|
||||
\lean*{Init/Core}
|
||||
{Quotient.lift}
|
||||
|
||||
\section{\defined{Natural Number}}%
|
||||
\hyperlabel{ref:natural-number}
|
||||
|
||||
|
@ -1876,7 +1914,7 @@
|
|||
We proceed by contradiction.
|
||||
Suppose there existed a set $A$ consisting of every singleton.
|
||||
Then the \nameref{ref:union-axiom} suggests $\bigcup A$ is a set.
|
||||
But this set is precisely the class of all sets, which is \textit{not} a
|
||||
But this "set" is precisely the class of all sets, which is \textit{not} a
|
||||
set.
|
||||
Thus our original assumption was incorrect.
|
||||
That is, there is no set to which every singleton belongs.
|
||||
|
@ -3045,6 +3083,9 @@
|
|||
\code{Bookshelf/Enderton/Set/Relation}
|
||||
{Set.Relation.one\_to\_one\_comp\_is\_one\_to\_one}
|
||||
|
||||
\lean{Mathlib/Data/Set/Function}
|
||||
{Set.InjOn.comp}
|
||||
|
||||
\begin{proof}
|
||||
Let $F \colon B \rightarrow C$ and $G \colon A \rightarrow B$ be
|
||||
one-to-one \nameref{ref:function}s from sets $A$, $B$, and $C$.
|
||||
|
@ -3089,35 +3130,23 @@
|
|||
\end{align*}
|
||||
\end{proof}
|
||||
|
||||
\subsection{\verified{Theorem 3J}}%
|
||||
\hyperlabel{sub:theorem-3j}
|
||||
\subsection{\verified{Theorem 3J (a)}}%
|
||||
\hyperlabel{sub:theorem-3j-a}
|
||||
|
||||
\begin{theorem}[3J]
|
||||
\begin{theorem}[3J(a)]
|
||||
Assume that $F \colon A \rightarrow B$, and that $A$ is nonempty.
|
||||
\begin{enumerate}[(a)]
|
||||
\item There exists a function $G \colon B \rightarrow A$
|
||||
(a "left inverse") such that $G \circ F$ is the identity function $I_A$
|
||||
on $A$ iff $F$ is one-to-one.
|
||||
\item There exists a function $H \colon B \rightarrow A$
|
||||
(a "right inverse") such that $F \circ H$ is the identity function $I_B$
|
||||
on $B$ iff $F$ maps $A$ \textit{onto} $B$.
|
||||
\end{enumerate}
|
||||
There exists a function $G \colon B \rightarrow A$ (a "left inverse") such
|
||||
that $G \circ F$ is the identity function $I_A$ on $A$ iff $F$ is
|
||||
one-to-one.
|
||||
\end{theorem}
|
||||
|
||||
\code{Bookshelf/Enderton/Set/Chapter\_3}
|
||||
{Enderton.Set.Chapter\_3.theorem\_3j\_a}
|
||||
|
||||
\code{Bookshelf/Enderton/Set/Chapter\_3}
|
||||
{Enderton.Set.Chapter\_3.theorem\_3j\_b}
|
||||
|
||||
\begin{proof}
|
||||
|
||||
Let $F$ be a \nameref{ref:function} from nonempty set $A$ to set $B$.
|
||||
|
||||
\paragraph{(a)}%
|
||||
|
||||
We prove there exists a function $G \colon B \rightarrow A$ such that
|
||||
$G \circ F = I_A$ if and only if $F$ is one-to-one.
|
||||
|
||||
\subparagraph{($\Rightarrow$)}%
|
||||
|
||||
Let $G \colon B \rightarrow A$ such that $G \circ F = I_A$.
|
||||
|
@ -3145,10 +3174,24 @@
|
|||
Thus $(G \circ F)(x) = G(F(x)) = F^{-1}(F(x)) = x$ by
|
||||
\nameref{sub:theorem-3g}.
|
||||
|
||||
\paragraph{(b)}%
|
||||
\end{proof}
|
||||
|
||||
We prove there exists a function $H \colon B \rightarrow A$ such that
|
||||
$F \circ H = I_A$ if and only if $F$ maps $A$ onto $B$.
|
||||
\subsection{\unverified{Theorem 3J (b)}}%
|
||||
\hyperlabel{sub:theorem-3j-b}
|
||||
|
||||
\begin{theorem}[3J(b)]
|
||||
Assume that $F \colon A \rightarrow B$, and that $A$ is nonempty.
|
||||
There exists a function $H \colon B \rightarrow A$ (a "right inverse") such
|
||||
that $F \circ H$ is the identity function $I_B$ on $B$ iff $F$ maps $A$
|
||||
\textit{onto} $B$.
|
||||
\end{theorem}
|
||||
|
||||
\code{Bookshelf/Enderton/Set/Chapter\_3}
|
||||
{Enderton.Set.Chapter\_3.theorem\_3j\_b}
|
||||
|
||||
\begin{proof}
|
||||
|
||||
Let $F$ be a \nameref{ref:function} from nonempty set $A$ to set $B$.
|
||||
|
||||
\subparagraph{($\Rightarrow$)}%
|
||||
|
||||
|
@ -3188,9 +3231,9 @@
|
|||
\begin{proof}
|
||||
By definition, a one-to-one correspondence $f$ between sets $A$ and $B$ must
|
||||
be both one-to-one and onto.
|
||||
By \nameref{sub:theorem-3j}, $f$ is one-to-one if and only if it has a left
|
||||
inverse.
|
||||
The same theorem states that $f$ is onto $B$ if and only if it has a right
|
||||
By \nameref{sub:theorem-3j-a}, $f$ is one-to-one if and only if it has a
|
||||
left inverse.
|
||||
By \nameref{sub:theorem-3j-b}, $f$ is onto $B$ if and only if it has a right
|
||||
inverse.
|
||||
\end{proof}
|
||||
|
||||
|
@ -8769,10 +8812,9 @@
|
|||
\begin{theorem}[6A]
|
||||
For any sets $A$, $B$, and $C$,
|
||||
\begin{enumerate}[(a)]
|
||||
\item $\equinumerous{A}{A}$.
|
||||
\item If $\equinumerous{A}{B}$, then $\equinumerous{B}{A}$.
|
||||
\item If $\equinumerous{A}{B}$ and $\equinumerous{B}{C}$, then
|
||||
$\equinumerous{A}{C}$.
|
||||
\item $A \equin A$.
|
||||
\item If $A \equin B$, then $B \equin A$.
|
||||
\item If $A \equin B$ and $B \equin C$, then $A \equin C$.
|
||||
\end{enumerate}
|
||||
\end{theorem}
|
||||
|
||||
|
@ -8798,18 +8840,18 @@
|
|||
|
||||
\paragraph{(b)}%
|
||||
|
||||
Suppose $\equinumerous{A}{B}$.
|
||||
Suppose $A \equin B$.
|
||||
Then there exists a one-to-one correspondence $F$ between $A$ and $B$.
|
||||
Consider now \nameref{ref:inverse} $$F^{-1} = \{\tuple{u, v} \mid vFu\}.$$
|
||||
By \nameref{sub:one-to-one-inverse}, $F^{-1}$ is a one-to-one function.
|
||||
For all $y \in A$, $\tuple{y, F(y)} \in F$.
|
||||
Then $\tuple{F(y), y} \in F^{-1}$ meaning $F^{-1}$ is onto $A$.
|
||||
Hence $F^{-1}$ is a one-to-one correspondence between $B$ and $A$, i.e.
|
||||
$\equinumerous{B}{A}$.
|
||||
$B \equin A$.
|
||||
|
||||
\paragraph{(c)}%
|
||||
|
||||
Suppose $\equinumerous{A}{B}$ and $\equinumerous{B}{C}$.
|
||||
Suppose $A \equin B$ and $B \equin C$.
|
||||
Then there exists a one-to-one correspondence $G$ between $A$ and $B$ and
|
||||
a one-to-one correspondence $F$ between $B$ and $C$.
|
||||
By \nameref{sub:one-to-one-composition}, $F \circ G$ is a one-to-one
|
||||
|
@ -8823,7 +8865,7 @@
|
|||
Then $F(G(x)) = y$.
|
||||
Thus $\ran{(F \circ G)} = C$ meaning $F \circ G$ is onto $C$.
|
||||
Hence $F \circ G$ is a one-to-one correspondence function between $A$ and
|
||||
$C$, i.e. $\equinumerous{A}{C}$.
|
||||
$C$, i.e. $A \equin C$.
|
||||
|
||||
\end{proof}
|
||||
|
||||
|
@ -8957,13 +8999,13 @@
|
|||
|
||||
\begin{proof}
|
||||
Let $S$ be a \nameref{ref:finite-set} and $S'$ be a
|
||||
\nameref{ref:proper-subset} $S'$ of $S$.
|
||||
\nameref{ref:proper-subset} of $S$.
|
||||
Then there exists some set $T$, disjoint from $S'$, such that
|
||||
$S' \cup T = S$.
|
||||
By definition of a \nameref{ref:finite-set}, $S$ is
|
||||
\nameref{ref:equinumerous} to a natural number $n$.
|
||||
By \nameref{sub:theorem-6a}, $\equinumerous{S' \cup T}{S}$ which, by the
|
||||
same theorem, implies $\equinumerous{S' \cup T}{n}$.
|
||||
By \nameref{sub:theorem-6a}, $S' \cup T \equin S$ which, by the same
|
||||
theorem, implies $S' \cup T \equin n$.
|
||||
|
||||
Let $f$ be a one-to-one correspondence between $S' \cup T$ and $n$.
|
||||
Then $f \restriction S'$ is a one-to-one correspondence between $S'$ and a
|
||||
|
@ -9060,8 +9102,8 @@
|
|||
By \nameref{sub:trichotomy-law-natural-numbers}, exactly one of three
|
||||
situations is possible: $n = m$, $n < m$, or $m < n$.
|
||||
|
||||
If $n < m$, then $\equinumerous{m}{S}$ and $\equinumerous{S}{n}$.
|
||||
By \nameref{sub:theorem-6a}, it follows $\equinumerous{m}{n}$.
|
||||
If $n < m$, then $m \equin S$ and $S \equin n$.
|
||||
By \nameref{sub:theorem-6a}, it follows $m \equin n$.
|
||||
But \nameref{sub:pigeonhole-principle} indicates no natural number is
|
||||
equinumerous to a proper subset of itself, a contradiction.
|
||||
If $m < n$, a parallel argument applies.
|
||||
|
@ -9073,7 +9115,7 @@
|
|||
\hyperlabel{sub:lemma-6f}
|
||||
|
||||
\begin{lemma}[6F]
|
||||
If $C$ is a proper subset of a natural number $n$, then $C \approx m$ for
|
||||
If $C$ is a proper subset of a natural number $n$, then $C \equin m$ for
|
||||
some $m$ less than $n$.
|
||||
\end{lemma}
|
||||
|
||||
|
@ -9086,7 +9128,7 @@
|
|||
\begin{equation}
|
||||
\hyperlabel{sub:lemma-6f-eq1}
|
||||
S = \{n \in \omega \mid \forall C \subset n,
|
||||
\exists m < n \text{ such that } \equinumerous{C}{m}\}.
|
||||
\exists m < n \text{ such that } C \equin m\}.
|
||||
\end{equation}
|
||||
We prove that (i) $0 \in S$ and (ii) if $n \in S$ then $n^+ \in S$.
|
||||
Afterward we prove (iii) the lemma statement.
|
||||
|
@ -9143,7 +9185,7 @@
|
|||
\nameref{ref:inductive-set}.
|
||||
By \nameref{sub:theorem-4b}, $S = \omega$.
|
||||
Therefore, for every proper subset $C$ of a natural number $n$, there
|
||||
exists some $m < n$ such that $\equinumerous{C}{n}$.
|
||||
exists some $m < n$ such that $C \equin n$.
|
||||
|
||||
\end{proof}
|
||||
|
||||
|
@ -9162,26 +9204,232 @@
|
|||
Clearly, if $S' = S$, then $S'$ is finite.
|
||||
Therefore suppose $S'$ is a proper subset of $S$.
|
||||
|
||||
By definition of finite set, $S$ is \nameref{ref:equinumerous} to some
|
||||
By definition of a finite set, $S$ is \nameref{ref:equinumerous} to some
|
||||
natural number $n$.
|
||||
Let $f$ be a one-to-one correspondence between $S$ and $n$.
|
||||
Then $f \restriction S'$ is a one-to-one correspondence between $S'$ and
|
||||
some proper subset of $n$.
|
||||
By \nameref{sub:lemma-6f}, $\ran{(f \restriction S')}$ is equinumerous to
|
||||
some $m < n$.
|
||||
Then \nameref{sub:theorem-6a} indicates $\equinumerous{S'}{m}$.
|
||||
Then \nameref{sub:theorem-6a} indicates $S' \equin m$.
|
||||
Hence $S'$ is a finite set.
|
||||
\end{proof}
|
||||
|
||||
\subsection{\verified{Subset Size}}%
|
||||
\hyperlabel{sub:subset-size}
|
||||
|
||||
\begin{lemma}
|
||||
Let $A$ be a finite set and $B \subseteq A$.
|
||||
Then there exist natural numbers $m, n \in \omega$ such that
|
||||
$B \equin m$, $A \equin n$, and $m \leq n$.
|
||||
\end{lemma}
|
||||
|
||||
\code{Bookshelf/Enderton/Set/Chapter\_6}
|
||||
{Enderton.Set.Chapter\_6.subset\_size}
|
||||
|
||||
\begin{proof}
|
||||
|
||||
Let $A$ be a \nameref{ref:finite-set} and $B$ be a subset of $A$.
|
||||
By \nameref{sub:corollary-6g}, $B$ must be finite.
|
||||
By definition of a finite set, there exist natural numbers
|
||||
$m, n \in \omega$ such that $B \equin m$ and $A \equin n$.
|
||||
By \nameref{sub:trichotomy-law-natural-numbers}, it suffices to prove that
|
||||
$m > n$ is not possible for then either $m < n$ or $m = n$.
|
||||
|
||||
For the sake of contradiction, assume $m > n$.
|
||||
By definition of \nameref{ref:equinumerous}, there exists a one-to-one
|
||||
correspondence between $B$ and $m$.
|
||||
\nameref{sub:theorem-6a} indicates there then exists a one-to-one
|
||||
correspondence $f$ between $m$ and $B$.
|
||||
Likewise, there exists a one-to-one correspondence $g$ between $A$ and
|
||||
$n$.
|
||||
|
||||
Define $h \colon A \rightarrow B$ as $h(x) = f(g(x))$ for all $x \in A$.
|
||||
Since $n \subset m$ by \nameref{sub:corollary-4m}, $h$ is well-defined.
|
||||
By \nameref{sub:one-to-one-composition}, $h$ must be one-to-one.
|
||||
Thus $h$ is a one-to-one correspondence between $A$ and $\ran{h}$, i.e.
|
||||
$A \equin \ran{h}$.
|
||||
But $n < m$ meaning $\ran{h} \subset B$ which in turn is a proper subset
|
||||
of $A$ by hypothesis.
|
||||
\nameref{sub:corollary-6c} states no finite set is equinumerous to a
|
||||
proper subset of itself, a contradiction.
|
||||
|
||||
\end{proof}
|
||||
|
||||
\subsection{\pending{Finite Domain and Range Size}}%
|
||||
\hyperlabel{sub:finite-domain-range-size}
|
||||
|
||||
\begin{lemma}
|
||||
Let $A$ and $B$ be finite sets and $f \colon A \rightarrow B$ be a function.
|
||||
Then there exist natural numbers $m, n \in \omega$ such that
|
||||
$\dom{f} \equin m$, $\ran{f} \equin n$, and $m \geq n$.
|
||||
\end{lemma}
|
||||
|
||||
\begin{note}
|
||||
This proof avoids the \nameref{ref:axiom-of-choice-1} because $A$ and $B$
|
||||
are finite.
|
||||
In particular, we are able to choose a "smallest" element of each preimage
|
||||
set.
|
||||
Contrast this to \nameref{sub:theorem-3j-b}.
|
||||
\end{note}
|
||||
|
||||
\begin{proof}
|
||||
Let $A$ and $B$ be \nameref{ref:finite-set}s and $f \colon A \rightarrow B$
|
||||
be a function.
|
||||
By definition of finite sets, there exist \nameref{ref:natural-number}s
|
||||
$m, p \in \omega$ such that $A \equin m$ and $B \equin p$.
|
||||
By definition of the \nameref{ref:domain} of a function, $\dom{f} = A$.
|
||||
Thus $\dom{f} \equin m$.
|
||||
|
||||
By \nameref{sub:theorem-6a}, there exists a one-to-one correspondence $g$
|
||||
between $m$ and $\dom{f} = A$.
|
||||
For all $y \in \ran{f}$, consider $\img{f^{-1}}{\{y\}}$.
|
||||
Let $$A_y = \{ x \in m \mid f(g(x)) = y \}.$$
|
||||
Since $g$ is a one-to-one correspondence, it follows that
|
||||
$A_y \equin \img{f^{-1}}{\{y\}}$.
|
||||
Since $A_y$ is a nonempty subset of natural numbers, the
|
||||
\nameref{sub:well-ordering-natural-numbers} implies there exists a least
|
||||
element, say $q_y$.
|
||||
Define $C = \{q_y \mid y \in \ran{f}\}$.
|
||||
Thus $h \colon C \rightarrow \ran{f}$ given by $h(x) = f(g(x))$ is a
|
||||
one-to-one correspondence between $C$ and $\ran{f}$ by construction.
|
||||
That is, $C \equin \ran{f}$.
|
||||
By \nameref{sub:lemma-6f}, there exists some $n \leq m$ such that
|
||||
$C \equin n$.
|
||||
By \nameref{sub:theorem-6a}, $n \equin \ran{f}$.
|
||||
\end{proof}
|
||||
|
||||
\subsection{\verified{Set Difference Size}}%
|
||||
\hyperlabel{sub:set-difference-size}
|
||||
|
||||
\begin{lemma}
|
||||
Let $A \equin m$ for some natural number $m$ and $B \subseteq A$.
|
||||
Then there exists some $n \in \omega$ such that $n \leq m$, $B \equin n$,
|
||||
and $A - B \equin m - n$.
|
||||
\end{lemma}
|
||||
|
||||
\code{Bookshelf/Enderton/Set/Chapter\_6}
|
||||
{Enderton.Set.Chapter\_6.sdiff\_size}
|
||||
|
||||
\begin{proof}
|
||||
|
||||
Let
|
||||
\begin{equation}
|
||||
\hyperlabel{sub:set-difference-size-ih}
|
||||
S = \{m \in \omega \mid
|
||||
\forall A \equin m, \forall B \subseteq A, \exists n
|
||||
\in \omega (n \leq m \land B \equin n \land A - B \equin m - n)\}.
|
||||
\end{equation}
|
||||
We prove that (i) $0 \in S$ and (ii) if $n \in S$ then $n^+ \in S$.
|
||||
Afterward we prove (iii) the lemma statement.
|
||||
|
||||
\paragraph{(i)}%
|
||||
\hyperlabel{par:set-difference-size-i}
|
||||
|
||||
Let $A \equin 0$ and $B \subseteq A$.
|
||||
Then it follows $A = B = \emptyset = 0$.
|
||||
Since $0 \leq 0$, $B \equin 0$, and $A - B = \emptyset \equin 0 = 0 - 0$,
|
||||
it follows $0 \in S$.
|
||||
|
||||
\paragraph{(ii)}%
|
||||
\hyperlabel{par:set-difference-size-ii}
|
||||
|
||||
Suppose $m \in S$ and consider $m^+$.
|
||||
Let $A \equin m^+$ and let $B \subseteq A$.
|
||||
By definition of \nameref{ref:equinumerous}, there exists a one-to-one
|
||||
correspondence $f$ between $A$ and $m^+$.
|
||||
Since $f$ is one-to-one and onto, there exists a unique value $a \in A$
|
||||
such that $f(a) = m$.
|
||||
Then $B - \{a\} \subseteq A - \{a\}$ and $f$ is a one-to-one
|
||||
correspondence between $A - \{a\}$ and $m$.
|
||||
By \ihref{sub:set-difference-size-ih}, there exists some $n \in \omega$
|
||||
such that $n \leq m$, $B - \{a\} \equin n$ and
|
||||
\begin{equation}
|
||||
\hyperlabel{par:set-difference-size-ii-eq1}
|
||||
(A - \{a\}) - (B - \{a\}) \equin m - n.
|
||||
\end{equation}
|
||||
There are two cases to consider:
|
||||
|
||||
\subparagraph{Case 1}%
|
||||
|
||||
Assume $a \in B$.
|
||||
Then $B \equin n^+$.
|
||||
Furthermore, by definition of the set difference,
|
||||
\begin{align}
|
||||
(A - \{a\}) & - (B - \{a\}) \nonumber \\
|
||||
& = \{x \mid
|
||||
x \in A - \{a\} \land x \not\in B - \{a\}\} \nonumber \\
|
||||
& = \{x \mid
|
||||
(x \in A \land x \neq a) \land
|
||||
\neg(x \in B \land x \neq a)\} \nonumber \\
|
||||
& = \{x \mid
|
||||
(x \in A \land x \neq a) \land
|
||||
(x \not\in B \lor x = a)\} \nonumber \\
|
||||
& = \{x \mid
|
||||
(x \in A \land x \neq a \land x \not\in B) \lor
|
||||
(x \in A \land x \neq a \land x = a)\} \nonumber \\
|
||||
& = \{x \mid
|
||||
(x \in A \land x \neq a \land x \not\in B) \lor F\} \nonumber \\
|
||||
& = \{x \mid
|
||||
(x \in A \land x \neq a \land x \not\in B)\} \nonumber \\
|
||||
& = \{x \mid x \in A - B \land x \neq a\} \nonumber \\
|
||||
& = \{x \mid x \in A - B \land x \not\in \{a\}\} \nonumber \\
|
||||
& = (A - B) - \{a\}.
|
||||
\hyperlabel{par:set-difference-size-ii-eq2}
|
||||
\end{align}
|
||||
Since $a \in A$ and $a \in B$, $(A - B) - \{a\} = A - B$.
|
||||
Thus
|
||||
\begin{align*}
|
||||
(A - \{a\} - (B - \{a\})
|
||||
& = (A - B) - \{a\} & \eqref{par:set-difference-size-ii-eq2} \\
|
||||
& = A - B \\
|
||||
& \equin m - n & \eqref{par:set-difference-size-ii-eq1} \\
|
||||
& \equin m^+ - n^+.
|
||||
\end{align*}
|
||||
|
||||
\subparagraph{Case 2}%
|
||||
|
||||
Assume $a \not\in B$.
|
||||
Then $B - \{a\} = B$ (i.e. $B \equin n$) and
|
||||
\begin{align*}
|
||||
(A - \{a\}) - (B - \{a\})
|
||||
& = (A - \{a\}) - B \\
|
||||
& \equin m - n. & \eqref{par:set-difference-size-ii-eq1}
|
||||
\end{align*}
|
||||
The above implies that there exists a one-to-one correspondence $g$
|
||||
between $(A - \{a\}) - B$ and $m - n$.
|
||||
Therefore $g \cup \{\tuple{a, m}\}$ is a one-to-one correspondence
|
||||
between $A - B$ and $(m - n) \cup \{m\}$.
|
||||
Hence $$A - B \equin (m - n) \cup \{m\} \equin m^+ - n.$$
|
||||
|
||||
\subparagraph{Subconclusion}%
|
||||
|
||||
The above two cases are exhaustive and both conclude the existence of
|
||||
some $n \in \omega$ such that $n \leq m^+$, $B \equin n$ and
|
||||
$A - B \equin m^+ - n$.
|
||||
Hence $m^+ \in S$.
|
||||
|
||||
\paragraph{(iii)}%
|
||||
|
||||
By \nameref{par:set-difference-size-i} and
|
||||
\nameref{par:set-difference-size-ii}, $S \subseteq \omega$ is an
|
||||
\nameref{ref:inductive-set}.
|
||||
Thus \nameref{sub:theorem-4b} implies $S = \omega$.
|
||||
Hence, for all $A \equin m$ for some $m \in \omega$, if $B \subseteq A$,
|
||||
then there exists some $n \in \omega$ such that $n \leq m$,
|
||||
$B \equin n$, and $A - B \equin m - n$.
|
||||
|
||||
\end{proof}
|
||||
|
||||
\subsection{\sorry{Theorem 6H}}%
|
||||
\hyperlabel{sub:theorem-6h}
|
||||
|
||||
Assume that $\equinumerous{K_1}{K_2}$ and $\equinumerous{L_1}{L_2}$.
|
||||
Assume that $K_1 \equin K_2$ and $L_1 \equin L_2$.
|
||||
\begin{enumerate}[(a)]
|
||||
\item If $K_1 \cap L_1 = K_2 \cap L_2 = \emptyset$, then
|
||||
$\equinumerous{K_1 \cup L_1}{K_2 \cup L_2}$.
|
||||
\item $\equinumerous{K_1 \times L_1}{K_2 \times L_2}$.
|
||||
\item $\equinumerous{^{(L_1)}{K_1}}{^{(L_2)}{K_2}}$.
|
||||
$K_1 \cup L_1 \equin K_2 \cup L_2$.
|
||||
\item $K_1 \times L_1 \equin K_2 \times L_2$.
|
||||
\item $^{(L_1)}{K_1} \equin ^{(L_2)}{K_2}$.
|
||||
\end{enumerate}
|
||||
|
||||
\begin{proof}
|
||||
|
@ -9530,7 +9778,7 @@
|
|||
Refer to \nameref{sub:theorem-6a}.
|
||||
\end{proof}
|
||||
|
||||
\subsection{\sorry{Exercise 6.6}}%
|
||||
\subsection{\unverified{Exercise 6.6}}%
|
||||
\hyperlabel{sub:exercise-6.6}
|
||||
|
||||
Let $\kappa$ be a nonzero cardinal number.
|
||||
|
@ -9538,20 +9786,74 @@
|
|||
belongs.
|
||||
|
||||
\begin{proof}
|
||||
TODO
|
||||
Let $\kappa$ be a nonzero cardinal number and define
|
||||
$$\mathbf{K}_\kappa = \{ X \mid \card{X} = \kappa \}.$$
|
||||
For the sake of contradiction, suppose $\mathbf{K}_\kappa$ is a set.
|
||||
Then the \nameref{ref:union-axiom} suggests $\bigcup \mathbf{K}_{\kappa}$ is
|
||||
a set.
|
||||
But this "set" is precisely the class of all sets, which is \textit{not} a
|
||||
set.
|
||||
Thus our original assumption was incorrect.
|
||||
That is, there does not exist a set to which every set of cardinality
|
||||
$\kappa$ belongs.
|
||||
\end{proof}
|
||||
|
||||
\subsection{\sorry{Exercise 6.7}}%
|
||||
\subsection{\pending{Exercise 6.7}}%
|
||||
\hyperlabel{sub:exercise-6.7}
|
||||
|
||||
Assume that $A$ is finite and $f \colon A \rightarrow A$.
|
||||
Show that $f$ is one-to-one iff $\ran{f} = A$.
|
||||
|
||||
\code*{Bookshelf/Enderton/Set/Chapter\_6}
|
||||
{Enderton.Set.Chapter\_6.exercise\_6\_7}
|
||||
|
||||
\begin{proof}
|
||||
TODO
|
||||
Let $A$ be a \nameref{ref:finite-set} and $f \colon A \rightarrow A$.
|
||||
|
||||
\paragraph{($\Rightarrow$)}%
|
||||
|
||||
Suppose $f$ is one-to-one.
|
||||
Then $f$ is a one-to-one correspondence between $A$ and $\ran{f}$.
|
||||
That is, $A \equin \ran{f}$.
|
||||
Because $f$ maps $A$ onto $A$, $\ran{f} \subseteq A$.
|
||||
Hence $\ran{f} \subset A$ or $\ran{f} = A$.
|
||||
But, by \nameref{sub:corollary-6c}, $\ran{f}$ cannot be a proper subset of
|
||||
$A$.
|
||||
Thus $\ran{f} = A$.
|
||||
|
||||
\paragraph{($\Leftarrow$)}%
|
||||
|
||||
If $A = \emptyset$, then $f$ is trivially one-to-one.
|
||||
Assume now $A \neq \emptyset$ and $\ran{f} = A$.
|
||||
Let $x_1, x_2 \in A$ such that $f(x_1) = f(x_2) = y$ for some $y \in A$.
|
||||
We must prove that $x_1 = x_2$.
|
||||
|
||||
Let $B = \img{f^{-1}}{\{y\}}$.
|
||||
Then $x_1, x_2 \in B$.
|
||||
Since $B \subseteq A$, \nameref{sub:subset-size} indicates that there
|
||||
exist natural numbers $m_1, n_1 \in \omega$ such that $B \equin m_1$,
|
||||
$A \equin n_1$, and $m_1 \leq n_1$.
|
||||
Define $f' \colon (A - B) \rightarrow (A - \{y\})$ as the
|
||||
\nameref{ref:restriction} of $f$ to $A - B$.
|
||||
Since $\ran{f} = A$, it follows $\ran{f'} = A - \{y\}$.
|
||||
Since \nameref{sub:corollary-6g} implies $A - B$ and $A - \{y\}$ are
|
||||
finite sets, \nameref{sub:finite-domain-range-size} implies the
|
||||
existence of natural numbers $m_2, n_2 \in \omega$ such that
|
||||
$\dom{f'} \equin m_2$, $\ran{f'} \equin n_2$, and $m_2 \geq n_2$.
|
||||
Thus, by \nameref{sub:set-difference-size},
|
||||
\begin{align*}
|
||||
m_2 & \equin \dom{f'} \equin A - B \equin n_1 - m_1 \\
|
||||
n_2 & \equin \ran{f'} \equin A - \{y\} \equin n_1 - 1.
|
||||
\end{align*}
|
||||
By \nameref{sub:corollary-6e}, $m_2 = n_1 - m_1$ and $n_2 = n_1 - 1$.
|
||||
Since $m_2 \geq n_2$, $n_1 - m_1 \geq n_1 - 1$.
|
||||
But $1 \leq m_1 \leq n_1$ meaning $m_1 = 1$.
|
||||
Hence $B$ is a singleton.
|
||||
Therefore $x_1 = x_2$, i.e. $f$ is one-to-one.
|
||||
|
||||
\end{proof}
|
||||
|
||||
\subsection{\sorry{Exercise 6.8}}%
|
||||
\subsection{\pending{Exercise 6.8}}%
|
||||
\hyperlabel{sub:exercise-6.8}
|
||||
|
||||
Prove that the union of two finite sets is finite, without any use of
|
||||
|
@ -9561,7 +9863,7 @@
|
|||
TODO
|
||||
\end{proof}
|
||||
|
||||
\subsection{\sorry{Exercise 6.9}}%
|
||||
\subsection{\pending{Exercise 6.9}}%
|
||||
\hyperlabel{sub:exercise-6.9}
|
||||
|
||||
Prove that the Cartesian product of two finite sets is finite, without any use
|
||||
|
|
|
@ -19,7 +19,7 @@ The `∅` does not equal the singleton set containing `∅`.
|
|||
lemma empty_ne_singleton_empty (h : ∅ = ({∅} : Set (Set α))) : False :=
|
||||
absurd h (Ne.symm $ Set.singleton_ne_empty (∅ : Set α))
|
||||
|
||||
/-- #### Exercise 1.1a
|
||||
/-- ### Exercise 1.1a
|
||||
|
||||
`{∅} ___ {∅, {∅}}`
|
||||
-/
|
||||
|
@ -27,7 +27,7 @@ theorem exercise_1_1a
|
|||
: {∅} ∈ ({∅, {∅}} : Set (Set (Set α)))
|
||||
∧ {∅} ⊆ ({∅, {∅}} : Set (Set (Set α))) := ⟨by simp, by simp⟩
|
||||
|
||||
/-- #### Exercise 1.1b
|
||||
/-- ### Exercise 1.1b
|
||||
|
||||
`{∅} ___ {∅, {{∅}}}`
|
||||
-/
|
||||
|
@ -39,7 +39,7 @@ theorem exercise_1_1b
|
|||
simp at h
|
||||
exact empty_ne_singleton_empty h
|
||||
|
||||
/-- #### Exercise 1.1c
|
||||
/-- ### Exercise 1.1c
|
||||
|
||||
`{{∅}} ___ {∅, {∅}}`
|
||||
-/
|
||||
|
@ -47,7 +47,7 @@ theorem exercise_1_1c
|
|||
: {{∅}} ∉ ({∅, {∅}} : Set (Set (Set (Set α))))
|
||||
∧ {{∅}} ⊆ ({∅, {∅}} : Set (Set (Set (Set α)))) := ⟨by simp, by simp⟩
|
||||
|
||||
/-- #### Exercise 1.1d
|
||||
/-- ### Exercise 1.1d
|
||||
|
||||
`{{∅}} ___ {∅, {{∅}}}`
|
||||
-/
|
||||
|
@ -59,7 +59,7 @@ theorem exercise_1_1d
|
|||
simp at h
|
||||
exact empty_ne_singleton_empty h
|
||||
|
||||
/-- #### Exercise 1.1e
|
||||
/-- ### Exercise 1.1e
|
||||
|
||||
`{{∅}} ___ {∅, {∅, {∅}}}`
|
||||
-/
|
||||
|
|
|
@ -10,7 +10,7 @@ Axioms and Operations
|
|||
|
||||
namespace Enderton.Set.Chapter_2
|
||||
|
||||
/-! #### Commutative Laws
|
||||
/-! ### Commutative Laws
|
||||
|
||||
For any sets `A` and `B`,
|
||||
```
|
||||
|
@ -20,7 +20,8 @@ A ∩ B = B ∩ A
|
|||
-/
|
||||
|
||||
theorem commutative_law_i (A B : Set α)
|
||||
: A ∪ B = B ∪ A := calc A ∪ B
|
||||
: A ∪ B = B ∪ A :=
|
||||
calc A ∪ B
|
||||
_ = { x | x ∈ A ∨ x ∈ B } := rfl
|
||||
_ = { x | x ∈ B ∨ x ∈ A } := by
|
||||
ext
|
||||
|
@ -39,7 +40,7 @@ theorem commutative_law_ii (A B : Set α)
|
|||
|
||||
#check Set.inter_comm
|
||||
|
||||
/-! #### Associative Laws
|
||||
/-! ### Associative Laws
|
||||
|
||||
For any sets `A`, `B`, and `C`,
|
||||
```
|
||||
|
@ -74,7 +75,7 @@ theorem associative_law_ii (A B C : Set α)
|
|||
|
||||
#check Set.inter_assoc
|
||||
|
||||
/-! #### Distributive Laws
|
||||
/-! ### Distributive Laws
|
||||
|
||||
For any sets `A`, `B`, and `C`,
|
||||
```
|
||||
|
@ -107,7 +108,7 @@ theorem distributive_law_ii (A B C : Set α)
|
|||
|
||||
#check Set.union_distrib_left
|
||||
|
||||
/-! #### De Morgan's Laws
|
||||
/-! ### De Morgan's Laws
|
||||
|
||||
For any sets `A`, `B`, and `C`,
|
||||
```
|
||||
|
@ -148,7 +149,7 @@ theorem de_morgans_law_ii (A B C : Set α)
|
|||
|
||||
#check Set.diff_inter
|
||||
|
||||
/-! #### Identities Involving ∅
|
||||
/-! ### Identities Involving ∅
|
||||
|
||||
For any set `A`,
|
||||
```
|
||||
|
@ -171,8 +172,7 @@ theorem emptyset_identity_ii (A : Set α)
|
|||
: A ∩ ∅ = ∅ := calc A ∩ ∅
|
||||
_ = { x | x ∈ A ∧ x ∈ ∅ } := rfl
|
||||
_ = { x | x ∈ A ∧ False } := rfl
|
||||
_ = { x | False } := by simp
|
||||
_ = ∅ := rfl
|
||||
_ = ∅ := by simp
|
||||
|
||||
#check Set.inter_empty
|
||||
|
||||
|
@ -181,12 +181,11 @@ theorem emptyset_identity_iii (A C : Set α)
|
|||
_ = { x | x ∈ A ∧ x ∈ C \ A } := rfl
|
||||
_ = { x | x ∈ A ∧ (x ∈ C ∧ x ∉ A) } := rfl
|
||||
_ = { x | x ∈ C ∧ False } := by simp
|
||||
_ = { x | False } := by simp
|
||||
_ = ∅ := rfl
|
||||
_ = ∅ := by simp
|
||||
|
||||
#check Set.inter_diff_self
|
||||
|
||||
/-! #### Monotonicity
|
||||
/-! ### Monotonicity
|
||||
|
||||
For any sets `A`, `B`, and `C`,
|
||||
```
|
||||
|
@ -230,7 +229,7 @@ theorem monotonicity_iii (A B : Set (Set α)) (h : A ⊆ B)
|
|||
|
||||
#check Set.sUnion_mono
|
||||
|
||||
/-! #### Anti-monotonicity
|
||||
/-! ### Anti-monotonicity
|
||||
|
||||
For any sets `A`, `B`, and `C`,
|
||||
```
|
||||
|
@ -262,7 +261,7 @@ theorem anti_monotonicity_ii (A B : Set (Set α)) (h : A ⊆ B)
|
|||
|
||||
#check Set.sInter_subset_sInter
|
||||
|
||||
/-- #### Intersection/Difference Associativity
|
||||
/-- ### Intersection/Difference Associativity
|
||||
|
||||
Let `A`, `B`, and `C` be sets. Then `A ∩ (B - C) = (A ∩ B) - C`.
|
||||
-/
|
||||
|
@ -279,7 +278,7 @@ theorem inter_diff_assoc (A B C : Set α)
|
|||
|
||||
#check Set.inter_diff_assoc
|
||||
|
||||
/-- #### Exercise 2.1
|
||||
/-- ### Exercise 2.1
|
||||
|
||||
Assume that `A` is the set of integers divisible by `4`. Similarly assume that
|
||||
`B` and `C` are the sets of integers divisible by `9` and `10`, respectively.
|
||||
|
@ -301,7 +300,7 @@ theorem exercise_2_1 {A B C : Set ℤ}
|
|||
· rw [hC] at hc
|
||||
exact Set.mem_setOf.mp hc
|
||||
|
||||
/-- #### Exercise 2.2
|
||||
/-- ### Exercise 2.2
|
||||
|
||||
Give an example of sets `A` and `B` for which `⋃ A = ⋃ B` but `A ≠ B`.
|
||||
-/
|
||||
|
@ -340,7 +339,7 @@ theorem exercise_2_2 {A B : Set (Set ℕ)}
|
|||
have h₂ := h₁ 2
|
||||
simp at h₂
|
||||
|
||||
/-- #### Exercise 2.3
|
||||
/-- ### Exercise 2.3
|
||||
|
||||
Show that every member of a set `A` is a subset of `⋃ A`. (This was stated as an
|
||||
example in this section.)
|
||||
|
@ -353,7 +352,7 @@ theorem exercise_2_3 {A : Set (Set α)}
|
|||
rw [Set.mem_setOf_eq]
|
||||
exact ⟨x, ⟨hx, hy⟩⟩
|
||||
|
||||
/-- #### Exercise 2.4
|
||||
/-- ### Exercise 2.4
|
||||
|
||||
Show that if `A ⊆ B`, then `⋃ A ⊆ ⋃ B`.
|
||||
-/
|
||||
|
@ -365,7 +364,7 @@ theorem exercise_2_4 {A B : Set (Set α)} (h : A ⊆ B) : ⋃₀ A ⊆ ⋃₀ B
|
|||
rw [Set.mem_setOf_eq]
|
||||
exact ⟨t, ⟨h ht, hxt⟩⟩
|
||||
|
||||
/-- #### Exercise 2.5
|
||||
/-- ### Exercise 2.5
|
||||
|
||||
Assume that every member of `𝓐` is a subset of `B`. Show that `⋃ 𝓐 ⊆ B`.
|
||||
-/
|
||||
|
@ -377,7 +376,7 @@ theorem exercise_2_5 {𝓐 : Set (Set α)} (h : ∀ x ∈ 𝓐, x ⊆ B)
|
|||
have ⟨t, ⟨ht𝓐, hyt⟩⟩ := hy
|
||||
exact (h t ht𝓐) hyt
|
||||
|
||||
/-- #### Exercise 2.6a
|
||||
/-- ### Exercise 2.6a
|
||||
|
||||
Show that for any set `A`, `⋃ 𝓟 A = A`.
|
||||
-/
|
||||
|
@ -394,7 +393,7 @@ theorem exercise_2_6a : ⋃₀ (𝒫 A) = A := by
|
|||
rw [Set.mem_setOf_eq]
|
||||
exact ⟨A, ⟨by rw [Set.mem_setOf_eq], hx⟩⟩
|
||||
|
||||
/-- #### Exercise 2.6b
|
||||
/-- ### Exercise 2.6b
|
||||
|
||||
Show that `A ⊆ 𝓟 ⋃ A`. Under what conditions does equality hold?
|
||||
-/
|
||||
|
@ -413,7 +412,7 @@ theorem exercise_2_6b
|
|||
conv => rhs; rw [hB, exercise_2_6a]
|
||||
exact hB
|
||||
|
||||
/-- #### Exercise 2.7a
|
||||
/-- ### Exercise 2.7a
|
||||
|
||||
Show that for any sets `A` and `B`, `𝓟 A ∩ 𝓟 B = 𝓟 (A ∩ B)`.
|
||||
-/
|
||||
|
@ -431,7 +430,7 @@ theorem exercise_2_7A
|
|||
intro x hA _
|
||||
exact hA
|
||||
|
||||
/-- #### Exercise 2.7b (i)
|
||||
/-- ### Exercise 2.7b (i)
|
||||
|
||||
Show that `𝓟 A ∪ 𝓟 B ⊆ 𝓟 (A ∪ B)`.
|
||||
-/
|
||||
|
@ -448,7 +447,7 @@ theorem exercise_2_7b_i
|
|||
rw [Set.mem_setOf_eq]
|
||||
exact Set.subset_union_of_subset_right hB A
|
||||
|
||||
/-- #### Exercise 2.7b (ii)
|
||||
/-- ### Exercise 2.7b (ii)
|
||||
|
||||
Under what conditions does `𝓟 A ∪ 𝓟 B = 𝓟 (A ∪ B)`?
|
||||
-/
|
||||
|
@ -500,7 +499,7 @@ theorem exercise_2_7b_ii
|
|||
refine Or.inl (Set.Subset.trans hx ?_)
|
||||
exact subset_of_eq (Set.right_subset_union_eq_self hB)
|
||||
|
||||
/-- #### Exercise 2.9
|
||||
/-- ### Exercise 2.9
|
||||
|
||||
Give an example of sets `a` and `B` for which `a ∈ B` but `𝓟 a ∉ 𝓟 B`.
|
||||
-/
|
||||
|
@ -528,7 +527,7 @@ theorem exercise_2_9 (ha : a = {1}) (hB : B = {{1}})
|
|||
have := h 1
|
||||
simp at this
|
||||
|
||||
/-- #### Exercise 2.10
|
||||
/-- ### Exercise 2.10
|
||||
|
||||
Show that if `a ∈ B`, then `𝓟 a ∈ 𝓟 𝓟 ⋃ B`.
|
||||
-/
|
||||
|
@ -541,7 +540,7 @@ theorem exercise_2_10 {B : Set (Set α)} (ha : a ∈ B)
|
|||
rw [← hb, Set.mem_setOf_eq]
|
||||
exact h₂
|
||||
|
||||
/-- #### Exercise 2.11 (i)
|
||||
/-- ### Exercise 2.11 (i)
|
||||
|
||||
Show that for any sets `A` and `B`, `A = (A ∩ B) ∪ (A - B)`.
|
||||
-/
|
||||
|
@ -558,7 +557,7 @@ theorem exercise_2_11_i {A B : Set α}
|
|||
· intro hx
|
||||
exact ⟨hx, em (B x)⟩
|
||||
|
||||
/-- #### Exercise 2.11 (ii)
|
||||
/-- ### Exercise 2.11 (ii)
|
||||
|
||||
Show that for any sets `A` and `B`, `A ∪ (B - A) = A ∪ B`.
|
||||
-/
|
||||
|
@ -617,7 +616,7 @@ lemma right_diff_eq_insert_one_three : A \ (B \ C) = {1, 3} := by
|
|||
rw [hy] at hz
|
||||
unfold Membership.mem Set.instMembershipSet Set.Mem at hz
|
||||
unfold singleton Set.instSingletonSet Set.singleton setOf at hz
|
||||
simp only at hz
|
||||
simp at hz
|
||||
· intro hy
|
||||
refine ⟨Or.inr (Or.inr hy), ?_⟩
|
||||
intro hz
|
||||
|
@ -652,7 +651,7 @@ lemma left_diff_eq_singleton_one : (A \ B) \ C = {1} := by
|
|||
| inl y => rw [hx] at y; simp at y
|
||||
| inr y => rw [hx] at y; simp at y
|
||||
|
||||
/-- #### Exercise 2.14
|
||||
/-- ### Exercise 2.14
|
||||
|
||||
Show by example that for some sets `A`, `B`, and `C`, the set `A - (B - C)` is
|
||||
different from `(A - B) - C`.
|
||||
|
@ -669,7 +668,7 @@ theorem exercise_2_14 : A \ (B \ C) ≠ (A \ B) \ C := by
|
|||
|
||||
end
|
||||
|
||||
/-- #### Exercise 2.15 (a)
|
||||
/-- ### Exercise 2.15 (a)
|
||||
|
||||
Show that `A ∩ (B + C) = (A ∩ B) + (A ∩ C)`.
|
||||
-/
|
||||
|
@ -697,7 +696,7 @@ theorem exercise_2_15a (A B C : Set α)
|
|||
|
||||
#check Set.inter_symmDiff_distrib_left
|
||||
|
||||
/-- #### Exercise 2.15 (b)
|
||||
/-- ### Exercise 2.15 (b)
|
||||
|
||||
Show that `A + (B + C) = (A + B) + C`.
|
||||
-/
|
||||
|
@ -750,7 +749,7 @@ theorem exercise_2_15b (A B C : Set α)
|
|||
|
||||
#check symmDiff_assoc
|
||||
|
||||
/-- #### Exercise 2.16
|
||||
/-- ### Exercise 2.16
|
||||
|
||||
Simplify:
|
||||
`[(A ∪ B ∪ C) ∩ (A ∪ B)] - [(A ∪ (B - C)) ∩ A]`
|
||||
|
@ -762,7 +761,7 @@ theorem exercise_2_16 {A B C : Set α}
|
|||
_ = (A ∪ B) \ A := by rw [Set.union_inter_cancel_left]
|
||||
_ = B \ A := by rw [Set.union_diff_left]
|
||||
|
||||
/-! #### Exercise 2.17
|
||||
/-! ### Exercise 2.17
|
||||
|
||||
Show that the following four conditions are equivalent.
|
||||
|
||||
|
@ -792,13 +791,13 @@ theorem exercise_2_17_ii {A B : Set α} (h : A \ B = ∅)
|
|||
|
||||
theorem exercise_2_17_iii {A B : Set α} (h : A ∪ B = B)
|
||||
: A ∩ B = A := by
|
||||
suffices A ⊆ B from Set.inter_eq_left_iff_subset.mpr this
|
||||
exact Set.union_eq_right_iff_subset.mp h
|
||||
suffices A ⊆ B from Set.inter_eq_left.mpr this
|
||||
exact Set.union_eq_right.mp h
|
||||
|
||||
theorem exercise_2_17_iv {A B : Set α} (h : A ∩ B = A)
|
||||
: A ⊆ B := Set.inter_eq_left_iff_subset.mp h
|
||||
: A ⊆ B := Set.inter_eq_left.mp h
|
||||
|
||||
/-- #### Exercise 2.19
|
||||
/-- ### Exercise 2.19
|
||||
|
||||
Is `𝒫 (A - B)` always equal to `𝒫 A - 𝒫 B`? Is it ever equal to `𝒫 A - 𝒫 B`?
|
||||
-/
|
||||
|
@ -811,7 +810,7 @@ theorem exercise_2_19 {A B : Set α}
|
|||
have := h ∅
|
||||
exact absurd (this.mp he) ne
|
||||
|
||||
/-- #### Exercise 2.20
|
||||
/-- ### Exercise 2.20
|
||||
|
||||
Let `A`, `B`, and `C` be sets such that `A ∪ B = A ∪ C` and `A ∩ B = A ∩ C`.
|
||||
Show that `B = C`.
|
||||
|
@ -837,7 +836,7 @@ theorem exercise_2_20 {A B C : Set α}
|
|||
rw [← hu] at this
|
||||
exact Or.elim this (absurd · hA) (by simp)
|
||||
|
||||
/-- #### Exercise 2.21
|
||||
/-- ### Exercise 2.21
|
||||
|
||||
Show that `⋃ (A ∪ B) = (⋃ A) ∪ (⋃ B)`.
|
||||
-/
|
||||
|
@ -861,7 +860,7 @@ theorem exercise_2_21 {A B : Set (Set α)}
|
|||
have ⟨t, ht⟩ : ∃ t, t ∈ B ∧ x ∈ t := hB
|
||||
exact ⟨t, ⟨Set.mem_union_right A ht.left, ht.right⟩⟩
|
||||
|
||||
/-- #### Exercise 2.22
|
||||
/-- ### Exercise 2.22
|
||||
|
||||
Show that if `A` and `B` are nonempty sets, then `⋂ (A ∪ B) = ⋂ A ∩ ⋂ B`.
|
||||
-/
|
||||
|
@ -890,7 +889,7 @@ theorem exercise_2_22 {A B : Set (Set α)}
|
|||
· intro hB
|
||||
exact (this t).right hB
|
||||
|
||||
/-- #### Exercise 2.24a
|
||||
/-- ### Exercise 2.24a
|
||||
|
||||
Show that if `𝓐` is nonempty, then `𝒫 (⋂ 𝓐) = ⋂ { 𝒫 X | X ∈ 𝓐 }`.
|
||||
-/
|
||||
|
@ -909,7 +908,7 @@ theorem exercise_2_24a {𝓐 : Set (Set α)}
|
|||
_ = { x | ∀ t ∈ { 𝒫 X | X ∈ 𝓐 }, x ∈ t} := by simp
|
||||
_ = ⋂₀ { 𝒫 X | X ∈ 𝓐 } := rfl
|
||||
|
||||
/-- #### Exercise 2.24b
|
||||
/-- ### Exercise 2.24b
|
||||
|
||||
Show that
|
||||
```
|
||||
|
@ -951,7 +950,7 @@ theorem exercise_2_24b {𝓐 : Set (Set α)}
|
|||
simp only [Set.mem_setOf_eq, exists_exists_and_eq_and, Set.mem_powerset_iff]
|
||||
exact ⟨⋃₀ 𝓐, ⟨hA, hx⟩⟩
|
||||
|
||||
/-- #### Exercise 2.25
|
||||
/-- ### Exercise 2.25
|
||||
|
||||
Is `A ∪ (⋃ 𝓑)` always the same as `⋃ { A ∪ X | X ∈ 𝓑 }`? If not, then under
|
||||
what conditions does equality hold?
|
||||
|
|
|
@ -17,30 +17,51 @@ namespace Enderton.Set.Chapter_3
|
|||
|
||||
open Set.Relation
|
||||
|
||||
/-- #### Lemma 3B
|
||||
/-- ### Lemma 3B
|
||||
|
||||
If `x ∈ C` and `y ∈ C`, then `⟨x, y⟩ ∈ 𝒫 𝒫 C`.
|
||||
-/
|
||||
lemma lemma_3b {C : Set α} (hx : x ∈ C) (hy : y ∈ C)
|
||||
: OrderedPair x y ∈ 𝒫 𝒫 C := by
|
||||
/-
|
||||
> Let `C` be an arbitrary set and `x, y ∈ C`. Then by definition of the power
|
||||
> set, `{x}` and `{x, y}` are members of `𝒫 C`.
|
||||
-/
|
||||
have hxs : {x} ⊆ C := Set.singleton_subset_iff.mpr hx
|
||||
have hxys : {x, y} ⊆ C := Set.mem_mem_imp_pair_subset hx hy
|
||||
/-
|
||||
> Likewise `{{x}, {x, y}}` is a member of `𝒫 𝒫 C`. By definition of an ordered
|
||||
> pair, `⟨x, y⟩ = {{x}, {x, y}}`. This concludes our proof.
|
||||
-/
|
||||
exact Set.mem_mem_imp_pair_subset hxs hxys
|
||||
|
||||
/-- #### Theorem 3D
|
||||
/-- ### Theorem 3D
|
||||
|
||||
If `⟨x, y⟩ ∈ A`, then `x` and `y` belong to `⋃ ⋃ A`.
|
||||
-/
|
||||
theorem theorem_3d {A : Set (Set (Set α))} (h : OrderedPair x y ∈ A)
|
||||
: x ∈ ⋃₀ (⋃₀ A) ∧ y ∈ ⋃₀ (⋃₀ A) := by
|
||||
/-
|
||||
> Let `A` be a set and `⟨x, y⟩ ∈ A`. By definition of an ordered pair,
|
||||
>
|
||||
> `⟨x, y⟩ = {{x}, {x, y}}`.
|
||||
>
|
||||
> By Exercise 2.3, `{{x}, {x, y}} ⊆ ∪ A`. Then `{x, y} ∈ ∪ A`.
|
||||
-/
|
||||
have hp := Chapter_2.exercise_2_3 (OrderedPair x y) h
|
||||
unfold OrderedPair at hp
|
||||
have hq : {x, y} ∈ ⋃₀ A := hp (by simp)
|
||||
/-
|
||||
> Another application of Exercise 2.3 implies `{x, y} ∈ ∪ ∪ A`.
|
||||
-/
|
||||
have : {x, y} ⊆ ⋃₀ ⋃₀ A := Chapter_2.exercise_2_3 {x, y} hq
|
||||
/-
|
||||
> Therefore `x, y ∈ ∪ ∪ A`.
|
||||
-/
|
||||
exact ⟨this (by simp), this (by simp)⟩
|
||||
|
||||
|
||||
/-- #### Theorem 3G (i)
|
||||
/-- ### Theorem 3G (i)
|
||||
|
||||
Assume that `F` is a one-to-one function. If `x ∈ dom F`, then `F⁻¹(F(x)) = x`.
|
||||
-/
|
||||
|
@ -54,7 +75,7 @@ theorem theorem_3g_i {F : Set.HRelation α β}
|
|||
unfold isOneToOne at hF
|
||||
exact (single_valued_eq_unique hF.left hy hy₁).symm
|
||||
|
||||
/-- #### Theorem 3G (ii)
|
||||
/-- ### Theorem 3G (ii)
|
||||
|
||||
Assume that `F` is a one-to-one function. If `y ∈ ran F`, then `F(F⁻¹(y)) = y`.
|
||||
-/
|
||||
|
@ -68,7 +89,7 @@ theorem theorem_3g_ii {F : Set.HRelation α β}
|
|||
unfold isOneToOne at hF
|
||||
exact (single_rooted_eq_unique hF.right hx hx₁).symm
|
||||
|
||||
/-- #### Theorem 3H
|
||||
/-- ### Theorem 3H
|
||||
|
||||
Assume that `F` and `G` are functions. Then
|
||||
```
|
||||
|
@ -107,7 +128,7 @@ theorem theorem_3h_dom {F : Set.HRelation β γ} {G : Set.HRelation α β}
|
|||
simp only [Set.mem_setOf_eq]
|
||||
exact ⟨a, ha.left.left, hb⟩
|
||||
|
||||
/-- #### Theorem 3J (a)
|
||||
/-- ### Theorem 3J (a)
|
||||
|
||||
Assume that `F : A → B`, and that `A` is nonempty. There exists a function
|
||||
`G : B → A` (a "left inverse") such that `G ∘ F` is the identity function on `A`
|
||||
|
@ -260,11 +281,11 @@ theorem theorem_3j_a {F : Set.HRelation α β}
|
|||
rw [← single_valued_eq_unique hF.is_func hx₂.right ht₂.left] at ht₂
|
||||
exact single_valued_eq_unique hG₁.is_func ht₂.right ht₁.right
|
||||
|
||||
/-- #### Theorem 3J (b)
|
||||
/-- ### Theorem 3J (b)
|
||||
|
||||
Assume that `F : A → B`, and that `A` is nonempty. There exists a function
|
||||
`H : B → A` (a "right inverse") such that `F ∘ H` is the identity function on
|
||||
`B` **iff** `F` maps `A` onto `B`.
|
||||
`B` only if `F` maps `A` onto `B`.
|
||||
-/
|
||||
theorem theorem_3j_b {F : Set.HRelation α β} (hF : mapsInto F A B)
|
||||
: (∃ H, mapsInto H B A ∧ comp F H = { p | p.1 ∈ B ∧ p.1 = p.2 }) →
|
||||
|
@ -279,7 +300,7 @@ theorem theorem_3j_b {F : Set.HRelation α β} (hF : mapsInto F A B)
|
|||
simp only [Set.mem_setOf_eq, Prod.exists, exists_eq_right, Set.setOf_mem_eq]
|
||||
exact hy
|
||||
|
||||
/-- #### Theorem 3K (a)
|
||||
/-- ### Theorem 3K (a)
|
||||
|
||||
The following hold for any sets. (`F` need not be a function.)
|
||||
The image of a union is the union of the images:
|
||||
|
@ -314,7 +335,7 @@ theorem theorem_3k_a {F : Set.HRelation α β} {𝓐 : Set (Set α)}
|
|||
simp only [Set.mem_sUnion, Set.mem_setOf_eq]
|
||||
exact ⟨u, ⟨A, hA.left, hu.left⟩, hu.right⟩
|
||||
|
||||
/-! #### Theorem 3K (b)
|
||||
/-! ### Theorem 3K (b)
|
||||
|
||||
The following hold for any sets. (`F` need not be a function.)
|
||||
The image of an intersection is included in the intersection of the images:
|
||||
|
@ -374,7 +395,7 @@ theorem theorem_3k_b_ii {F : Set.HRelation α β} {𝓐 : Set (Set α)}
|
|||
simp only [Set.mem_sInter, Set.mem_setOf_eq]
|
||||
exact ⟨u, hu⟩
|
||||
|
||||
/-! #### Theorem 3K (c)
|
||||
/-! ### Theorem 3K (c)
|
||||
|
||||
The following hold for any sets. (`F` need not be a function.)
|
||||
The image of a difference includes the difference of the images:
|
||||
|
@ -428,7 +449,7 @@ theorem theorem_3k_c_ii {F : Set.HRelation α β} {A B : Set α}
|
|||
exact absurd hu₁.left hu.left.right
|
||||
exact ⟨hv₁, hv₂⟩
|
||||
|
||||
/-! #### Corollary 3L
|
||||
/-! ### Corollary 3L
|
||||
|
||||
For any function `G` and sets `A`, `B`, and `𝓐`:
|
||||
|
||||
|
@ -456,7 +477,7 @@ theorem corollary_3l_iii {G : Set.HRelation β α} {A B : Set α}
|
|||
single_valued_self_iff_single_rooted_inv.mp hG
|
||||
exact (theorem_3k_c_ii hG').symm
|
||||
|
||||
/-- #### Theorem 3M
|
||||
/-- ### Theorem 3M
|
||||
|
||||
If `R` is a symmetric and transitive relation, then `R` is an equivalence
|
||||
relation on `fld R`.
|
||||
|
@ -476,7 +497,7 @@ theorem theorem_3m {R : Set.Relation α}
|
|||
have := hS ht
|
||||
exact hT this ht
|
||||
|
||||
/-- #### Theorem 3R
|
||||
/-- ### Theorem 3R
|
||||
|
||||
Let `R` be a linear ordering on `A`.
|
||||
|
||||
|
@ -500,7 +521,7 @@ theorem theorem_3r {R : Rel α α} (hR : IsStrictTotalOrder α R)
|
|||
right
|
||||
exact h₂
|
||||
|
||||
/-- #### Exercise 3.1
|
||||
/-- ### Exercise 3.1
|
||||
|
||||
Suppose that we attempted to generalize the Kuratowski definitions of ordered
|
||||
pairs to ordered triples by defining
|
||||
|
@ -521,9 +542,9 @@ theorem exercise_3_1 {x y z u v w : ℕ}
|
|||
· rw [hx, hy, hz, hu, hv, hw]
|
||||
simp
|
||||
· rw [hy, hv]
|
||||
simp only
|
||||
simp
|
||||
|
||||
/-- #### Exercise 3.2a
|
||||
/-- ### Exercise 3.2a
|
||||
|
||||
Show that `A × (B ∪ C) = (A × B) ∪ (A × C)`.
|
||||
-/
|
||||
|
@ -539,7 +560,7 @@ theorem exercise_3_2a {A : Set α} {B C : Set β}
|
|||
_ = { p | p ∈ Set.prod A B ∨ (p ∈ Set.prod A C) } := rfl
|
||||
_ = (Set.prod A B) ∪ (Set.prod A C) := rfl
|
||||
|
||||
/-- #### Exercise 3.2b
|
||||
/-- ### Exercise 3.2b
|
||||
|
||||
Show that if `A × B = A × C` and `A ≠ ∅`, then `B = C`.
|
||||
-/
|
||||
|
@ -568,7 +589,7 @@ theorem exercise_3_2b {A : Set α} {B C : Set β}
|
|||
have ⟨c, hc⟩ := Set.nonempty_iff_ne_empty.mpr (Ne.symm nC)
|
||||
exact (h (a, c)).mpr ⟨ha, hc⟩
|
||||
|
||||
/-- #### Exercise 3.3
|
||||
/-- ### Exercise 3.3
|
||||
|
||||
Show that `A × ⋃ 𝓑 = ⋃ {A × X | X ∈ 𝓑}`.
|
||||
-/
|
||||
|
@ -596,7 +617,7 @@ theorem exercise_3_3 {A : Set (Set α)} {𝓑 : Set (Set β)}
|
|||
· intro ⟨b, h₁, h₂, h₃⟩
|
||||
exact ⟨b, h₁, h₂, h₃⟩
|
||||
|
||||
/-- #### Exercise 3.5a
|
||||
/-- ### Exercise 3.5a
|
||||
|
||||
Assume that `A` and `B` are given sets, and show that there exists a set `C`
|
||||
such that for any `y`,
|
||||
|
@ -664,7 +685,7 @@ theorem exercise_3_5a {A : Set α} {B : Set β}
|
|||
rw [hab.right]
|
||||
exact ⟨hab.left, hb⟩
|
||||
|
||||
/-- #### Exercise 3.5b
|
||||
/-- ### Exercise 3.5b
|
||||
|
||||
With `A`, `B`, and `C` as above, show that `A × B = ∪ C`.
|
||||
-/
|
||||
|
@ -697,7 +718,7 @@ theorem exercise_3_5b {A : Set α} (B : Set β)
|
|||
exact ⟨h, hb⟩
|
||||
|
||||
|
||||
/-- #### Exercise 3.6
|
||||
/-- ### Exercise 3.6
|
||||
|
||||
Show that a set `A` is a relation **iff** `A ⊆ dom A × ran A`.
|
||||
-/
|
||||
|
@ -715,7 +736,7 @@ theorem exercise_3_6 {A : Set.HRelation α β}
|
|||
]
|
||||
exact ⟨⟨b, ht⟩, ⟨a, ht⟩⟩
|
||||
|
||||
/-- #### Exercise 3.7
|
||||
/-- ### Exercise 3.7
|
||||
|
||||
Show that if `R` is a relation, then `fld R = ⋃ ⋃ R`.
|
||||
-/
|
||||
|
@ -794,7 +815,7 @@ theorem exercise_3_7 {R : Set.Relation α}
|
|||
simp only [Set.mem_singleton_iff, Set.mem_insert_iff] at this
|
||||
exact hxy_mem this
|
||||
|
||||
/-- #### Exercise 3.8 (i)
|
||||
/-- ### Exercise 3.8 (i)
|
||||
|
||||
Show that for any set `𝓐`:
|
||||
```
|
||||
|
@ -820,7 +841,7 @@ theorem exercise_3_8_i {A : Set (Set.HRelation α β)}
|
|||
· intro ⟨t, ht, y, hx⟩
|
||||
exact ⟨y, t, ht, hx⟩
|
||||
|
||||
/-- #### Exercise 3.8 (ii)
|
||||
/-- ### Exercise 3.8 (ii)
|
||||
|
||||
Show that for any set `𝓐`:
|
||||
```
|
||||
|
@ -845,7 +866,7 @@ theorem exercise_3_8_ii {A : Set (Set.HRelation α β)}
|
|||
· intro ⟨y, ⟨hy, ⟨t, ht⟩⟩⟩
|
||||
exact ⟨t, ⟨y, ⟨hy, ht⟩⟩⟩
|
||||
|
||||
/-- #### Exercise 3.9 (i)
|
||||
/-- ### Exercise 3.9 (i)
|
||||
|
||||
Discuss the result of replacing the union operation by the intersection
|
||||
operation in the preceding problem.
|
||||
|
@ -871,7 +892,7 @@ theorem exercise_3_9_i {A : Set (Set.HRelation α β)}
|
|||
intro _ y hy R hR
|
||||
exact ⟨y, hy R hR⟩
|
||||
|
||||
/-- #### Exercise 3.9 (ii)
|
||||
/-- ### Exercise 3.9 (ii)
|
||||
|
||||
Discuss the result of replacing the union operation by the intersection
|
||||
operation in the preceding problem.
|
||||
|
@ -897,7 +918,7 @@ theorem exercise_3_9_ii {A : Set (Set.HRelation α β)}
|
|||
intro _ y hy R hR
|
||||
exact ⟨y, hy R hR⟩
|
||||
|
||||
/-- #### Exercise 3.12
|
||||
/-- ### Exercise 3.12
|
||||
|
||||
Assume that `f` and `g` are functions and show that
|
||||
```
|
||||
|
@ -927,7 +948,7 @@ theorem exercise_3_12 {f g : Set.HRelation α β}
|
|||
rw [single_valued_eq_unique hf hp hy₁.left.left]
|
||||
exact hy₁.left.right
|
||||
|
||||
/-- #### Exercise 3.13
|
||||
/-- ### Exercise 3.13
|
||||
|
||||
Assume that `f` and `g` are functions with `f ⊆ g` and `dom g ⊆ dom f`. Show
|
||||
that `f = g`.
|
||||
|
@ -951,7 +972,7 @@ theorem exercise_3_13 {f g : Set.HRelation α β}
|
|||
rw [single_valued_eq_unique hg hp hx.left.right]
|
||||
exact hx.left.left
|
||||
|
||||
/-- #### Exercise 3.14 (a)
|
||||
/-- ### Exercise 3.14 (a)
|
||||
|
||||
Assume that `f` and `g` are functions. Show that `f ∩ g` is a function.
|
||||
-/
|
||||
|
@ -960,7 +981,7 @@ theorem exercise_3_14_a {f g : Set.HRelation α β}
|
|||
: isSingleValued (f ∩ g) :=
|
||||
single_valued_subset hf (Set.inter_subset_left f g)
|
||||
|
||||
/-- #### Exercise 3.14 (b)
|
||||
/-- ### Exercise 3.14 (b)
|
||||
|
||||
Assume that `f` and `g` are functions. Show that `f ∪ g` is a function **iff**
|
||||
`f(x) = g(x)` for every `x` in `(dom f) ∩ (dom g)`.
|
||||
|
@ -1040,7 +1061,7 @@ theorem exercise_3_14_b {f g : Set.HRelation α β}
|
|||
· intro hz
|
||||
exact absurd (mem_pair_imp_fst_mem_dom hz) hgx
|
||||
|
||||
/-- #### Exercise 3.15
|
||||
/-- ### Exercise 3.15
|
||||
|
||||
Let `𝓐` be a set of functions such that for any `f` and `g` in `𝓐`, either
|
||||
`f ⊆ g` or `g ⊆ f`. Show that `⋃ 𝓐` is a function.
|
||||
|
@ -1063,7 +1084,7 @@ theorem exercise_3_15 {𝓐 : Set (Set.HRelation α β)}
|
|||
have := hg' hg.right
|
||||
exact single_valued_eq_unique (h𝓐 f hf.left) this hf.right
|
||||
|
||||
/-! #### Exercise 3.17
|
||||
/-! ### Exercise 3.17
|
||||
|
||||
Show that the composition of two single-rooted sets is again single-rooted.
|
||||
Conclude that the composition of two one-to-one functions is again one-to-one.
|
||||
|
@ -1098,7 +1119,7 @@ theorem exercise_3_17_ii {F : Set.HRelation β γ} {G : Set.HRelation α β}
|
|||
(single_valued_comp_is_single_valued hF.left hG.left)
|
||||
(exercise_3_17_i hF.right hG.right)
|
||||
|
||||
/-! #### Exercise 3.18
|
||||
/-! ### Exercise 3.18
|
||||
|
||||
Let `R` be the set
|
||||
```
|
||||
|
@ -1231,7 +1252,7 @@ theorem exercise_3_18_v
|
|||
|
||||
end Exercise_3_18
|
||||
|
||||
/-! #### Exercise 3.19
|
||||
/-! ### Exercise 3.19
|
||||
|
||||
Let
|
||||
```
|
||||
|
@ -1481,7 +1502,7 @@ theorem exercise_3_19_x
|
|||
|
||||
end Exercise_3_19
|
||||
|
||||
/-- #### Exercise 3.20
|
||||
/-- ### Exercise 3.20
|
||||
|
||||
Show that `F ↾ A = F ∩ (A × ran F)`.
|
||||
-/
|
||||
|
@ -1501,7 +1522,7 @@ theorem exercise_3_20 {F : Set.HRelation α β} {A : Set α}
|
|||
_ = F ∩ {p | p.fst ∈ A ∧ p.snd ∈ ran F} := rfl
|
||||
_ = F ∩ (Set.prod A (ran F)) := rfl
|
||||
|
||||
/-- #### Exercise 3.22 (a)
|
||||
/-- ### Exercise 3.22 (a)
|
||||
|
||||
Show that the following is correct for any sets.
|
||||
```
|
||||
|
@ -1516,7 +1537,7 @@ theorem exercise_3_22_a {A B : Set α} {F : Set.HRelation α β} (h : A ⊆ B)
|
|||
have := h hu.left
|
||||
exact ⟨u, this, hu.right⟩
|
||||
|
||||
/-- #### Exercise 3.22 (b)
|
||||
/-- ### Exercise 3.22 (b)
|
||||
|
||||
Show that the following is correct for any sets.
|
||||
```
|
||||
|
@ -1546,7 +1567,7 @@ theorem exercise_3_22_b {A B : Set α} {F : Set.HRelation α β}
|
|||
_ = { v | ∃ a ∈ image G A, (a, v) ∈ F } := rfl
|
||||
_ = image F (image G A) := rfl
|
||||
|
||||
/-- #### Exercise 3.22 (c)
|
||||
/-- ### Exercise 3.22 (c)
|
||||
|
||||
Show that the following is correct for any sets.
|
||||
```
|
||||
|
@ -1564,7 +1585,7 @@ theorem exercise_3_22_c {A B : Set α} {Q : Set.Relation α}
|
|||
_ = { p | p ∈ Q ∧ p.1 ∈ A} ∪ { p | p ∈ Q ∧ p.1 ∈ B } := rfl
|
||||
_ = (restriction Q A) ∪ (restriction Q B) := rfl
|
||||
|
||||
/-- #### Exercise 3.23 (i)
|
||||
/-- ### Exercise 3.23 (i)
|
||||
|
||||
Let `I` be the identity function on the set `A`. Show that for any sets `B` and
|
||||
`C`, `B ∘ I = B ↾ A`.
|
||||
|
@ -1588,7 +1609,7 @@ theorem exercise_3_23_i {A : Set α} {B : Set.HRelation α β} {I : Set.Relation
|
|||
intro (x, y) hp
|
||||
refine ⟨x, ⟨hp.right, rfl⟩, hp.left⟩
|
||||
|
||||
/-- #### Exercise 3.23 (ii)
|
||||
/-- ### Exercise 3.23 (ii)
|
||||
|
||||
Let `I` be the identity function on the set `A`. Show that for any sets `B` and
|
||||
`C`, `I⟦C⟧ = A ∩ C`.
|
||||
|
@ -1620,7 +1641,7 @@ theorem exercise_3_23_ii {A C : Set α} {I : Set.Relation α}
|
|||
_ = C ∩ A := rfl
|
||||
_ = A ∩ C := Set.inter_comm C A
|
||||
|
||||
/-- #### Exercise 3.24
|
||||
/-- ### Exercise 3.24
|
||||
|
||||
Show that for a function `F`, `F⁻¹⟦A⟧ = { x ∈ dom F | F(x) ∈ A }`.
|
||||
-/
|
||||
|
@ -1650,7 +1671,7 @@ theorem exercise_3_24 {F : Set.HRelation α β} {A : Set β}
|
|||
· intro ⟨y, hy⟩
|
||||
exact ⟨y, hy.left⟩
|
||||
|
||||
/-- #### Exercise 3.25 (b)
|
||||
/-- ### Exercise 3.25 (b)
|
||||
|
||||
Show that the result of part (a) holds for any function `G`, not necessarily
|
||||
one-to-one.
|
||||
|
@ -1673,7 +1694,7 @@ theorem exercise_3_25_b {G : Set.HRelation α β} (hG : isSingleValued G)
|
|||
have ⟨t, ht⟩ := ran_exists h.left
|
||||
exact ⟨t, ⟨t, x, ht, rfl, rfl⟩, by rwa [← h.right]⟩
|
||||
|
||||
/-- #### Exercise 3.25 (a)
|
||||
/-- ### Exercise 3.25 (a)
|
||||
|
||||
Assume that `G` is a one-to-one function. Show that `G ∘ G⁻¹` is the identity
|
||||
function on `ran G`.
|
||||
|
@ -1682,7 +1703,7 @@ theorem exercise_3_25_a {G : Set.HRelation α β} (hG : isOneToOne G)
|
|||
: comp G (inv G) = { p | p.1 ∈ ran G ∧ p.1 = p.2 } :=
|
||||
exercise_3_25_b hG.left
|
||||
|
||||
/-- #### Exercise 3.27
|
||||
/-- ### Exercise 3.27
|
||||
|
||||
Show that `dom (F ∘ G) = G⁻¹⟦dom F⟧` for any sets `F` and `G`. (`F` and `G` need
|
||||
not be functions.)
|
||||
|
@ -1716,7 +1737,7 @@ theorem exercise_3_27 {F : Set.HRelation β γ} {G : Set.HRelation α β}
|
|||
]
|
||||
exact ⟨t, u, hu.right, ht⟩
|
||||
|
||||
/-- #### Exercise 3.28
|
||||
/-- ### Exercise 3.28
|
||||
|
||||
Assume that `f` is a one-to-one function from `A` into `B`, and that `G` is the
|
||||
function with `dom G = 𝒫 A` defined by the equation `G(X) = f⟦X⟧`. Show that `G`
|
||||
|
@ -1811,7 +1832,7 @@ theorem exercise_3_28 {A : Set α} {B : Set β}
|
|||
have hz := mem_pair_imp_snd_mem_ran hb.right
|
||||
exact hf.right.ran_ss hz
|
||||
|
||||
/-- #### Exercise 3.29
|
||||
/-- ### Exercise 3.29
|
||||
|
||||
Assume that `f : A → B` and define a function `G : B → 𝒫 A` by
|
||||
```
|
||||
|
@ -1854,7 +1875,7 @@ theorem exercise_3_29 {f : Set.HRelation α β} {G : Set.HRelation β (Set α)}
|
|||
rw [heq] at this
|
||||
exact single_valued_eq_unique hf.is_func this.right ht
|
||||
|
||||
/-! #### Exercise 3.30
|
||||
/-! ### Exercise 3.30
|
||||
|
||||
Assume that `F : 𝒫 A → 𝒫 A` and that `F` has the monotonicity property:
|
||||
```
|
||||
|
@ -1871,7 +1892,7 @@ variable {F : Set α → Set α} {A B C : Set α}
|
|||
(hB : B = ⋂₀ { X | X ⊆ A ∧ F X ⊆ X })
|
||||
(hC : C = ⋃₀ { X | X ⊆ A ∧ X ⊆ F X })
|
||||
|
||||
/-- ##### Exercise 3.30 (a)
|
||||
/-- #### Exercise 3.30 (a)
|
||||
|
||||
Show that `F(B) = B` and `F(C) = C`.
|
||||
-/
|
||||
|
@ -1967,7 +1988,7 @@ theorem exercise_3_30_a : F B = B ∧ F C = C := by
|
|||
· rw [Set.Subset.antisymm_iff]
|
||||
exact ⟨hC_subset, hC_supset⟩
|
||||
|
||||
/-- ##### Exercise 3.30 (b)
|
||||
/-- #### Exercise 3.30 (b)
|
||||
|
||||
Show that if `F(X) = X`, then `B ⊆ X ⊆ C`.
|
||||
-/
|
||||
|
@ -1989,7 +2010,7 @@ theorem exercise_3_30_b : ∀ X, X ⊆ A ∧ F X = X → B ⊆ X ∧ X ⊆ C :=
|
|||
|
||||
end Exercise_3_30
|
||||
|
||||
/-- #### Exercise 3.32 (a)
|
||||
/-- ### Exercise 3.32 (a)
|
||||
|
||||
Show that `R` is symmetric **iff** `R⁻¹ ⊆ R`.
|
||||
-/
|
||||
|
@ -2007,7 +2028,7 @@ theorem exercise_3_32_a {R : Set.Relation α}
|
|||
rw [← mem_self_comm_mem_inv] at hp
|
||||
exact hR hp
|
||||
|
||||
/-- #### Exercise 3.32 (b)
|
||||
/-- ### Exercise 3.32 (b)
|
||||
|
||||
Show that `R` is transitive **iff** `R ∘ R ⊆ R`.
|
||||
-/
|
||||
|
@ -2024,7 +2045,7 @@ theorem exercise_3_32_b {R : Set.Relation α}
|
|||
have : (x, z) ∈ comp R R := ⟨y, hx, hz⟩
|
||||
exact hR this
|
||||
|
||||
/-- #### Exercise 3.33
|
||||
/-- ### Exercise 3.33
|
||||
|
||||
Show that `R` is a symmetric and transitive relation **iff** `R = R⁻¹ ∘ R`.
|
||||
-/
|
||||
|
@ -2074,7 +2095,7 @@ theorem exercise_3_33 {R : Set.Relation α}
|
|||
rw [h, hR]
|
||||
exact ⟨y, hx, this⟩
|
||||
|
||||
/-- #### Exercise 3.34 (a)
|
||||
/-- ### Exercise 3.34 (a)
|
||||
|
||||
Assume that `𝓐` is a nonempty set, every member of which is a transitive
|
||||
relation. Is the set `⋂ 𝓐` a transitive relation?
|
||||
|
@ -2089,7 +2110,7 @@ theorem exercise_3_34_a {𝓐 : Set (Set.Relation α)}
|
|||
have hy' := hy A hA
|
||||
exact h𝓐 A hA hx' hy'
|
||||
|
||||
/-- #### Exercise 3.34 (b)
|
||||
/-- ### Exercise 3.34 (b)
|
||||
|
||||
Assume that `𝓐` is a nonempty set, every member of which is a transitive
|
||||
relation. Is `⋃ 𝓐` a transitive relation?
|
||||
|
@ -2136,7 +2157,7 @@ theorem exercise_3_34_b {𝓐 : Set (Set.Relation ℕ)}
|
|||
simp at this
|
||||
exact absurd (h h₁ h₂) h₃
|
||||
|
||||
/-- #### Exercise 3.35
|
||||
/-- ### Exercise 3.35
|
||||
|
||||
Show that for any `R` and `x`, we have `[x]_R = R⟦{x}⟧`.
|
||||
-/
|
||||
|
@ -2147,7 +2168,7 @@ theorem exercise_3_35 {R : Set.Relation α} {x : α}
|
|||
_ = { t | ∃ u ∈ ({x} : Set α), (u, t) ∈ R } := by simp
|
||||
_ = image R {x} := rfl
|
||||
|
||||
/-- #### Exercise 3.36
|
||||
/-- ### Exercise 3.36
|
||||
|
||||
Assume that `f : A → B` and that `R` is an equivalence relation on `B`. Define
|
||||
`Q` to be the set `{⟨x, y⟩ ∈ A × A | ⟨f(x), f(y)⟩ ∈ R}`. Show that `Q` is an
|
||||
|
@ -2213,7 +2234,7 @@ theorem exercise_3_36 {f : Set.HRelation α β}
|
|||
simp only [exists_and_left, Set.mem_setOf_eq]
|
||||
exact ⟨fx, hfx, fz, hfz, hR.trans h₁ h₂⟩
|
||||
|
||||
/-- #### Exercise 3.37
|
||||
/-- ### Exercise 3.37
|
||||
|
||||
Assume that `P` is a partition of a set `A`. Define the relation `Rₚ` as
|
||||
follows:
|
||||
|
@ -2290,7 +2311,7 @@ theorem exercise_3_37 {P : Set (Set α)} {A : Set α}
|
|||
simp only [Set.mem_setOf_eq]
|
||||
exact ⟨B₁, hB₁.left, hB₁.right.left, by rw [hB]; exact hB₂.right.right⟩
|
||||
|
||||
/-- #### Exercise 3.38
|
||||
/-- ### Exercise 3.38
|
||||
|
||||
Theorem 3P shows that `A / R` is a partition of `A` whenever `R` is an
|
||||
equivalence relation on `A`. Show that if we start with the equivalence relation
|
||||
|
@ -2358,7 +2379,7 @@ theorem exercise_3_38 {P : Set (Set α)} {A : Set α}
|
|||
simp only [Set.mem_setOf_eq]
|
||||
_ = neighborhood Rₚ x := rfl
|
||||
|
||||
/-- #### Exercise 3.39
|
||||
/-- ### Exercise 3.39
|
||||
|
||||
Assume that we start with an equivalence relation `R` on `A` and define `P` to
|
||||
be the partition `A / R`. Show that `Rₚ`, as defined in Exercise 37, is just
|
||||
|
@ -2396,7 +2417,7 @@ theorem exercise_3_39 {P : Set (Set α)} {R Rₚ : Set.Relation α} {A : Set α}
|
|||
rw [hP]
|
||||
exact ⟨x, hxA, rfl⟩
|
||||
|
||||
/-- #### Exercise 3.41 (a)
|
||||
/-- ### Exercise 3.41 (a)
|
||||
|
||||
Let `ℝ` be the set of real numbers and define the relation `Q` on `ℝ × ℝ` by
|
||||
`⟨u, v⟩ Q ⟨x, y⟩` **iff** `u + y = x + v`. Show that `Q` is an equivalence
|
||||
|
@ -2449,7 +2470,7 @@ theorem exercise_3_41_a {Q : Set.Relation (ℝ × ℝ)}
|
|||
conv => right; rw [add_comm]
|
||||
exact this
|
||||
|
||||
/-- #### Exercise 3.43
|
||||
/-- ### Exercise 3.43
|
||||
|
||||
Assume that `R` is a linear ordering on a set `A`. Show that `R⁻¹` is also a
|
||||
linear ordering on `A`.
|
||||
|
@ -2472,7 +2493,7 @@ theorem exercise_3_43 {R : Rel α α} (hR : IsStrictTotalOrder α R)
|
|||
unfold Rel.inv flip at *
|
||||
exact hR.trans c b a hac hab
|
||||
|
||||
/-! #### Exercise 3.44
|
||||
/-! ### Exercise 3.44
|
||||
|
||||
Assume that `<` is a linear ordering on a set `A`. Assume that `f : A → A` and
|
||||
that `f` has the property that whenever `x < y`, then `f(x) < f(y)`. Show that
|
||||
|
@ -2516,7 +2537,7 @@ theorem exercise_3_44_ii {R : Rel α α} (hR : IsStrictTotalOrder α R)
|
|||
have := hR.trans (f x) (f y) (f x) h (hf y x h₂)
|
||||
exact absurd this (hR.irrefl (f x))
|
||||
|
||||
/-- #### Exercise 3.45
|
||||
/-- ### Exercise 3.45
|
||||
|
||||
Assume that `<_A` and `<_B` are linear orderings on `A` and `B`, respectively.
|
||||
Define the binary relation `<_L` on the Cartesian product `A × B` by:
|
||||
|
|
|
@ -11,7 +11,7 @@ Natural Numbers
|
|||
|
||||
namespace Enderton.Set.Chapter_4
|
||||
|
||||
/-- #### Theorem 4C
|
||||
/-- ### Theorem 4C
|
||||
|
||||
Every natural number except `0` is the successor of some natural number.
|
||||
-/
|
||||
|
@ -23,7 +23,7 @@ theorem theorem_4c (n : ℕ)
|
|||
|
||||
#check Nat.exists_eq_succ_of_ne_zero
|
||||
|
||||
/-- #### Theorem 4I
|
||||
/-- ### Theorem 4I
|
||||
|
||||
For natural numbers `m` and `n`,
|
||||
```
|
||||
|
@ -34,7 +34,7 @@ m + n⁺ = (m + n)⁺
|
|||
theorem theorem_4i (m n : ℕ)
|
||||
: m + 0 = m ∧ m + n.succ = (m + n).succ := ⟨rfl, rfl⟩
|
||||
|
||||
/-- #### Theorem 4J
|
||||
/-- ### Theorem 4J
|
||||
|
||||
For natural numbers `m` and `n`,
|
||||
```
|
||||
|
@ -45,7 +45,7 @@ m ⬝ n⁺ = m ⬝ n + m .
|
|||
theorem theorem_4j (m n : ℕ)
|
||||
: m * 0 = 0 ∧ m * n.succ = m * n + m := ⟨rfl, rfl⟩
|
||||
|
||||
/-- #### Left Additive Identity
|
||||
/-- ### Left Additive Identity
|
||||
|
||||
For all `n ∈ ω`, `A₀(n) = n`. In other words, `0 + n = n`.
|
||||
-/
|
||||
|
@ -60,7 +60,7 @@ lemma left_additive_identity (n : ℕ)
|
|||
|
||||
#check Nat.zero_add
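As a sanity check, the core lemma checked above closes the statement in one line.

```
example (n : ℕ) : 0 + n = n := Nat.zero_add n
```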
|
||||
|
||||
/-- #### Lemma 2
|
||||
/-- ### Lemma 2
|
||||
|
||||
For all `m, n ∈ ω`, `Aₘ₊(n) = Aₘ(n⁺)`. In other words, `m⁺ + n = m + n⁺`.
|
||||
-/
|
||||
|
@ -76,7 +76,7 @@ lemma lemma_2 (m n : ℕ)
|
|||
|
||||
#check Nat.succ_add_eq_succ_add
|
||||
|
||||
/-- #### Theorem 4K-1
|
||||
/-- ### Theorem 4K-1
|
||||
|
||||
Associative law for addition. For `m, n, p ∈ ω`,
|
||||
```
|
||||
|
@ -99,7 +99,7 @@ theorem theorem_4k_1 {m n p : ℕ}
|
|||
|
||||
#check Nat.add_assoc
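A one-line cross-check of the associative law against the core lemma checked above.

```
example (m n p : ℕ) : (m + n) + p = m + (n + p) := Nat.add_assoc m n p
```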
|
||||
|
||||
/-- #### Theorem 4K-2
|
||||
/-- ### Theorem 4K-2
|
||||
|
||||
Commutative law for addition. For `m, n ∈ ω`,
|
||||
```
|
||||
|
@ -119,7 +119,7 @@ theorem theorem_4k_2 {m n : ℕ}
|
|||
|
||||
#check Nat.add_comm
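The commutative law likewise follows from the core lemma checked above; a minimal sketch.

```
example (m n : ℕ) : m + n = n + m := Nat.add_comm m n
```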
|
||||
|
||||
/-- #### Zero Multiplicand
|
||||
/-- ### Zero Multiplicand
|
||||
|
||||
For all `n ∈ ω`, `M₀(n) = 0`. In other words, `0 ⬝ n = 0`.
|
||||
-/
|
||||
|
@ -135,7 +135,7 @@ theorem zero_multiplicand (n : ℕ)
|
|||
|
||||
#check Nat.zero_mul
|
||||
|
||||
/-- #### Successor Distribution
|
||||
/-- ### Successor Distribution
|
||||
|
||||
For all `m, n ∈ ω`, `Mₘ₊(n) = Mₘ(n) + n`. In other words,
|
||||
```
|
||||
|
@ -159,7 +159,7 @@ theorem succ_distrib (m n : ℕ)
|
|||
|
||||
#check Nat.succ_mul
|
||||
|
||||
/-- #### Theorem 4K-3
|
||||
/-- ### Theorem 4K-3
|
||||
|
||||
Distributive law. For `m, n, p ∈ ω`,
|
||||
```
|
||||
|
@ -181,7 +181,7 @@ theorem theorem_4k_3 (m n p : ℕ)
|
|||
_ = (m * n + n) + (m * p + p) := by rw [theorem_4k_1, theorem_4k_1]
|
||||
_ = m.succ * n + m.succ * p := by rw [succ_distrib, succ_distrib]
|
||||
|
||||
/-- #### Successor Identity
|
||||
/-- ### Successor Identity
|
||||
|
||||
For all `m ∈ ω`, `Aₘ(1) = m⁺`. In other words, `m + 1 = m⁺`.
|
||||
-/
|
||||
|
@ -197,7 +197,7 @@ theorem succ_identity (m : ℕ)
|
|||
|
||||
#check Nat.succ_eq_one_add
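In Lean the identity also holds definitionally, so `rfl` suffices; a small sketch.

```
example (m : ℕ) : m + 1 = m.succ := rfl
```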
|
||||
|
||||
/-- #### Right Multiplicative Identity
|
||||
/-- ### Right Multiplicative Identity
|
||||
|
||||
For all `m ∈ ω`, `Mₘ(1) = m`. In other words, `m ⬝ 1 = m`.
|
||||
-/
|
||||
|
@ -213,7 +213,7 @@ theorem right_mul_id (m : ℕ)
|
|||
|
||||
#check Nat.mul_one
|
||||
|
||||
/-- #### Theorem 4K-5
|
||||
/-- ### Theorem 4K-5
|
||||
|
||||
Commutative law for multiplication. For `m, n ∈ ω`, `m ⬝ n = n ⬝ m`.
|
||||
-/
|
||||
|
@ -232,7 +232,7 @@ theorem theorem_4k_5 (m n : ℕ)
|
|||
|
||||
#check Nat.mul_comm
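Again, the core lemma checked above gives the commutative law for multiplication directly.

```
example (m n : ℕ) : m * n = n * m := Nat.mul_comm m n
```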
|
||||
|
||||
/-- #### Theorem 4K-4
|
||||
/-- ### Theorem 4K-4
|
||||
|
||||
Associative law for multiplication. For `m, n, p ∈ ω`,
|
||||
```
|
||||
|
@ -254,7 +254,7 @@ theorem theorem_4k_4 (m n p : ℕ)
|
|||
|
||||
#check Nat.mul_assoc
|
||||
|
||||
/-- #### Lemma 4L(b)
|
||||
/-- ### Lemma 4L(b)
|
||||
|
||||
No natural number is a member of itself.
|
||||
-/
|
||||
|
@ -269,7 +269,7 @@ lemma lemma_4l_b (n : ℕ)
|
|||
|
||||
#check Nat.lt_irrefl
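The lemma checked above states exactly this irreflexivity claim; a minimal sketch.

```
example (n : ℕ) : ¬n < n := Nat.lt_irrefl n
```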
|
||||
|
||||
/-- #### Lemma 10
|
||||
/-- ### Lemma 10
|
||||
|
||||
For every natural number `n ≠ 0`, `0 ∈ n`.
|
||||
-/
|
||||
|
@ -285,7 +285,7 @@ theorem zero_least_nat (n : ℕ)
|
|||
|
||||
#check Nat.pos_of_ne_zero
|
||||
|
||||
/-! #### Theorem 4N
|
||||
/-! ### Theorem 4N
|
||||
|
||||
For any natural numbers `n`, `m`, and `p`,
|
||||
```
|
||||
|
@ -299,9 +299,25 @@ m ∈ n ↔ m ⬝ p ∈ n ⬝ p.
|
|||
|
||||
theorem theorem_4n_i (m n p : ℕ)
|
||||
: m < n ↔ m + p < n + p := by
|
||||
/-
|
||||
> Let `m` and `n` be natural numbers.
|
||||
>
|
||||
> ##### (⇒)
|
||||
> Suppose `m ∈ n`. Let
|
||||
>
|
||||
> `S = {p ∈ ω | m + p ∈ n + p}`
|
||||
-/
|
||||
have hf : ∀ m n : ℕ, m < n → m + p < n + p := by
|
||||
induction p with
|
||||
/-
|
||||
> It trivially follows that `0 ∈ S`.
|
||||
-/
|
||||
| zero => simp
|
||||
/-
|
||||
> Next, suppose `p ∈ S`. That is, suppose `m + p ∈ n + p`. By *Lemma 4L(a)*,
|
||||
> this holds if and only if `(m + p)⁺ ∈ (n + p)⁺`. *Theorem 4I* then implies
|
||||
> that `m + p⁺ ∈ n + p⁺` meaning `p⁺ ∈ S`.
|
||||
-/
|
||||
| succ p ih =>
|
||||
intro m n hp
|
||||
have := ih m n hp
|
||||
|
@ -310,29 +326,76 @@ theorem theorem_4n_i (m n p : ℕ)
|
|||
have h₂ : (n + p).succ = n + p.succ := rfl
|
||||
rwa [← h₁, ← h₂]
|
||||
apply Iff.intro
|
||||
/-
|
||||
> Thus `S` is an inductive set. Hence *Theorem 4B* implies `S = ω`. Therefore,
|
||||
> for all `p ∈ ω`, `m ∈ n` implies `m + p ∈ n + p`.
|
||||
-/
|
||||
· exact hf m n
|
||||
/-
|
||||
> ##### (⇐)
|
||||
> Let `p` be a natural number and suppose `m + p ∈ n + p`. By the
|
||||
> *Trichotomy Law for `ω`*, there are two cases to consider regarding how `m`
|
||||
> and `n` relate to one another:
|
||||
-/
|
||||
· intro h
|
||||
match @trichotomous ℕ LT.lt _ m n with
|
||||
| Or.inl h₁ =>
|
||||
exact h₁
|
||||
| Or.inr (Or.inl h₁) =>
|
||||
/-
|
||||
> ###### Case 1
|
||||
> Suppose `m = n`. Then `m + p ∈ n + p = m + p`. *Lemma 4L(b)* shows this is
|
||||
> impossible.
|
||||
-/
|
||||
rw [← h₁] at h
|
||||
exact absurd h (lemma_4l_b (m + p))
|
||||
| Or.inr (Or.inr h₁) =>
|
||||
/-
|
||||
> ###### Case 2
|
||||
> Suppose `n ∈ m`. Then *(⇒)* indicates `n + p ∈ m + p`. But this contradicts
|
||||
> the *Trichotomy Law for `ω`* since, by hypothesis, `m + p ∈ n + p`.
|
||||
-/
|
||||
have := hf n m h₁
|
||||
exact absurd this (Nat.lt_asymm h)
|
||||
| Or.inl h₁ =>
|
||||
/-
|
||||
> ###### Conclusion
|
||||
> By trichotomy, it follows `m ∈ n`.
|
||||
-/
|
||||
exact h₁
|
||||
|
||||
#check Nat.add_lt_add_iff_right
|
||||
|
||||
theorem theorem_4n_ii (m n p : ℕ)
|
||||
: m < n ↔ m * p.succ < n * p.succ := by
|
||||
/-
|
||||
> Let `m` and `n` be natural numbers.
|
||||
>
|
||||
> ##### (⇒)
|
||||
> Suppose `m ∈ n`. Let
|
||||
>
|
||||
> `S = {p ∈ ω | m ⬝ p⁺ ∈ n ⬝ p⁺}`.
|
||||
-/
|
||||
have hf : ∀ m n : ℕ, m < n → m * p.succ < n * p.succ := by
|
||||
intro m n hp₁
|
||||
induction p with
|
||||
| zero =>
|
||||
/-
|
||||
> `0 ∈ S` by *Right Multiplicative Identity*.
|
||||
-/
|
||||
simp only [Nat.mul_one]
|
||||
exact hp₁
|
||||
| succ p ih =>
|
||||
/-
|
||||
> Next, suppose `p ∈ S`. That is, `m ⬝ p⁺ ∈ n ⬝ p⁺`. Then
|
||||
>
|
||||
> `m ⬝ p⁺⁺ = m ⬝ p⁺ + m` *Theorem 4J*
|
||||
> ` ∈ n ⬝ p⁺ + m` *(i)*
|
||||
> ` = m + n ⬝ p⁺` *Theorem 4K-2*
|
||||
> ` ∈ n + n ⬝ p⁺` *(i)*
|
||||
> ` = n ⬝ p⁺ + n` *Theorem 4K-2*
|
||||
> ` = n ⬝ p⁺⁺` *Theorem 4J*
|
||||
>
|
||||
> Therefore `p⁺ ∈ S`.
|
||||
-/
|
||||
have hp₂ : m * p.succ < n * p.succ := by
|
||||
by_cases hp₃ : p = 0
|
||||
· rw [hp₃] at *
|
||||
|
@ -347,21 +410,47 @@ theorem theorem_4n_ii (m n p : ℕ)
|
|||
_ = n * p.succ + n := by rw [theorem_4k_2]
|
||||
_ = n * p.succ.succ := rfl
|
||||
apply Iff.intro
|
||||
/-
|
||||
> Thus `S` is an inductive set. Hence *Theorem 4B* implies `S = ω`. By
|
||||
> *Theorem 4C*, every natural number except `0` is the successor of some natural
|
||||
> number. Therefore, for all `p ∈ ω` such that `p ≠ 0`, `m ∈ n` implies
|
||||
> `m ⬝ p ∈ n ⬝ p`.
|
||||
-/
|
||||
· exact hf m n
|
||||
· intro hp
|
||||
/-
|
||||
> ##### (⇐)
|
||||
> Let `p ≠ 0` be a natural number and suppose `m ⬝ p ∈ n ⬝ p`. By the
|
||||
> *Trichotomy Law for `ω`*, there are two cases to consider regarding how `m`
|
||||
> and `n` relate to one another.
|
||||
-/
|
||||
intro hp
|
||||
match @trichotomous ℕ LT.lt _ m n with
|
||||
| Or.inl h₁ =>
|
||||
exact h₁
|
||||
| Or.inr (Or.inl h₁) =>
|
||||
/-
|
||||
> ###### Case 1
|
||||
> Suppose `m = n`. Then `m ⬝ p ∈ n ⬝ p = m ⬝ p`. *Lemma 4L(b)* shows this is
|
||||
> impossible.
|
||||
-/
|
||||
rw [← h₁] at hp
|
||||
exact absurd hp (lemma_4l_b (m * p.succ))
|
||||
| Or.inr (Or.inr h₁) =>
|
||||
/-
|
||||
> ###### Case 2
|
||||
> Suppose `n ∈ m`. Then *(⇒)* indicates `n ⬝ p ∈ m ⬝ p`. But this contradicts
|
||||
> the *Trichotomy Law for `ω`* since, by hypothesis, `m ⬝ p ∈ n ⬝ p`.
|
||||
-/
|
||||
have := hf n m h₁
|
||||
exact absurd this (Nat.lt_asymm hp)
|
||||
| Or.inl h₁ =>
|
||||
/-
|
||||
> ###### Conclusion
|
||||
> By trichotomy, it follows `m ∈ n`.
|
||||
-/
|
||||
exact h₁
|
||||
|
||||
#check Nat.mul_lt_mul_of_pos_right
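The Mathlib lemma checked above recovers the forward direction for any positive multiplier; a small sketch assuming the file's imports.

```
example (m n p : ℕ) (h : m < n) (hp : 0 < p) : m * p < n * p :=
  Nat.mul_lt_mul_of_pos_right h hp
```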
|
||||
|
||||
/-! #### Corollary 4P
|
||||
/-! ### Corollary 4P
|
||||
|
||||
The following cancellation laws hold for `m`, `n`, and `p` in `ω`:
|
||||
```
|
||||
|
@ -372,44 +461,87 @@ m ⬝ p = n ⬝ p ∧ p ≠ 0 ⇒ m = n.
|
|||
|
||||
theorem corollary_4p_i (m n p : ℕ) (h : m + p = n + p)
|
||||
: m = n := by
|
||||
/-
|
||||
> Suppose `m + p = n + p`. By the *Trichotomy Law for `ω`*, there are two cases
|
||||
> to consider regarding how `m` and `n` relate to one another.
|
||||
-/
|
||||
match @trichotomous ℕ LT.lt _ m n with
|
||||
| Or.inl h₁ =>
|
||||
/-
|
||||
> If `m ∈ n`, then *Theorem 4N* implies `m + p ∈ n + p`.
|
||||
-/
|
||||
rw [theorem_4n_i m n p, h] at h₁
|
||||
exact absurd h₁ (lemma_4l_b (n + p))
|
||||
| Or.inr (Or.inl h₁) =>
|
||||
exact h₁
|
||||
| Or.inr (Or.inr h₁) =>
|
||||
/-
|
||||
> If `n ∈ m`, then *Theorem 4N* implies `n + p ∈ m + p`.
|
||||
-/
|
||||
rw [theorem_4n_i n m p, h] at h₁
|
||||
exact absurd h₁ (lemma_4l_b (n + p))
|
||||
/-
|
||||
> Both of these contradict the *Trichotomy Law for `ω`* applied to `m + p` and `n + p`.
|
||||
> Thus `m = n` is the only remaining possibility.
|
||||
-/
|
||||
| Or.inr (Or.inl h₁) =>
|
||||
exact h₁
|
||||
|
||||
|
||||
#check Nat.add_right_cancel
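The core lemma checked above gives the additive cancellation law in one line.

```
example (m n p : ℕ) (h : m + p = n + p) : m = n := Nat.add_right_cancel h
```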
|
||||
|
||||
/-- #### Well Ordering of ω
|
||||
/-- ### Well Ordering of ω
|
||||
|
||||
Let `A` be a nonempty subset of `ω`. Then there is some `m ∈ A` such that
|
||||
`m ≤ n` for all `n ∈ A`.
|
||||
-/
|
||||
theorem well_ordering_nat {A : Set ℕ} (hA : Set.Nonempty A)
|
||||
: ∃ m ∈ A, ∀ n, n ∈ A → m ≤ n := by
|
||||
-- Assume `A` does not have a least element.
|
||||
/-
|
||||
> Let `A` be a nonempty subset of `ω`. For the sake of contradiction, suppose
|
||||
> `A` does not have a least element.
|
||||
-/
|
||||
by_contra nh
|
||||
simp only [not_exists, not_and, not_forall, not_le, exists_prop] at nh
|
||||
|
||||
-- If we show the complement of `A` is `ω`, then `A = ∅`, a contradiction.
|
||||
/-
|
||||
> It then suffices to prove that the complement of `A` equals `ω`. If we do so,
|
||||
> then `A = ∅`, a contradiction.
|
||||
-/
|
||||
suffices A.compl = Set.univ by
|
||||
have h := Set.univ_diff_compl_eq_self A
|
||||
rw [this] at h
|
||||
simp only [sdiff_self, Set.bot_eq_empty] at h
|
||||
exact absurd h.symm (Set.Nonempty.ne_empty hA)
|
||||
|
||||
-- Use strong induction to prove every element of `ω` is in the complement.
|
||||
/-
|
||||
> Define
|
||||
>
|
||||
> `S = {n ∈ ω | (∀ m ∈ n)m ∉ A}`.
|
||||
>
|
||||
> We prove `S` is an inductive set by showing that (i) `0 ∈ S` and (ii) if
|
||||
> `n ∈ S`, then `n⁺ ∈ S`. Afterward we show that `ω - A = ω`, completing the
|
||||
> proof.
|
||||
-/
|
||||
have : ∀ n : ℕ, (∀ m, m < n → m ∈ A.compl) := by
|
||||
intro n
|
||||
induction n with
|
||||
| zero =>
|
||||
/-
|
||||
> #### (i)
|
||||
> It vacuously holds that `0 ∈ S`.
|
||||
-/
|
||||
intro m hm
|
||||
exact False.elim (Nat.not_lt_zero m hm)
|
||||
| succ n ih =>
|
||||
/-
|
||||
> #### (ii)
|
||||
> Suppose `n ∈ S`. We want to prove that
|
||||
>
|
||||
> `∀ m, m ∈ n⁺ ⇒ m ∉ A`.
|
||||
>
|
||||
> To this end, let `m ∈ ω` such that `m ∈ n⁺`. By definition of the successor,
|
||||
> `m ∈ n` or `m = n`. If the former, `n ∈ S` implies `m ∉ A`. If the latter, it
|
||||
> isn't possible for `n ∈ A` since the *Trichotomy Law for `ω`* would otherwise
|
||||
> imply `n` is the least element of `A`, which is assumed to not exist. Hence
|
||||
> `n⁺ ∈ S`.
|
||||
-/
|
||||
intro m hm
|
||||
have hm' : m < n ∨ m = n := by
|
||||
rw [Nat.lt_succ] at hm
|
||||
|
@ -429,7 +561,12 @@ theorem well_ordering_nat {A : Set ℕ} (hA : Set.Nonempty A)
|
|||
exact absurd hp.left (ih p hp.right)
|
||||
· rw [h]
|
||||
exact hn
|
||||
|
||||
/-
|
||||
> #### Conclusion
|
||||
> By *(i)* and *(ii)*, `S` is an inductive set. Since `S ⊆ ω`, *Theorem 4B*
|
||||
> implies `S = ω`. But this immediately implies `ω = ω - A`, meaning `A` is the
|
||||
> empty set.
|
||||
-/
|
||||
ext x
|
||||
simp only [Set.mem_univ, iff_true]
|
||||
by_contra nh'
|
||||
|
@ -438,7 +575,7 @@ theorem well_ordering_nat {A : Set ℕ} (hA : Set.Nonempty A)
|
|||
|
||||
#check WellOrder
|
||||
|
||||
/-- #### Strong Induction Principle for ω
|
||||
/-- ### Strong Induction Principle for ω
|
||||
|
||||
Let `A` be a subset of `ω`, and assume that for every `n ∈ ω`, if every number
|
||||
less than `n` is in `A`, then `n ∈ A`. Then `A = ω`.
|
||||
|
@ -451,25 +588,31 @@ theorem strong_induction_principle_nat (A : Set ℕ)
|
|||
rw [this] at h'
|
||||
simp only [Set.diff_empty] at h'
|
||||
exact h'.symm
|
||||
|
||||
/-
|
||||
> For the sake of contradiction, suppose `ω - A` is a nonempty set. By
|
||||
> *Well Ordering of `ω`*, there exists a least element `m ∈ ω - A`.
|
||||
-/
|
||||
by_contra nh
|
||||
have ⟨m, hm⟩ := well_ordering_nat (Set.nmem_singleton_empty.mp nh)
|
||||
refine absurd (h m ?_) hm.left
|
||||
|
||||
/-
|
||||
> Then every number less than `m` is in `A`. But then *(4.23)* implies `m ∈ A`,
|
||||
> a contradiction. Thus `ω - A` is an empty set meaning `A = ω`.
|
||||
-/
|
||||
-- Show that every number less than `m` is in `A`.
|
||||
intro x hx
|
||||
by_contra nx
|
||||
have : x < x := Nat.lt_of_lt_of_le hx (hm.right x nx)
|
||||
simp at this
|
||||
|
||||
/-- #### Exercise 4.1
|
||||
/-- ### Exercise 4.1
|
||||
|
||||
Show that `1 ≠ 3` i.e., that `∅⁺ ≠ ∅⁺⁺⁺`.
|
||||
-/
|
||||
theorem exercise_4_1 : 1 ≠ 3 := by
|
||||
simp
|
||||
|
||||
/-- #### Exercise 4.13
|
||||
/-- ### Exercise 4.13
|
||||
|
||||
Let `m` and `n` be natural numbers such that `m ⬝ n = 0`. Show that either
|
||||
`m = 0` or `n = 0`.
|
||||
|
@ -498,7 +641,7 @@ Call a natural number *odd* if it has the form `(2 ⬝ p) + 1` for some `p`.
|
|||
-/
|
||||
def odd (n : ℕ) : Prop := ∃ p, (2 * p) + 1 = n
|
||||
|
||||
/-- #### Exercise 4.14
|
||||
/-- ### Exercise 4.14
|
||||
|
||||
Show that each natural number is either even or odd, but never both.
|
||||
-/
|
||||
|
@ -549,7 +692,7 @@ theorem exercise_4_14 (n : ℕ)
|
|||
have : even n := ⟨q, hq'⟩
|
||||
exact absurd this h
|
||||
|
||||
/-- #### Exercise 4.17
|
||||
/-- ### Exercise 4.17
|
||||
|
||||
Prove that `mⁿ⁺ᵖ = mⁿ ⬝ mᵖ`.
|
||||
-/
|
||||
|
@ -559,7 +702,7 @@ theorem exercise_4_17 (m n p : ℕ)
|
|||
| zero => calc m ^ (n + 0)
|
||||
_ = m ^ n := rfl
|
||||
_ = m ^ n * 1 := by rw [right_mul_id]
|
||||
_ = m ^ n * m ^ 0 := rfl
|
||||
_ = m ^ n * m ^ 0 := by rfl
|
||||
| succ p ih => calc m ^ (n + p.succ)
|
||||
_ = m ^ (n + p).succ := rfl
|
||||
_ = m ^ (n + p) * m := rfl
|
||||
|
@ -567,7 +710,7 @@ theorem exercise_4_17 (m n p : ℕ)
|
|||
_ = m ^ n * (m ^ p * m) := by rw [theorem_4k_4]
|
||||
_ = m ^ n * m ^ p.succ := rfl
|
||||
|
||||
/-- #### Exercise 4.19
|
||||
/-- ### Exercise 4.19
|
||||
|
||||
Prove that if `m` is a natural number and `d` is a nonzero number, then there
|
||||
exist numbers `q` and `r` such that `m = (d ⬝ q) + r` and `r` is less than `d`.
|
||||
|
@ -600,7 +743,7 @@ theorem exercise_4_19 (m d : ℕ) (hd : d ≠ 0)
|
|||
_ < d := hr
|
||||
simp at this
|
||||
|
||||
/-- #### Exercise 4.22
|
||||
/-- ### Exercise 4.22
|
||||
|
||||
Show that for any natural numbers `m` and `p` we have `m ∈ m + p⁺`.
|
||||
-/
|
||||
|
@ -612,7 +755,7 @@ theorem exercise_4_22 (m p : ℕ)
|
|||
_ < m + p.succ := ih
|
||||
_ < m + p.succ.succ := Nat.lt.base (m + p.succ)
|
||||
|
||||
/-- #### Exercise 4.23
|
||||
/-- ### Exercise 4.23
|
||||
|
||||
Assume that `m` and `n` are natural numbers with `m` less than `n`. Show that
|
||||
there is some `p` in `ω` for which `m + p⁺ = n`. (It follows from this and the
|
||||
|
@ -637,7 +780,7 @@ theorem exercise_4_23 {m n : ℕ} (hm : m < n)
|
|||
refine ⟨0, ?_⟩
|
||||
rw [hm₁]
|
||||
|
||||
/-- #### Exercise 4.24
|
||||
/-- ### Exercise 4.24
|
||||
|
||||
Assume that `m + n = p + q`. Show that
|
||||
```
|
||||
|
@ -660,7 +803,7 @@ theorem exercise_4_24 (m n p q : ℕ) (h : m + n = p + q)
|
|||
rw [← h] at hr
|
||||
exact (theorem_4n_i m p n).mpr hr
|
||||
|
||||
/-- #### Exercise 4.25
|
||||
/-- ### Exercise 4.25
|
||||
|
||||
Assume that `n ∈ m` and `q ∈ p`. Show that
|
||||
```
|
||||
|
|
File diff suppressed because it is too large
|
@ -28,8 +28,7 @@ theorem ext_iff {x y u v : α}
|
|||
] at hu huv
|
||||
apply Or.elim hu <;> apply Or.elim huv
|
||||
|
||||
· -- #### Case 1
|
||||
-- `{u} = {x}` and `{u, v} = {x}`.
|
||||
· -- `{u} = {x}` and `{u, v} = {x}`.
|
||||
intro huv_x hu_x
|
||||
rw [Set.singleton_eq_singleton_iff] at hu_x
|
||||
rw [hu_x] at huv_x
|
||||
|
@ -41,8 +40,7 @@ theorem ext_iff {x y u v : α}
|
|||
rw [← hx_v] at this
|
||||
exact ⟨hu_x.symm, this⟩
|
||||
|
||||
· -- #### Case 2
|
||||
-- `{u} = {x}` and `{u, v} = {x, y}`.
|
||||
· -- `{u} = {x}` and `{u, v} = {x, y}`.
|
||||
intro huv_xy hu_x
|
||||
rw [Set.singleton_eq_singleton_iff] at hu_x
|
||||
rw [hu_x] at huv_xy
|
||||
|
@ -58,8 +56,7 @@ theorem ext_iff {x y u v : α}
|
|||
] at this
|
||||
exact ⟨hu_x.symm, Or.elim this (absurd ·.symm hx_v) (·.symm)⟩
|
||||
|
||||
· -- #### Case 3
|
||||
-- `{u} = {x, y}` and `{u, v} = {x}`.
|
||||
· -- `{u} = {x, y}` and `{u, v} = {x}`.
|
||||
intro huv_x hu_xy
|
||||
rw [Set.ext_iff] at huv_x hu_xy
|
||||
have hu_x := huv_x u
|
||||
|
@ -93,8 +90,7 @@ theorem ext_iff {x y u v : α}
|
|||
· intro hv_y
|
||||
exact ⟨hu_x.symm, hv_y.symm⟩
|
||||
|
||||
· -- #### Case 4
|
||||
-- `{u} = {x, y}` and `{u, v} = {x, y}`.
|
||||
· -- `{u} = {x, y}` and `{u, v} = {x, y}`.
|
||||
intro huv_xy hu_xy
|
||||
rw [Set.ext_iff] at huv_xy hu_xy
|
||||
have hx_u := hu_xy x
|
||||
|
|
|
@ -1 +1,10 @@
|
|||
import Bookshelf.Fraleigh.Chapter_1
|
||||
|
||||
/-! # A First Course in Abstract Algebra
|
||||
|
||||
## Fraleigh, John B.
|
||||
|
||||
### Lean
|
||||
|
||||
* [Chapter 1: Introduction and Examples](Bookshelf/Fraleigh/Chapter_1.html)
|
||||
-/
|
|
@ -9,7 +9,7 @@ considering they still have the same limitation described above during actual
|
|||
use. Their inclusion here serves more as pseudo-documentation than anything.
|
||||
-/
|
||||
|
||||
/-- #### Bald Eagle
|
||||
/-- ### Bald Eagle
|
||||
|
||||
`E'xy₁y₂y₃z₁z₂z₃ = x(y₁y₂y₃)(z₁z₂z₃)`
|
||||
-/
|
||||
|
@ -17,31 +17,31 @@ def E' (x : α → β → γ)
|
|||
(y₁ : δ → ε → α) (y₂ : δ) (y₃ : ε)
|
||||
(z₁ : ζ → η → β) (z₂ : ζ) (z₃ : η) := x (y₁ y₂ y₃) (z₁ z₂ z₃)
|
||||
|
||||
/-- #### Becard
|
||||
/-- ### Becard
|
||||
|
||||
`B₃xyzw = x(y(zw))`
|
||||
-/
|
||||
def B₃ (x : α → ε) (y : β → α) (z : γ → β) (w : γ) := x (y (z w))
|
||||
|
||||
/-- #### Blackbird
|
||||
/-- ### Blackbird
|
||||
|
||||
`B₁xyzw = x(yzw)`
|
||||
-/
|
||||
def B₁ (x : α → ε) (y : β → γ → α) (z : β) (w : γ) := x (y z w)
|
||||
|
||||
/-- #### Bluebird
|
||||
/-- ### Bluebird
|
||||
|
||||
`Bxyz = x(yz)`
|
||||
-/
|
||||
def B (x : α → γ) (y : β → α) (z : β) := x (y z)
|
||||
|
||||
/-- #### Bunting
|
||||
/-- ### Bunting
|
||||
|
||||
`B₂xyzwv = x(yzwv)`
|
||||
-/
|
||||
def B₂ (x : α → ζ) (y : β → γ → ε → α) (z : β) (w : γ) (v : ε) := x (y z w v)
|
||||
|
||||
/-- #### Cardinal Once Removed
|
||||
/-- ### Cardinal Once Removed
|
||||
|
||||
`C*xyzw = xywz`
|
||||
-/
|
||||
|
@ -49,48 +49,48 @@ def C_star (x : α → β → γ → δ) (y : α) (z : γ) (w : β) := x y w z
|
|||
|
||||
notation "C*" => C_star
|
||||
|
||||
/-- #### Cardinal
|
||||
/-- ### Cardinal
|
||||
|
||||
`Cxyz = xzy`
|
||||
-/
|
||||
def C (x : α → β → δ) (y : β) (z : α) := x z y
|
||||
|
||||
/-- #### Converse Warbler
|
||||
/-- ### Converse Warbler
|
||||
|
||||
`W'xy = yxx`
|
||||
-/
|
||||
def W' (x : α) (y : α → α → β) := y x x
|
||||
|
||||
/-- #### Dickcissel
|
||||
/-- ### Dickcissel
|
||||
|
||||
`D₁xyzwv = xyz(wv)`
|
||||
-/
|
||||
def D₁ (x : α → β → δ → ε) (y : α) (z : β) (w : γ → δ) (v : γ) := x y z (w v)
|
||||
|
||||
/-! #### Double Mockingbird
|
||||
/-! ### Double Mockingbird
|
||||
|
||||
`M₂xy = xy(xy)`
|
||||
-/
|
||||
|
||||
/-- #### Dove
|
||||
/-- ### Dove
|
||||
|
||||
`Dxyzw = xy(zw)`
|
||||
-/
|
||||
def D (x : α → γ → δ) (y : α) (z : β → γ) (w : β) := x y (z w)
|
||||
|
||||
/-- #### Dovekie
|
||||
/-- ### Dovekie
|
||||
|
||||
`D₂xyzwv = x(yz)(wv)`
|
||||
-/
|
||||
def D₂ (x : α → δ → ε) (y : β → α) (z : β) (w : γ → δ) (v : γ) := x (y z) (w v)
|
||||
|
||||
/-- #### Eagle
|
||||
/-- ### Eagle
|
||||
|
||||
`Exyzwv = xy(zwv)`
|
||||
-/
|
||||
def E (x : α → δ → ε) (y : α) (z : β → γ → δ) (w : β) (v : γ) := x y (z w v)
|
||||
|
||||
/-- #### Finch Once Removed
|
||||
/-- ### Finch Once Removed
|
||||
|
||||
`F*xyzw = xwzy`
|
||||
-/
|
||||
|
@ -98,95 +98,95 @@ def F_star (x : α → β → γ → δ) (y : γ) (z : β) (w : α) := x w z y
|
|||
|
||||
notation "F*" => F_star
|
||||
|
||||
/-- #### Finch
|
||||
/-- ### Finch
|
||||
|
||||
`Fxyz = zyx`
|
||||
-/
|
||||
def F (x : α) (y : β) (z : β → α → γ) := z y x
|
||||
|
||||
/-- #### Goldfinch
|
||||
/-- ### Goldfinch
|
||||
|
||||
`Gxyzw = xw(yz)`
|
||||
-/
|
||||
def G (x : α → γ → δ) (y : β → γ) (z : β) (w : α) := x w (y z)
|
||||
|
||||
/-- #### Hummingbird
|
||||
/-- ### Hummingbird
|
||||
|
||||
`Hxyz = xyzy`
|
||||
-/
|
||||
def H (x : α → β → α → γ) (y : α) (z : β) := x y z y
|
||||
|
||||
/-- #### Identity Bird
|
||||
/-- ### Identity Bird
|
||||
|
||||
`Ix = x`
|
||||
-/
|
||||
def I (x : α) : α := x
|
||||
|
||||
/-- #### Kestrel
|
||||
/-- ### Kestrel
|
||||
|
||||
`Kxy = x`
|
||||
-/
|
||||
def K (x : α) (_ : β) := x
|
||||
|
||||
/-! #### Lark
|
||||
/-! ### Lark
|
||||
|
||||
`Lxy = x(yy)`
|
||||
-/
|
||||
|
||||
/-! #### Mockingbird
|
||||
/-! ### Mockingbird
|
||||
|
||||
`Mx = xx`
|
||||
-/
|
||||
|
||||
/-- #### Owl
|
||||
/-- ### Owl
|
||||
|
||||
`Oxy = y(xy)`
|
||||
-/
|
||||
def O (x : (α → β) → α) (y : α → β) := y (x y)
|
||||
|
||||
/-- #### Phoenix
|
||||
/-- ### Phoenix
|
||||
|
||||
`Φxyzw = x(yw)(zw)`
|
||||
-/
|
||||
def Φ (x : β → γ → δ) (y : α → β) (z : α → γ) (w : α) := x (y w) (z w)
|
||||
|
||||
/-- #### Psi Bird
|
||||
/-- ### Psi Bird
|
||||
|
||||
`Ψxyzw = x(yz)(yw)`
|
||||
-/
|
||||
def Ψ (x : α → α → γ) (y : β → α) (z : β) (w : β) := x (y z) (y w)
|
||||
|
||||
/-- #### Quacky Bird
|
||||
/-- ### Quacky Bird
|
||||
|
||||
`Q₄xyz = z(yx)`
|
||||
-/
|
||||
def Q₄ (x : α) (y : α → β) (z : β → γ) := z (y x)
|
||||
|
||||
/-- #### Queer Bird
|
||||
/-- ### Queer Bird
|
||||
|
||||
`Qxyz = y(xz)`
|
||||
-/
|
||||
def Q (x : α → β) (y : β → γ) (z : α) := y (x z)
|
||||
|
||||
/-- #### Quirky Bird
|
||||
/-- ### Quirky Bird
|
||||
|
||||
`Q₃xyz = z(xy)`
|
||||
-/
|
||||
def Q₃ (x : α → β) (y : α) (z : β → γ) := z (x y)
|
||||
|
||||
/-- #### Quixotic Bird
|
||||
/-- ### Quixotic Bird
|
||||
|
||||
`Q₁xyz = x(zy)`
|
||||
-/
|
||||
def Q₁ (x : α → γ) (y : β) (z : β → α) := x (z y)
|
||||
|
||||
/-- #### Quizzical Bird
|
||||
/-- ### Quizzical Bird
|
||||
|
||||
`Q₂xyz = y(zx)`
|
||||
-/
|
||||
def Q₂ (x : α) (y : β → γ) (z : α → β) := y (z x)
|
||||
|
||||
/-- #### Robin Once Removed
|
||||
/-- ### Robin Once Removed
|
||||
|
||||
`R*xyzw = xzwy`
|
||||
-/
|
||||
|
@ -194,36 +194,36 @@ def R_star (x : α → β → γ → δ) (y : γ) (z : α) (w : β) := x z w y
|
|||
|
||||
notation "R*" => R_star
|
||||
|
||||
/-- #### Robin
|
||||
/-- ### Robin
|
||||
|
||||
`Rxyz = yzx`
|
||||
-/
|
||||
def R (x : α) (y : β → α → γ) (z : β) := y z x
|
||||
|
||||
/-- #### Sage Bird
|
||||
/-- ### Sage Bird
|
||||
|
||||
`Θx = x(Θx)`
|
||||
-/
|
||||
partial def Θ [Inhabited α] (x : α → α) := x (Θ x)
|
||||
|
||||
/-- #### Starling
|
||||
/-- ### Starling
|
||||
|
||||
`Sxyz = xz(yz)`
|
||||
-/
|
||||
def S (x : α → β → γ) (y : α → β) (z : α) := x z (y z)
|
||||
|
||||
/-- #### Thrush
|
||||
/-- ### Thrush
|
||||
|
||||
`Txy = yx`
|
||||
-/
|
||||
def T (x : α) (y : α → β) := y x
|
||||
|
||||
/-! #### Turing Bird
|
||||
/-! ### Turing Bird
|
||||
|
||||
`Uxy = y(xxy)`
|
||||
-/
|
||||
|
||||
/-- #### Vireo Once Removed
|
||||
/-- ### Vireo Once Removed
|
||||
|
||||
`V*xyzw = xwyz`
|
||||
-/
|
||||
|
@ -231,13 +231,13 @@ def V_star (x : α → β → γ → δ) (y : β) (z : γ) (w : α) := x w y z
|
|||
|
||||
notation "V*" => V_star
|
||||
|
||||
/-- #### Vireo
|
||||
/-- ### Vireo
|
||||
|
||||
`Vxyz = zxy`
|
||||
-/
|
||||
def V (x : α) (y : β) (z : α → β → γ) := z x y
|
||||
|
||||
/-- #### Warbler
|
||||
/-- ### Warbler
|
||||
|
||||
`Wxy = xyy`
|
||||
-/
|
||||
|
|
|
@ -162,7 +162,7 @@ theorem length_zipWith_self_tail_eq_length_sub_one
|
|||
rw [length_zipWith]
|
||||
simp only [length_cons, ge_iff_le, min_eq_right_iff]
|
||||
show length as ≤ length as + 1
|
||||
simp only [le_add_iff_nonneg_right]
|
||||
simp only [le_add_iff_nonneg_right, zero_le]
|
||||
|
||||
/--
|
||||
The output `List` of a `zipWith` is nonempty **iff** both of its inputs are
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import Mathlib.Logic.Basic
|
||||
import Mathlib.Data.Set.Basic
|
||||
import Mathlib.Tactic.Tauto
|
||||
|
||||
/-! # Common.Logic.Basic
|
||||
|
|
|
@ -9,23 +9,4 @@ If `n < m⁺`, then `n < m` or `n = m`.
|
|||
theorem lt_or_eq_of_lt {n m : Nat} (h : n < m.succ) : n < m ∨ n = m :=
|
||||
lt_or_eq_of_le (lt_succ.mp h)
|
||||
|
||||
/--
|
||||
The following cancellation law holds for `m`, `n`, and `p` in `ω`:
|
||||
```
|
||||
m ⬝ p = n ⬝ p ∧ p ≠ 0 → m = n
|
||||
```
|
||||
-/
|
||||
theorem mul_right_cancel (m n p : ℕ) (hp : 0 < p) : m * p = n * p → m = n := by
|
||||
intro hmn
|
||||
match @trichotomous ℕ LT.lt _ m n with
|
||||
| Or.inl h =>
|
||||
have : m * p < n * p := Nat.mul_lt_mul_of_pos_right h hp
|
||||
rw [hmn] at this
|
||||
simp at this
|
||||
| Or.inr (Or.inl h) => exact h
|
||||
| Or.inr (Or.inr h) =>
|
||||
have : n * p < m * p := Nat.mul_lt_mul_of_pos_right h hp
|
||||
rw [hmn] at this
|
||||
simp at this
|
||||
|
||||
end Nat
|
|
@ -1,6 +1,5 @@
|
|||
import Mathlib.Algebra.BigOperators.Basic
|
||||
import Mathlib.Data.Real.Basic
|
||||
import Mathlib.Data.Finset.Basic
|
||||
import Mathlib.Data.Real.Archimedean
|
||||
|
||||
/-! # Common.Real.Floor
|
||||
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import Common.Set.Basic
|
||||
import Common.Set.Equinumerous
|
||||
import Common.Set.Function
|
||||
import Common.Set.Intervals
|
||||
import Common.Set.Peano
|
|
@ -193,6 +193,62 @@ theorem diff_ssubset_nonempty {A B : Set α} (h : A ⊂ B)
|
|||
· intro hx
|
||||
exact ⟨x, hx⟩
|
||||
|
||||
/--
|
||||
If an element `a` is not a member of a set `A`, then `A - {a} = A`.
|
||||
-/
|
||||
theorem not_mem_diff_eq_self {A : Set α} (h : a ∉ A)
|
||||
: A \ {a} = A := by
|
||||
ext x
|
||||
apply Iff.intro
|
||||
· exact And.left
|
||||
· intro hx
|
||||
refine ⟨hx, ?_⟩
|
||||
simp only [mem_singleton_iff]
|
||||
by_contra nx
|
||||
rw [nx] at hx
|
||||
exact absurd hx h
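
/-
Illustrative sketch (hypothetical usage, not part of the changeset): removing
an element that was never in the set leaves the set unchanged.
-/
example {α : Type _} {A : Set α} {a : α} (h : a ∉ A) : A \ {a} = A :=
  not_mem_diff_eq_self h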
|
||||
|
||||
/--
|
||||
Given two sets `A` and `B`, `(A - {a}) - (B - {b}) = (A - B) - {a}`.
|
||||
-/
|
||||
theorem diff_mem_diff_mem_eq_diff_diff_mem {A B : Set α} {a : α}
|
||||
: (A \ {a}) \ (B \ {a}) = (A \ B) \ {a} := by
|
||||
calc (A \ {a}) \ (B \ {a})
|
||||
_ = { x | x ∈ A \ {a} ∧ x ∉ B \ {a} } := rfl
|
||||
_ = { x | x ∈ A \ {a} ∧ ¬(x ∈ B \ {a}) } := rfl
|
||||
_ = { x | (x ∈ A ∧ x ≠ a) ∧ ¬(x ∈ B ∧ x ≠ a) } := rfl
|
||||
_ = { x | (x ∈ A ∧ x ≠ a) ∧ (x ∉ B ∨ x = a) } := by
|
||||
ext x
|
||||
rw [mem_setOf_eq, not_and_de_morgan]
|
||||
simp
|
||||
_ = { x | (x ∈ A ∧ x ≠ a ∧ x ∉ B) ∨ (x ∈ A ∧ x ≠ a ∧ x = a) } := by
|
||||
ext x
|
||||
simp only [mem_setOf_eq]
|
||||
rw [and_or_left, and_assoc, and_assoc]
|
||||
_ = { x | x ∈ A ∧ x ≠ a ∧ x ∉ B } := by simp
|
||||
_ = { x | x ∈ A ∧ x ∉ B ∧ x ≠ a } := by
|
||||
ext x
|
||||
simp only [ne_eq, sep_and, mem_inter_iff, mem_setOf_eq]
|
||||
apply Iff.intro <;>
|
||||
· intro ⟨⟨_, hx₂⟩, hx₃, hx₄⟩
|
||||
exact ⟨⟨hx₃, hx₄⟩, ⟨hx₃, hx₂⟩⟩
|
||||
_ = { x | x ∈ A ∧ x ∉ B ∧ x ∉ ({a} : Set α) } := rfl
|
||||
_ = { x | x ∈ A \ B ∧ x ∉ ({a} : Set α) } := by
|
||||
ext x
|
||||
simp only [
|
||||
mem_singleton_iff,
|
||||
sep_and,
|
||||
mem_inter_iff,
|
||||
mem_setOf_eq,
|
||||
mem_diff,
|
||||
and_congr_right_iff,
|
||||
and_iff_right_iff_imp,
|
||||
and_imp
|
||||
]
|
||||
intro hx _ _
|
||||
exact hx
|
||||
_ = (A \ B) \ {a} := rfl
|
||||
|
||||
/--
|
||||
For any set `A`, the difference between the sample space and `A` is the
|
||||
complement of `A`.
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import Common.Nat.Basic
|
||||
import Common.Set.Basic
|
||||
import Mathlib.Data.Finset.Card
|
||||
import Mathlib.Data.Set.Finite
|
||||
|
||||
|
@ -8,6 +10,8 @@ Additional theorems around finite sets.
|
|||
|
||||
namespace Set
|
||||
|
||||
/-! ## Definitions -/
|
||||
|
||||
/--
|
||||
A set `A` is equinumerous to a set `B` (written `A ≈ B`) if and only if there is
|
||||
a one-to-one function from `A` onto `B`.
|
||||
|
@ -19,6 +23,26 @@ infix:50 " ≈ " => Equinumerous
|
|||
theorem equinumerous_def (A : Set α) (B : Set β)
|
||||
: A ≈ B ↔ ∃ F, Set.BijOn F A B := Iff.rfl
|
||||
|
||||
/--
|
||||
A set `A` is not equinumerous to a set `B` (written `A ≉ B`) if and only if
|
||||
there is no one-to-one function from `A` onto `B`.
|
||||
-/
|
||||
def NotEquinumerous (A : Set α) (B : Set β) : Prop := ¬ Equinumerous A B
|
||||
|
||||
infix:50 " ≉ " => NotEquinumerous
|
||||
|
||||
@[simp]
|
||||
theorem not_equinumerous_def : A ≉ B ↔ ∀ F, ¬ Set.BijOn F A B := by
|
||||
apply Iff.intro
|
||||
· intro h
|
||||
unfold NotEquinumerous Equinumerous at h
|
||||
simp only [not_exists] at h
|
||||
exact h
|
||||
· intro h
|
||||
unfold NotEquinumerous Equinumerous
|
||||
simp only [not_exists]
|
||||
exact h
|
||||
|
||||
/--
|
||||
For any set `A`, `A ≈ A`.
|
||||
-/
|
||||
|
@ -57,30 +81,131 @@ theorem eq_imp_equinumerous {A B : Set α} (h : A = B)
|
|||
conv at this => right; rw [h]
|
||||
exact this
|
||||
|
||||
/-! ## Finite Sets -/
|
||||
|
||||
/--
|
||||
A set is finite if and only if it is equinumerous to a natural number.
|
||||
-/
|
||||
axiom finite_iff_equinumerous_nat {α : Type _} {S : Set α}
|
||||
: Set.Finite S ↔ ∃ n : ℕ, S ≈ Set.Iio n
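
/-
Hypothetical usage sketch: exhibiting an equinumerosity with `Set.Iio n`
immediately yields finiteness via the axiom above.
-/
example {α : Type _} {S : Set α} (n : ℕ) (h : S ≈ Set.Iio n) : Set.Finite S :=
  finite_iff_equinumerous_nat.mpr ⟨n, h⟩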
|
||||
|
||||
/-! ## Emptyset -/
|
||||
|
||||
/--
|
||||
A set `A` is not equinumerous to a set `B` (written `A ≉ B`) if and only if
|
||||
there is no one-to-one function from `A` onto `B`.
|
||||
Any set equinumerous to the emptyset is the emptyset.
|
||||
-/
|
||||
def NotEquinumerous (A : Set α) (B : Set β) : Prop := ¬ Equinumerous A B
|
||||
|
||||
infix:50 " ≉ " => NotEquinumerous
|
||||
|
||||
@[simp]
|
||||
theorem not_equinumerous_def : A ≉ B ↔ ∀ F, ¬ Set.BijOn F A B := by
|
||||
theorem equinumerous_zero_iff_emptyset {S : Set α}
|
||||
: S ≈ Set.Iio 0 ↔ S = ∅ := by
|
||||
apply Iff.intro
|
||||
· intro ⟨f, hf⟩
|
||||
by_contra nh
|
||||
rw [← Ne.def, ← Set.nonempty_iff_ne_empty] at nh
|
||||
have ⟨x, hx⟩ := nh
|
||||
have := hf.left hx
|
||||
simp at this
|
||||
· intro h
|
||||
unfold NotEquinumerous Equinumerous at h
|
||||
simp only [not_exists] at h
|
||||
rw [h]
|
||||
refine ⟨fun _ => ⊥, ?_, ?_, ?_⟩
|
||||
· intro _ hx
|
||||
simp at hx
|
||||
· intro _ hx
|
||||
simp at hx
|
||||
· unfold SurjOn
|
||||
simp only [bot_eq_zero', image_empty]
|
||||
show ∀ x, x ∈ Set.Iio 0 → x ∈ ∅
|
||||
intro _ hx
|
||||
simp at hx
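
/-
Hypothetical usage sketch: the forward direction extracts `S = ∅` from any
one-to-one correspondence with `Set.Iio 0`.
-/
example {α : Type _} {S : Set α} (h : S ≈ Set.Iio 0) : S = ∅ :=
  equinumerous_zero_iff_emptyset.mp h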
|
||||
|
||||
/--
|
||||
Empty sets are always equinumerous, regardless of their underlying type.
|
||||
-/
|
||||
theorem equinumerous_emptyset_emptyset [Bot β]
|
||||
: (∅ : Set α) ≈ (∅ : Set β) := by
|
||||
refine ⟨fun _ => ⊥, ?_, ?_, ?_⟩
|
||||
· intro _ hx
|
||||
simp at hx
|
||||
· intro _ hx
|
||||
simp at hx
|
||||
· unfold SurjOn
|
||||
simp
|
||||
|
||||
/--
|
||||
For all natural numbers `m, n`, `m⁺ - n⁺ ≈ m - n`.
|
||||
-/
|
||||
theorem succ_diff_succ_equinumerous_diff {m n : ℕ}
|
||||
: Set.Iio m.succ \ Set.Iio n.succ ≈ Set.Iio m \ Set.Iio n := by
|
||||
refine Set.equinumerous_symm ⟨fun x => x + 1, ?_, ?_, ?_⟩
|
||||
· intro x ⟨hx₁, hx₂⟩
|
||||
simp at hx₁ hx₂ ⊢
|
||||
exact ⟨Nat.le_add_of_sub_le hx₂, Nat.add_lt_of_lt_sub hx₁⟩
|
||||
· intro _ _ _ _ h
|
||||
simp only [add_left_inj] at h
|
||||
exact h
|
||||
· intro h
|
||||
unfold NotEquinumerous Equinumerous
|
||||
simp only [not_exists]
|
||||
· unfold Set.SurjOn Set.image
|
||||
rw [Set.subset_def]
|
||||
intro x ⟨hx₁, hx₂⟩
|
||||
simp only [
|
||||
Set.Iio_diff_Iio,
|
||||
gt_iff_lt,
|
||||
not_lt,
|
||||
ge_iff_le,
|
||||
Set.mem_setOf_eq,
|
||||
Set.mem_Iio
|
||||
] at hx₁ hx₂ ⊢
|
||||
have ⟨p, hp⟩ : ∃ p : ℕ, x = p.succ := by
|
||||
refine Nat.exists_eq_succ_of_ne_zero ?_
|
||||
have := calc 0
|
||||
_ < n.succ := by simp
|
||||
_ ≤ x := hx₂
|
||||
exact Nat.pos_iff_ne_zero.mp this
|
||||
refine ⟨p, ⟨?_, ?_⟩, hp.symm⟩
|
||||
· rw [hp] at hx₂
|
||||
exact Nat.lt_succ.mp hx₂
|
||||
· rw [hp] at hx₁
|
||||
exact Nat.succ_lt_succ_iff.mp hx₁
|
||||
|
||||
/--
|
||||
For all natural numbers `n ≤ m`, `m - n ∪ {m} ≈ m⁺ - n`.
|
||||
-/
|
||||
theorem diff_union_equinumerous_succ_diff {m n : ℕ} (hn: n ≤ m)
|
||||
: Set.Iio m \ Set.Iio n ∪ {m} ≈ Set.Iio (Nat.succ m) \ Set.Iio n := by
|
||||
refine ⟨fun x => x, ?_, ?_, ?_⟩
|
||||
· intro x hx
|
||||
simp at hx ⊢
|
||||
apply Or.elim hx
|
||||
· intro hx₁
|
||||
rw [hx₁]
|
||||
exact ⟨hn, by simp⟩
|
||||
· intro ⟨hx₁, hx₂⟩
|
||||
exact ⟨hx₁, calc x
|
||||
_ < m := hx₂
|
||||
_ < m + 1 := by simp⟩
|
||||
· intro _ _ _ _ h
|
||||
exact h
|
||||
· unfold Set.SurjOn Set.image
|
||||
rw [Set.subset_def]
|
||||
simp only [
|
||||
Set.Iio_diff_Iio,
|
||||
gt_iff_lt,
|
||||
not_lt,
|
||||
ge_iff_le,
|
||||
Set.mem_Ico,
|
||||
Set.union_singleton,
|
||||
lt_self_iff_false,
|
||||
and_false,
|
||||
Set.mem_insert_iff,
|
||||
exists_eq_right,
|
||||
Set.mem_setOf_eq,
|
||||
and_imp
|
||||
]
|
||||
intro x hn hm
|
||||
apply Or.elim (Nat.lt_or_eq_of_lt hm)
|
||||
· intro hx
|
||||
right
|
||||
exact ⟨hn, hx⟩
|
||||
· intro hx
|
||||
left
|
||||
exact hx
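
/-
Hypothetical usage sketch: instantiating the lemma at `n = 0` adjoins `m` to
`{0, …, m - 1}` and lands in `{0, …, m}`.
-/
example {m : ℕ} : Set.Iio m \ Set.Iio 0 ∪ {m} ≈ Set.Iio m.succ \ Set.Iio 0 :=
  diff_union_equinumerous_succ_diff (Nat.zero_le m)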
|
||||
|
||||
end Set
|
|
@ -0,0 +1,235 @@
|
|||
import Mathlib.Data.Set.Function
|
||||
|
||||
/-! # Common.Set.Function
|
||||
|
||||
Additional theorems around functions defined on sets.
|
||||
-/
|
||||
|
||||
namespace Set.Function
|
||||
|
||||
/--
|
||||
Produce a new function that swaps the outputs of the two specified inputs.
|
||||
-/
|
||||
def swap [DecidableEq α] (f : α → β) (x₁ x₂ : α) (x : α) : β :=
|
||||
if x = x₁ then f x₂ else
|
||||
if x = x₂ then f x₁ else
|
||||
f x
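
/-
Illustrative sketch (hypothetical examples, not part of the changeset): on `ℕ`,
swapping the outputs of `0` and `1` of the successor function exchanges their
images and leaves every other input untouched.
-/
example : swap (fun n : ℕ => n + 1) 0 1 0 = 2 := rfl
example : swap (fun n : ℕ => n + 1) 0 1 1 = 1 := rfl
example : swap (fun n : ℕ => n + 1) 0 1 5 = 6 := rfl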
|
||||
|
||||
/--
|
||||
Swapping the same input yields the original function.
|
||||
-/
|
||||
@[simp]
|
||||
theorem swap_eq_eq_self [DecidableEq α] {f : α → β} {x : α}
|
||||
: swap f x x = f := by
|
||||
refine funext ?_
|
||||
intro y
|
||||
unfold swap
|
||||
by_cases hy : y = x
|
||||
· rw [if_pos hy, hy]
|
||||
· rw [if_neg hy, if_neg hy]
|
||||
|
||||
/--
|
||||
Swapping a function twice yields the original function.
|
||||
-/
|
||||
@[simp]
|
||||
theorem swap_swap_eq_self [DecidableEq α] {f : α → β} {x₁ x₂ : α}
|
||||
: swap (swap f x₁ x₂) x₁ x₂ = f := by
|
||||
refine funext ?_
|
||||
intro y
|
||||
by_cases hc₁ : x₂ = x₁
|
||||
· rw [hc₁]
|
||||
simp
|
||||
rw [← Ne.def] at hc₁
|
||||
unfold swap
|
||||
by_cases hc₂ : y = x₁ <;>
|
||||
by_cases hc₃ : y = x₂
|
||||
· rw [if_pos hc₂, if_neg hc₁, if_pos rfl, ← hc₂]
|
||||
· rw [if_pos hc₂, if_neg hc₁, if_pos rfl, ← hc₂]
|
||||
· rw [if_neg hc₂, if_pos hc₃, if_pos rfl, ← hc₃]
|
||||
· rw [if_neg hc₂, if_neg hc₃, if_neg hc₂, if_neg hc₃]
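
/-
Hypothetical usage sketch: pointwise, a double swap evaluates like the original
function, by applying `swap_swap_eq_self` at a single argument.
-/
example {α : Type _} {β : Type _} [DecidableEq α] {f : α → β} {x₁ x₂ : α} (y : α)
    : swap (swap f x₁ x₂) x₁ x₂ y = f y :=
  congrFun swap_swap_eq_self y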
|
||||
|
||||
/--
|
||||
If `f : A → B`, then a swapped variant of `f` also maps `A` into `B`.
|
||||
-/
|
||||
theorem swap_MapsTo_self [DecidableEq α]
|
||||
{A : Set α} {B : Set β} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A) (hf : MapsTo f A B)
|
||||
: MapsTo (swap f a₁ a₂) A B := by
|
||||
intro x hx
|
||||
unfold swap
|
||||
by_cases hc₁ : x = a₁ <;> by_cases hc₂ : x = a₂
|
||||
· rw [if_pos hc₁]
|
||||
exact hf ha₂
|
||||
· rw [if_pos hc₁]
|
||||
exact hf ha₂
|
||||
· rw [if_neg hc₁, if_pos hc₂]
|
||||
exact hf ha₁
|
||||
· rw [if_neg hc₁, if_neg hc₂]
|
||||
exact hf hx
|
||||
|
||||
/--
|
||||
The converse of `swap_MapsTo_self`.
|
||||
-/
|
||||
theorem self_MapsTo_swap [DecidableEq α]
|
||||
{A : Set α} {B : Set β} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A) (hf : MapsTo (swap f a₁ a₂) A B)
|
||||
: MapsTo f A B := by
|
||||
rw [← @swap_swap_eq_self _ _ _ f a₁ a₂]
|
||||
exact swap_MapsTo_self ha₁ ha₂ hf
|
||||
|
||||
/--
|
||||
If `f : A → B`, then `f` maps `A` into `B` **iff** a swap of `f` maps `A` into
|
||||
`B`.
|
||||
-/
|
||||
theorem self_iff_swap_MapsTo [DecidableEq α]
|
||||
{A : Set α} {B : Set β} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A)
|
||||
: MapsTo (swap f a₁ a₂) A B ↔ MapsTo f A B :=
|
||||
⟨self_MapsTo_swap ha₁ ha₂, swap_MapsTo_self ha₁ ha₂⟩
|
||||
|
||||
/--
|
||||
If `f : A → B` is one-to-one, then a swapped variant of `f` is also one-to-one.
|
||||
-/
|
||||
theorem swap_InjOn_self [DecidableEq α]
|
||||
{A : Set α} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A) (hf : InjOn f A)
|
||||
: InjOn (swap f a₁ a₂) A := by
|
||||
intro x₁ hx₁ x₂ hx₂ h
|
||||
unfold swap at h
|
||||
by_cases hc₁ : x₁ = a₁ <;>
|
||||
by_cases hc₂ : x₁ = a₂ <;>
|
||||
by_cases hc₃ : x₂ = a₁ <;>
|
||||
by_cases hc₄ : x₂ = a₂
|
||||
· rw [hc₁, hc₃]
|
||||
· rw [hc₁, hc₃]
|
||||
· rw [hc₂, hc₄]
|
||||
· rw [if_pos hc₁, if_neg hc₃, if_neg hc₄, ← hc₂] at h
|
||||
exact hf hx₁ hx₂ h
|
||||
· rw [hc₁, hc₃]
|
||||
· rw [hc₁, hc₃]
|
||||
· rw [if_pos hc₁, if_neg hc₃, if_pos hc₄, ← hc₁, ← hc₄] at h
|
||||
exact hf hx₁ hx₂ h.symm
|
||||
· rw [if_pos hc₁, if_neg hc₃, if_neg hc₄] at h
|
||||
exact absurd (hf hx₂ ha₂ h.symm) hc₄
|
||||
· rw [hc₂, hc₄]
|
||||
· rw [if_neg hc₁, if_pos hc₂, if_pos hc₃, ← hc₂, ← hc₃] at h
|
||||
exact hf hx₁ hx₂ h.symm
|
||||
· rw [hc₂, hc₄]
|
||||
· rw [if_neg hc₁, if_pos hc₂, if_neg hc₃, if_neg hc₄] at h
|
||||
exact absurd (hf hx₂ ha₁ h.symm) hc₃
|
||||
· rw [if_neg hc₁, if_neg hc₂, if_pos hc₃, ← hc₄] at h
|
||||
exact hf hx₁ hx₂ h
|
||||
· rw [if_neg hc₁, if_neg hc₂, if_pos hc₃] at h
|
||||
exact absurd (hf hx₁ ha₂ h) hc₂
|
||||
· rw [if_neg hc₁, if_neg hc₂, if_neg hc₃, if_pos hc₄] at h
|
||||
exact absurd (hf hx₁ ha₁ h) hc₁
|
||||
· rw [if_neg hc₁, if_neg hc₂, if_neg hc₃, if_neg hc₄] at h
|
||||
exact hf hx₁ hx₂ h
|
||||
|
||||
/--
|
||||
The converse of `swap_InjOn_self`.
|
||||
-/
|
||||
theorem self_InjOn_swap [DecidableEq α]
|
||||
{A : Set α} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A) (hf : InjOn (swap f a₁ a₂) A)
|
||||
: InjOn f A := by
|
||||
rw [← @swap_swap_eq_self _ _ _ f a₁ a₂]
|
||||
exact swap_InjOn_self ha₁ ha₂ hf
|
||||
|
||||
/--
|
||||
If `f : A → B`, then `f` is one-to-one **iff** a swap of `f` is one-to-one.
|
||||
-/
|
||||
theorem self_iff_swap_InjOn [DecidableEq α]
|
||||
{A : Set α} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A)
|
||||
: InjOn (swap f a₁ a₂) A ↔ InjOn f A :=
|
||||
⟨self_InjOn_swap ha₁ ha₂, swap_InjOn_self ha₁ ha₂⟩
|
||||
|
||||
/--
|
||||
If `f : A → B` is onto, then a swapped variant of `f` is also onto.
|
||||
-/
|
||||
theorem swap_SurjOn_self [DecidableEq α]
|
||||
{A : Set α} {B : Set β} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A) (hf : SurjOn f A B)
|
||||
: SurjOn (swap f a₁ a₂) A B := by
|
||||
show ∀ x, x ∈ B → ∃ a ∈ A, swap f a₁ a₂ a = x
|
||||
intro x hx
|
||||
have ⟨a, ha⟩ := hf hx
|
||||
by_cases hc₁ : a = a₁
|
||||
· refine ⟨a₂, ha₂, ?_⟩
|
||||
unfold swap
|
||||
by_cases hc₂ : a₁ = a₂
|
||||
· rw [if_pos hc₂.symm, ← hc₂, ← hc₁]
|
||||
exact ha.right
|
||||
· rw [← Ne.def] at hc₂
|
||||
rw [if_neg hc₂.symm, if_pos rfl, ← hc₁]
|
||||
exact ha.right
|
||||
· by_cases hc₂ : a = a₂
|
||||
· unfold swap
|
||||
refine ⟨a₁, ha₁, ?_⟩
|
||||
rw [if_pos rfl, ← hc₂]
|
||||
exact ha.right
|
||||
· refine ⟨a, ha.left, ?_⟩
|
||||
unfold swap
|
||||
rw [if_neg hc₁, if_neg hc₂]
|
||||
exact ha.right
|
||||
|
||||
/--
|
||||
The converse of `swap_SurjOn_self`.
|
||||
-/
|
||||
theorem self_SurjOn_swap [DecidableEq α]
|
||||
{A : Set α} {B : Set β} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A) (hf : SurjOn (swap f a₁ a₂) A B)
|
||||
: SurjOn f A B := by
|
||||
rw [← @swap_swap_eq_self _ _ _ f a₁ a₂]
|
||||
exact swap_SurjOn_self ha₁ ha₂ hf
|
||||
|
||||
/--
|
||||
If `f : A → B`, then `f` is onto **iff** a swap of `f` is onto.
|
||||
-/
|
||||
theorem self_iff_swap_SurjOn [DecidableEq α]
|
||||
{A : Set α} {B : Set β} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A)
|
||||
: SurjOn (swap f a₁ a₂) A B ↔ SurjOn f A B :=
|
||||
⟨self_SurjOn_swap ha₁ ha₂, swap_SurjOn_self ha₁ ha₂⟩
|
||||
|
||||
/--
|
||||
If `f : A → B` is a one-to-one correspondence, then a swapped variant of `f` is
|
||||
also a one-to-one correspondence.
|
||||
-/
|
||||
theorem swap_BijOn_self [DecidableEq α]
|
||||
{A : Set α} {B : Set β} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A) (hf : BijOn f A B)
|
||||
: BijOn (swap f a₁ a₂) A B := by
|
||||
have ⟨hf₁, hf₂, hf₃⟩ := hf
|
||||
exact ⟨
|
||||
swap_MapsTo_self ha₁ ha₂ hf₁,
|
||||
swap_InjOn_self ha₁ ha₂ hf₂,
|
||||
swap_SurjOn_self ha₁ ha₂ hf₃
|
||||
⟩
|
||||
|
||||
/--
|
||||
The converse of `swap_BijOn_self`.
|
||||
-/
|
||||
theorem self_BijOn_swap [DecidableEq α]
|
||||
{A : Set α} {B : Set β} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A) (hf : BijOn (swap f a₁ a₂) A B)
|
||||
: BijOn f A B := by
|
||||
have ⟨hf₁, hf₂, hf₃⟩ := hf
|
||||
exact ⟨
|
||||
self_MapsTo_swap ha₁ ha₂ hf₁,
|
||||
self_InjOn_swap ha₁ ha₂ hf₂,
|
||||
self_SurjOn_swap ha₁ ha₂ hf₃
|
||||
⟩
|
||||
|
||||
/--
|
||||
If `f : A → B`, `f` is a one-to-one correspondence **iff** a swap of `f` is a
|
||||
one-to-one correspondence.
|
||||
-/
|
||||
theorem self_iff_swap_BijOn [DecidableEq α]
|
||||
{A : Set α} {B : Set β} {f : α → β}
|
||||
(ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A)
|
||||
: BijOn (swap f a₁ a₂) A B ↔ BijOn f A B :=
|
||||
⟨self_BijOn_swap ha₁ ha₂, swap_BijOn_self ha₁ ha₂⟩
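
/-
Hypothetical usage sketch: a bijection `f : A → B` stays a bijection after
swapping the outputs of two members of `A`, via `self_iff_swap_BijOn`.
-/
example {α : Type _} {β : Type _} [DecidableEq α]
    {A : Set α} {B : Set β} {f : α → β} {a₁ a₂ : α}
    (ha₁ : a₁ ∈ A) (ha₂ : a₂ ∈ A) (hf : Set.BijOn f A B)
    : Set.BijOn (swap f a₁ a₂) A B :=
  (self_iff_swap_BijOn ha₁ ha₂).mpr hf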
|
||||
|
||||
end Set.Function
|
|
@ -1,4 +1,5 @@
|
|||
import Common.Logic.Basic
|
||||
import Mathlib.Data.Set.Function
|
||||
import Mathlib.Data.Set.Intervals.Basic
|
||||
|
||||
namespace Set
|
||||
|
@ -8,6 +9,9 @@ namespace Set
|
|||
Additional theorems around intervals.
|
||||
-/
|
||||
|
||||
/--
|
||||
If `m < n` then `{0, …, m - 1} ⊂ {0, …, n - 1}`.
|
||||
-/
|
||||
theorem Iio_nat_lt_ssubset {m n : ℕ} (h : m < n)
|
||||
: Iio m ⊂ Iio n := by
|
||||
rw [ssubset_def]
|
||||
|
@ -22,4 +26,22 @@ theorem Iio_nat_lt_ssubset {m n : ℕ} (h : m < n)
|
|||
simp only [not_forall, not_lt, exists_prop]
|
||||
exact ⟨m, h, by simp⟩
|
||||
|
||||
/--
|
||||
A function maps the empty set onto `Set.Iio n` if and only if `n = 0`.
|
||||
-/
|
||||
theorem SurjOn_emptyset_Iio_iff_eq_zero {n : ℕ} {f : α → ℕ}
|
||||
: SurjOn f ∅ (Set.Iio n) ↔ n = 0 := by
|
||||
apply Iff.intro
|
||||
· intro h
|
||||
unfold SurjOn at h
|
||||
rw [subset_def] at h
|
||||
simp only [mem_Iio, image_empty, mem_empty_iff_false] at h
|
||||
by_contra nh
|
||||
exact h 0 (Nat.pos_of_ne_zero nh)
|
||||
· intro hn
|
||||
unfold SurjOn
|
||||
rw [hn, subset_def]
|
||||
intro x hx
|
||||
exact absurd hx (Nat.not_lt_zero x)
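
/-
Hypothetical usage sketch: the reverse direction shows any function is
vacuously surjective from the empty set onto `Set.Iio 0`.
-/
example {α : Type _} {f : α → ℕ} : SurjOn f ∅ (Set.Iio 0) :=
  SurjOn_emptyset_Iio_iff_eq_zero.mpr rfl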
|
||||
|
||||
end Set
|
|
@ -1,9 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2021 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import DocGen4.Process
|
||||
import DocGen4.Load
|
||||
import DocGen4.Output
|
||||
import DocGen4.LeanInk
|
|
@ -1,7 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import DocGen4.LeanInk.Process
|
||||
import DocGen4.LeanInk.Output
|
|
@ -1,215 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving, Xubai Wang
|
||||
-/
|
||||
import DocGen4.Output.Base
|
||||
import DocGen4.Output.ToHtmlFormat
|
||||
import Lean.Data.Json
|
||||
import LeanInk.Annotation.Alectryon
|
||||
|
||||
namespace LeanInk.Annotation.Alectryon
|
||||
|
||||
open DocGen4 Output
|
||||
open scoped DocGen4.Jsx
|
||||
|
||||
structure AlectryonContext where
|
||||
counter : Nat
|
||||
|
||||
abbrev AlectryonT := StateT AlectryonContext
|
||||
abbrev AlectryonM := AlectryonT HtmlM
|
||||
|
||||
def getNextButtonLabel : AlectryonM String := do
|
||||
let val ← get
|
||||
let newCounter := val.counter + 1
|
||||
set { val with counter := newCounter }
|
||||
return s!"plain-lean4-lean-chk{val.counter}"
|
||||
|
||||
def TypeInfo.toHtml (tyi : TypeInfo) : AlectryonM Html := do
|
||||
pure
|
||||
<div class="alectryon-type-info-wrapper">
|
||||
<small class="alectryon-type-info">
|
||||
<div class="alectryon-goals">
|
||||
<blockquote class="alectryon-goal">
|
||||
<div class="goal-hyps">
|
||||
<span class="hyp-type">
|
||||
<var>{tyi.name}</var>
|
||||
<b>: </b>
|
||||
<span>[← DocGen4.Output.infoFormatToHtml tyi.type.fst]</span>
|
||||
</span>
|
||||
</div>
|
||||
</blockquote>
|
||||
</div>
|
||||
</small>
|
||||
</div>
|
||||
|
||||
def Token.processSemantic (t : Token) : Html :=
|
||||
match t.semanticType with
|
||||
| some "Name.Attribute" => <span class="na">{t.raw}</span>
|
||||
| some "Name.Variable" => <span class="nv">{t.raw}</span>
|
||||
| some "Keyword" => <span class="k">{t.raw}</span>
|
||||
| _ => Html.text t.raw
|
||||
|
||||
def Token.toHtml (t : Token) : AlectryonM Html := do
|
||||
-- Right now t.link is always none from LeanInk, ignore it
|
||||
-- TODO: render docstring
|
||||
let mut parts := #[]
|
||||
if let some tyi := t.typeinfo then
|
||||
parts := parts.push <| ← tyi.toHtml
|
||||
|
||||
parts := parts.push t.processSemantic
|
||||
|
||||
pure
|
||||
-- TODO: Show rest of token
|
||||
<span class="alectryon-token">
|
||||
[parts]
|
||||
</span>
|
||||
|
||||
def Contents.toHtml : Contents → AlectryonM Html
|
||||
| .string value =>
|
||||
pure
|
||||
<span class="alectryon-wsp">
|
||||
{value}
|
||||
</span>
|
||||
| .experimentalTokens values => do
|
||||
let values ← values.mapM Token.toHtml
|
||||
pure
|
||||
<span class="alectryon-wsp">
|
||||
[values]
|
||||
</span>
|
||||
|
||||
def Hypothesis.toHtml (h : Hypothesis) : AlectryonM Html := do
|
||||
let mut hypParts := #[<var>[h.names.intersperse ", " |>.map Html.text |>.toArray]</var>]
|
||||
if h.body.snd != "" then
|
||||
hypParts := hypParts.push
|
||||
<span class="hyp-body">
|
||||
<b>:= </b>
|
||||
<span>[← infoFormatToHtml h.body.fst]</span>
|
||||
</span>
|
||||
hypParts := hypParts.push
|
||||
<span class="hyp-type">
|
||||
<b>: </b>
|
||||
<span >[← infoFormatToHtml h.type.fst]</span>
|
||||
</span>
|
||||
|
||||
pure
|
||||
<span>
|
||||
[hypParts]
|
||||
</span>
|
||||
|
||||
def Goal.toHtml (g : Goal) : AlectryonM Html := do
|
||||
let mut hypotheses := #[]
|
||||
for hyp in g.hypotheses do
|
||||
let rendered ← hyp.toHtml
|
||||
hypotheses := hypotheses.push rendered
|
||||
hypotheses := hypotheses.push <br/>
|
||||
let conclusionHtml ←
|
||||
match g.conclusion with
|
||||
| .typed info _ => infoFormatToHtml info
|
||||
| .untyped str => pure #[Html.text str]
|
||||
|
||||
pure
|
||||
<blockquote class="alectryon-goal">
|
||||
<div class="goal-hyps">
|
||||
[hypotheses]
|
||||
</div>
|
||||
<span class="goal-separator">
|
||||
<hr><span class="goal-name">{g.name}</span></hr>
|
||||
</span>
|
||||
<div class="goal-conclusion">
|
||||
[conclusionHtml]
|
||||
</div>
|
||||
</blockquote>
|
||||
|
||||
def Message.toHtml (m : Message) : AlectryonM Html := do
|
||||
pure
|
||||
<blockquote class="alectryon-message">
|
||||
-- TODO: This might have to be done in a fancier way
|
||||
{m.contents}
|
||||
</blockquote>
|
||||
|
||||
def Sentence.toHtml (s : Sentence) : AlectryonM Html := do
|
||||
let messages :=
|
||||
if s.messages.size > 0 then
|
||||
#[
|
||||
<div class="alectryon-messages">
|
||||
[← s.messages.mapM Message.toHtml]
|
||||
</div>
|
||||
]
|
||||
else
|
||||
#[]
|
||||
|
||||
let goals :=
|
||||
if s.goals.size > 0 then
|
||||
-- TODO: Alectryon has a "alectryon-extra-goals" here, implement it
|
||||
#[
|
||||
<div class="alectryon-goals">
|
||||
[← s.goals.mapM Goal.toHtml]
|
||||
</div>
|
||||
]
|
||||
else
|
||||
#[]
|
||||
|
||||
let buttonLabel ← getNextButtonLabel
|
||||
|
||||
pure
|
||||
<span class="alectryon-sentence">
|
||||
<input class="alectryon-toggle" id={buttonLabel} style="display: none" type="checkbox"/>
|
||||
<label class="alectryon-input" for={buttonLabel}>
|
||||
{← s.contents.toHtml}
|
||||
</label>
|
||||
<small class="alectryon-output">
|
||||
[messages]
|
||||
[goals]
|
||||
</small>
|
||||
</span>
|
||||
|
||||
def Text.toHtml (t : Text) : AlectryonM Html := t.contents.toHtml
|
||||
|
||||
def Fragment.toHtml : Fragment → AlectryonM Html
|
||||
| .text value => value.toHtml
|
||||
| .sentence value => value.toHtml
|
||||
|
||||
def baseHtml (content : Array Html) : AlectryonM Html := do
|
||||
let banner :=
|
||||
<div «class»="alectryon-banner">
|
||||
Built with <a href="https://github.com/leanprover/doc-gen4">doc-gen4</a>, running Lean4.
|
||||
Bubbles (<span class="alectryon-bubble"></span>) indicate interactive fragments: hover for details, tap to reveal contents.
|
||||
Use <kbd>Ctrl+↑</kbd> <kbd>Ctrl+↓</kbd> to navigate, <kbd>Ctrl+🖱️</kbd> to focus.
|
||||
On Mac, use <kbd>Cmd</kbd> instead of <kbd>Ctrl</kbd>.
|
||||
</div>
|
||||
|
||||
pure
|
||||
<html lang="en" class="alectryon-standalone">
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1"/>
|
||||
|
||||
<link rel="stylesheet" href={s!"{← getRoot}src/alectryon.css"}/>
|
||||
<link rel="stylesheet" href={s!"{← getRoot}src/pygments.css"}/>
|
||||
<link rel="stylesheet" href={s!"{← getRoot}src/docutils_basic.css"}/>
|
||||
<link rel="shortcut icon" href={s!"{← getRoot}favicon.ico"}/>
|
||||
|
||||
<script defer="true" src={s!"{← getRoot}src/alectryon.js"}></script>
|
||||
</head>
|
||||
<body>
|
||||
<article class="alectryon-root alectryon-centered">
|
||||
{banner}
|
||||
<pre class="alectryon-io highlight">
|
||||
[content]
|
||||
</pre>
|
||||
</article>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
def annotationsToFragments (as : List Annotation.Annotation) : AnalysisM (List Fragment) := do
|
||||
let config ← read
|
||||
annotateFileWithCompounds [] config.inputFileContents as
|
||||
|
||||
-- TODO: rework monad mess
|
||||
def renderAnnotations (as : List Annotation.Annotation) : HtmlT AnalysisM Html := do
|
||||
let fs ← annotationsToFragments as
|
||||
let (html, _) ← fs.mapM Fragment.toHtml >>= (baseHtml ∘ List.toArray) |>.run { counter := 0 }
|
||||
return html
|
||||
|
||||
end LeanInk.Annotation.Alectryon
|
|
@ -1,57 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
import LeanInk.Analysis
|
||||
import LeanInk.Annotation
|
||||
import DocGen4.LeanInk.Output
|
||||
import DocGen4.Output.Base
|
||||
|
||||
namespace DocGen4.Process.LeanInk
|
||||
|
||||
open Lean
|
||||
open DocGen4.Output
|
||||
|
||||
def docGenOutput (as : List LeanInk.Annotation.Annotation) : HtmlT LeanInk.AnalysisM UInt32 := do
|
||||
let some modName ← getCurrentName | unreachable!
|
||||
let srcHtml ← LeanInk.Annotation.Alectryon.renderAnnotations as
|
||||
let srcDir := moduleNameToDirectory srcBasePath modName
|
||||
let srcPath := moduleNameToFile srcBasePath modName
|
||||
IO.FS.createDirAll srcDir
|
||||
IO.FS.writeFile srcPath srcHtml.toString
|
||||
return 0
|
||||
|
||||
def execAuxM : HtmlT LeanInk.AnalysisM UInt32 := do
|
||||
let ctx ← readThe SiteContext
|
||||
let baseCtx ← readThe SiteBaseContext
|
||||
let outputFn := (docGenOutput · |>.run ctx baseCtx)
|
||||
return ← LeanInk.Analysis.runAnalysis {
|
||||
name := "doc-gen4"
|
||||
genOutput := outputFn
|
||||
}
|
||||
|
||||
def execAux (config : LeanInk.Configuration) : HtmlT IO UInt32 := do
|
||||
execAuxM.run (← readThe SiteContext) (← readThe SiteBaseContext) |>.run config
|
||||
|
||||
@[implemented_by enableInitializersExecution]
|
||||
private def enableInitializersExecutionWrapper : IO Unit := return ()
|
||||
|
||||
def runInk (sourceFilePath : System.FilePath) : HtmlT IO Unit := do
|
||||
let contents ← IO.FS.readFile sourceFilePath
|
||||
let config := {
|
||||
inputFilePath := sourceFilePath
|
||||
inputFileContents := contents
|
||||
lakeFile := none
|
||||
verbose := false
|
||||
prettifyOutput := true
|
||||
experimentalTypeInfo := true
|
||||
experimentalDocString := true
|
||||
experimentalSemanticType := true
|
||||
}
|
||||
enableInitializersExecutionWrapper
|
||||
if (← execAux config) != 0 then
|
||||
throw <| IO.userError s!"Analysis for \"{sourceFilePath}\" failed!"
|
||||
|
||||
end DocGen4.Process.LeanInk
|
|
@ -1,61 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2021 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
|
||||
import Lean
|
||||
import Lake
|
||||
import Lake.CLI.Main
|
||||
import DocGen4.Process
|
||||
import Lean.Data.HashMap
|
||||
|
||||
namespace DocGen4
|
||||
|
||||
open Lean System IO
|
||||
/--
|
||||
Sets up a lake workspace for the current project. Furthermore, initializes
|
||||
the Lean search path with the path to the proper compiler from lean-toolchain
|
||||
as well as all the dependencies.
|
||||
-/
|
||||
def lakeSetup : IO (Except UInt32 Lake.Workspace) := do
|
||||
let (leanInstall?, lakeInstall?) ← Lake.findInstall?
|
||||
let config := Lake.mkLoadConfig.{0} {leanInstall?, lakeInstall?}
|
||||
match ←(EIO.toIO' config) with
|
||||
| .ok config =>
|
||||
let ws : Lake.Workspace ← Lake.loadWorkspace config
|
||||
|>.run Lake.MonadLog.eio
|
||||
|>.toIO (λ _ => IO.userError "Failed to load Lake workspace")
|
||||
pure <| Except.ok ws
|
||||
| .error err =>
|
||||
throw <| IO.userError err.toString
|
||||
|
||||
def envOfImports (imports : List Name) : IO Environment := do
|
||||
importModules (imports.map (Import.mk · false)) Options.empty
|
||||
|
||||
def loadInit (imports : List Name) : IO Hierarchy := do
|
||||
let env ← envOfImports imports
|
||||
pure <| Hierarchy.fromArray env.header.moduleNames
|
||||
|
||||
/--
|
||||
Load a list of modules from the current Lean search path into an `Environment`
|
||||
to process for documentation.
|
||||
-/
|
||||
def load (task : Process.AnalyzeTask) : IO (Process.AnalyzerResult × Hierarchy) := do
|
||||
let env ← envOfImports task.getLoad
|
||||
IO.println "Processing modules"
|
||||
let config := {
|
||||
-- TODO: parameterize maxHeartbeats
|
||||
maxHeartbeats := 100000000,
|
||||
options := ⟨[(`pp.tagAppFns, true)]⟩,
|
||||
-- TODO: Figure out whether this could cause some bugs
|
||||
fileName := default,
|
||||
fileMap := default,
|
||||
}
|
||||
|
||||
Prod.fst <$> Meta.MetaM.toIO (Process.process task) config { env := env } {} {}
|
||||
|
||||
def loadCore : IO (Process.AnalyzerResult × Hierarchy) := do
|
||||
load <| .loadAll [`Init, `Lean, `Lake]
|
||||
|
||||
end DocGen4
|
|
@ -1,156 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2021 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
import Lake
|
||||
import DocGen4.Process
|
||||
import DocGen4.Output.Base
|
||||
import DocGen4.Output.Index
|
||||
import DocGen4.Output.Module
|
||||
import DocGen4.Output.NotFound
|
||||
import DocGen4.Output.Find
|
||||
import DocGen4.Output.SourceLinker
|
||||
import DocGen4.Output.Search
|
||||
import DocGen4.Output.ToJson
|
||||
import DocGen4.Output.FoundationalTypes
|
||||
import DocGen4.LeanInk.Process
|
||||
import Lean.Data.HashMap
|
||||
|
||||
namespace DocGen4
|
||||
|
||||
open Lean IO System Output Process
|
||||
|
||||
def htmlOutputSetup (config : SiteBaseContext) : IO Unit := do
|
||||
let findBasePath := basePath / "find"
|
||||
|
||||
-- Base structure
|
||||
FS.createDirAll basePath
|
||||
FS.createDirAll findBasePath
|
||||
FS.createDirAll srcBasePath
|
||||
FS.createDirAll declarationsBasePath
|
||||
|
||||
-- All the doc-gen static stuff
|
||||
let indexHtml := ReaderT.run index config |>.toString
|
||||
let notFoundHtml := ReaderT.run notFound config |>.toString
|
||||
let foundationalTypesHtml := ReaderT.run foundationalTypes config |>.toString
|
||||
let navbarHtml := ReaderT.run navbar config |>.toString
|
||||
let searchHtml := ReaderT.run search config |>.toString
|
||||
let docGenStatic := #[
|
||||
("style.css", styleCss),
|
||||
("declaration-data.js", declarationDataCenterJs),
|
||||
("color-scheme.js", colorSchemeJs),
|
||||
("nav.js", navJs),
|
||||
("how-about.js", howAboutJs),
|
||||
("search.html", searchHtml),
|
||||
("search.js", searchJs),
|
||||
("mathjax-config.js", mathjaxConfigJs),
|
||||
("instances.js", instancesJs),
|
||||
("importedBy.js", importedByJs),
|
||||
("index.html", indexHtml),
|
||||
("foundational_types.html", foundationalTypesHtml),
|
||||
("404.html", notFoundHtml),
|
||||
("navbar.html", navbarHtml)
|
||||
]
|
||||
for (fileName, content) in docGenStatic do
|
||||
FS.writeFile (basePath / fileName) content
|
||||
|
||||
let findHtml := ReaderT.run find { config with depthToRoot := 1 } |>.toString
|
||||
let findStatic := #[
|
||||
("index.html", findHtml),
|
||||
("find.js", findJs)
|
||||
]
|
||||
for (fileName, content) in findStatic do
|
||||
FS.writeFile (findBasePath / fileName) content
|
||||
|
||||
let alectryonStatic := #[
|
||||
("alectryon.css", alectryonCss),
|
||||
("alectryon.js", alectryonJs),
|
||||
("docutils_basic.css", docUtilsCss),
|
||||
("pygments.css", pygmentsCss)
|
||||
]
|
||||
|
||||
for (fileName, content) in alectryonStatic do
|
||||
FS.writeFile (srcBasePath / fileName) content
|
||||
|
||||
def htmlOutputDeclarationDatas (result : AnalyzerResult) : HtmlT IO Unit := do
|
||||
for (_, mod) in result.moduleInfo.toArray do
|
||||
let jsonDecls ← Module.toJson mod
|
||||
FS.writeFile (declarationsBasePath / s!"declaration-data-{mod.name}.bmp") (toJson jsonDecls).compress
|
||||
|
||||
def htmlOutputResults (baseConfig : SiteBaseContext) (result : AnalyzerResult) (ws : Lake.Workspace) (ink : Bool) : IO Unit := do
|
||||
let config : SiteContext := {
|
||||
result := result,
|
||||
sourceLinker := ← SourceLinker.sourceLinker ws
|
||||
leanInkEnabled := ink
|
||||
}
|
||||
|
||||
FS.createDirAll basePath
|
||||
FS.createDirAll declarationsBasePath
|
||||
|
||||
-- Rendering the entire lean compiler takes time....
|
||||
--let sourceSearchPath := ((←Lean.findSysroot) / "src" / "lean") :: ws.root.srcDir :: ws.leanSrcPath
|
||||
let sourceSearchPath := ws.root.srcDir :: ws.leanSrcPath
|
||||
|
||||
discard <| htmlOutputDeclarationDatas result |>.run config baseConfig
|
||||
|
||||
for (modName, module) in result.moduleInfo.toArray do
|
||||
let fileDir := moduleNameToDirectory basePath modName
|
||||
let filePath := moduleNameToFile basePath modName
|
||||
-- path: 'basePath/module/components/till/last.html'
|
||||
-- The last component is the file name, so we drop it from the depth to root.
|
||||
let baseConfig := { baseConfig with
|
||||
depthToRoot := modName.components.dropLast.length
|
||||
currentName := some modName
|
||||
}
|
||||
let moduleHtml := moduleToHtml module |>.run config baseConfig
|
||||
FS.createDirAll fileDir
|
||||
FS.writeFile filePath moduleHtml.toString
|
||||
if ink then
|
||||
if let some inputPath ← Lean.SearchPath.findModuleWithExt sourceSearchPath "lean" module.name then
|
||||
IO.println s!"Inking: {modName.toString}"
|
||||
-- path: 'basePath/src/module/components/till/last.html'
|
||||
-- The last component is the file name; however, we are in src/ here, so don't drop it this time
|
||||
let baseConfig := {baseConfig with depthToRoot := modName.components.length }
|
||||
Process.LeanInk.runInk inputPath |>.run config baseConfig
|
||||
|
||||
def getSimpleBaseContext (hierarchy : Hierarchy) : IO SiteBaseContext := do
|
||||
return {
|
||||
depthToRoot := 0,
|
||||
currentName := none,
|
||||
hierarchy
|
||||
projectGithubUrl := ← SourceLinker.getProjectGithubUrl
|
||||
projectCommit := ← SourceLinker.getProjectCommit
|
||||
}
|
||||
|
||||
def htmlOutputIndex (baseConfig : SiteBaseContext) : IO Unit := do
|
||||
htmlOutputSetup baseConfig
|
||||
|
||||
let mut index : JsonIndex := {}
|
||||
let mut headerIndex : JsonHeaderIndex := {}
|
||||
for entry in ← System.FilePath.readDir declarationsBasePath do
|
||||
if entry.fileName.startsWith "declaration-data-" && entry.fileName.endsWith ".bmp" then
|
||||
let fileContent ← FS.readFile entry.path
|
||||
let .ok jsonContent := Json.parse fileContent | unreachable!
|
||||
let .ok (module : JsonModule) := fromJson? jsonContent | unreachable!
|
||||
index := index.addModule module |>.run baseConfig
|
||||
headerIndex := headerIndex.addModule module
|
||||
|
||||
let finalJson := toJson index
|
||||
let finalHeaderJson := toJson headerIndex
|
||||
-- The root JSON for find
|
||||
FS.writeFile (declarationsBasePath / "declaration-data.bmp") finalJson.compress
|
||||
FS.writeFile (declarationsBasePath / "header-data.bmp") finalHeaderJson.compress
|
||||
|
||||
/--
|
||||
The main entrypoint for outputting the documentation HTML based on an
|
||||
`AnalyzerResult`.
|
||||
-/
|
||||
def htmlOutput (result : AnalyzerResult) (hierarchy : Hierarchy) (ws : Lake.Workspace) (ink : Bool) : IO Unit := do
|
||||
let baseConfig ← getSimpleBaseContext hierarchy
|
||||
htmlOutputResults baseConfig result ws ink
|
||||
htmlOutputIndex baseConfig
|
||||
|
||||
end DocGen4
|
||||
|
|
@ -1,285 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2021 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import DocGen4.Process
|
||||
import DocGen4.Output.ToHtmlFormat
|
||||
|
||||
namespace DocGen4.Output
|
||||
|
||||
open scoped DocGen4.Jsx
|
||||
open Lean System Widget Elab Process
|
||||
|
||||
def basePath := FilePath.mk "." / "build" / "doc"
|
||||
def srcBasePath := basePath / "src"
|
||||
def declarationsBasePath := basePath / "declarations"
|
||||
|
||||
/--
|
||||
The context used in the `BaseHtmlM` monad for HTML templating.
|
||||
-/
|
||||
structure SiteBaseContext where
|
||||
|
||||
/--
|
||||
The module hierarchy as a tree structure.
|
||||
-/
|
||||
hierarchy : Hierarchy
|
||||
/--
|
||||
How far away we are from the page root, used for relative links to the root.
|
||||
-/
|
||||
depthToRoot: Nat
|
||||
/--
|
||||
The name of the current module, if there is one; there exist a few
|
||||
pages that don't have a module name.
|
||||
-/
|
||||
currentName : Option Name
|
||||
/--
|
||||
The Github URL of the project that we are building docs for.
|
||||
-/
|
||||
projectGithubUrl : String
|
||||
/--
|
||||
The commit of the project that we are building docs for.
|
||||
-/
|
||||
projectCommit : String
|
||||
|
||||
/--
|
||||
The context used in the `HtmlM` monad for HTML templating.
|
||||
-/
|
||||
structure SiteContext where
|
||||
/--
|
||||
The full analysis result from the Process module.
|
||||
-/
|
||||
result : AnalyzerResult
|
||||
/--
|
||||
A function to link declaration names to their source URLs, usually Github ones.
|
||||
-/
|
||||
sourceLinker : Name → Option DeclarationRange → String
|
||||
/--
|
||||
Whether LeanInk is enabled
|
||||
-/
|
||||
leanInkEnabled : Bool
|
||||
|
||||
def setCurrentName (name : Name) (ctx : SiteBaseContext) := {ctx with currentName := some name}
|
||||
|
||||
abbrev BaseHtmlT := ReaderT SiteBaseContext
|
||||
abbrev BaseHtmlM := BaseHtmlT Id
|
||||
|
||||
abbrev HtmlT (m) := ReaderT SiteContext (BaseHtmlT m)
|
||||
abbrev HtmlM := HtmlT Id
|
||||
|
||||
def HtmlT.run (x : HtmlT m α) (ctx : SiteContext) (baseCtx : SiteBaseContext) : m α :=
|
||||
ReaderT.run x ctx |>.run baseCtx
|
||||
|
||||
def HtmlM.run (x : HtmlM α) (ctx : SiteContext) (baseCtx : SiteBaseContext) : α :=
|
||||
ReaderT.run x ctx |>.run baseCtx |>.run
|
||||
|
||||
instance [Monad m] : MonadLift HtmlM (HtmlT m) where
|
||||
monadLift x := do return x.run (← readThe SiteContext) (← readThe SiteBaseContext)
|
||||
|
||||
instance [Monad m] : MonadLift BaseHtmlM (BaseHtmlT m) where
|
||||
monadLift x := do return x.run (← readThe SiteBaseContext)
|
||||
|
||||
/--
|
||||
Obtains the root URL as a relative one to the current depth.
|
||||
-/
|
||||
def getRoot : BaseHtmlM String := do
|
||||
let rec go: Nat -> String
|
||||
| 0 => "./"
|
||||
| Nat.succ n' => "../" ++ go n'
|
||||
let d <- SiteBaseContext.depthToRoot <$> read
|
||||
return (go d)
|
||||
|
||||
def getHierarchy : BaseHtmlM Hierarchy := do return (← read).hierarchy
|
||||
def getCurrentName : BaseHtmlM (Option Name) := do return (← read).currentName
|
||||
def getResult : HtmlM AnalyzerResult := do return (← read).result
|
||||
def getSourceUrl (module : Name) (range : Option DeclarationRange): HtmlM String := do return (← read).sourceLinker module range
|
||||
def leanInkEnabled? : HtmlM Bool := do return (← read).leanInkEnabled
|
||||
def getProjectGithubUrl : BaseHtmlM String := do return (← read).projectGithubUrl
|
||||
def getProjectCommit : BaseHtmlM String := do return (← read).projectCommit
|
||||
|
||||
/--
|
||||
If a template is meant to be extended because it, for example, only provides the
|
||||
header but no real content, this is the way to fill the template with content.
|
||||
This is untyped so HtmlM and BaseHtmlM can be mixed.
|
||||
-/
|
||||
def templateExtends {α β} {m} [Bind m] (base : α → m β) (new : m α) : m β :=
|
||||
new >>= base
|
||||
|
||||
def templateLiftExtends {α β} {m n} [Bind m] [MonadLift n m] (base : α → n β) (new : m α) : m β :=
|
||||
new >>= (monadLift ∘ base)
|
||||
|
||||
/-
|
||||
Returns the doc-gen4 link to a module `NameExt`.
|
||||
-/
|
||||
def moduleNameExtToLink (n : NameExt) : BaseHtmlM String := do
|
||||
let parts := n.name.components.map Name.toString
|
||||
return (← getRoot) ++ (parts.intersperse "/").foldl (· ++ ·) "" ++ "." ++ n.ext.toString
|
||||
|
||||
/--
|
||||
Returns the doc-gen4 link to a module name.
|
||||
-/
|
||||
def moduleNameToHtmlLink (n : Name) : BaseHtmlM String :=
|
||||
moduleNameExtToLink ⟨n, .html⟩
|
||||
|
||||
/--
|
||||
Returns the HTML doc-gen4 link to a module name.
|
||||
-/
|
||||
def moduleToHtmlLink (module : Name) : BaseHtmlM Html := do
|
||||
return <a href={← moduleNameToHtmlLink module}>{module.toString}</a>
|
||||
|
||||
/--
|
||||
Returns the LeanInk link to a module name.
|
||||
-/
|
||||
def moduleNameToInkLink (n : Name) : BaseHtmlM String := do
|
||||
let parts := "src" :: n.components.map Name.toString
|
||||
return (← getRoot) ++ (parts.intersperse "/").foldl (· ++ ·) "" ++ ".html"
|
||||
|
||||
/--
|
||||
Returns the path to the HTML file that contains information about a module.
|
||||
-/
|
||||
def moduleNameToFile (basePath : FilePath) (n : Name) : FilePath :=
|
||||
let parts := n.components.map Name.toString
|
||||
FilePath.withExtension (basePath / parts.foldl (· / ·) (FilePath.mk ".")) "html"
|
||||
|
||||
/--
|
||||
Returns the directory of the HTML file that contains information about a module.
|
||||
-/
|
||||
def moduleNameToDirectory (basePath : FilePath) (n : Name) : FilePath :=
|
||||
let parts := n.components.dropLast.map Name.toString
|
||||
basePath / parts.foldl (· / ·) (FilePath.mk ".")
|
||||
|
||||
section Static
|
||||
/-!
|
||||
The following section contains all the statically included files that
|
||||
are used in documentation generation, notably JS and CSS ones.
|
||||
-/
|
||||
def styleCss : String := include_str "../../static/style.css"
|
||||
def declarationDataCenterJs : String := include_str "../../static/declaration-data.js"
|
||||
def colorSchemeJs : String := include_str "../../static/color-scheme.js"
|
||||
def navJs : String := include_str "../../static/nav.js"
|
||||
def howAboutJs : String := include_str "../../static/how-about.js"
|
||||
def searchJs : String := include_str "../../static/search.js"
|
||||
def instancesJs : String := include_str "../../static/instances.js"
|
||||
def importedByJs : String := include_str "../../static/importedBy.js"
|
||||
def findJs : String := include_str "../../static/find/find.js"
|
||||
def mathjaxConfigJs : String := include_str "../../static/mathjax-config.js"
|
||||
|
||||
def alectryonCss : String := include_str "../../static/alectryon/alectryon.css"
|
||||
def alectryonJs : String := include_str "../../static/alectryon/alectryon.js"
|
||||
def docUtilsCss : String := include_str "../../static/alectryon/docutils_basic.css"
|
||||
def pygmentsCss : String := include_str "../../static/alectryon/pygments.css"
|
||||
end Static
|
||||
|
||||
/--
|
||||
Returns the doc-gen4 link to a declaration name.
|
||||
-/
|
||||
def declNameToLink (name : Name) : HtmlM String := do
|
||||
let res ← getResult
|
||||
let module := res.moduleNames[res.name2ModIdx.find! name |>.toNat]!
|
||||
return (← moduleNameToHtmlLink module) ++ "#" ++ name.toString
|
||||
|
||||
/--
|
||||
Returns the HTML doc-gen4 link to a declaration name.
|
||||
-/
|
||||
def declNameToHtmlLink (name : Name) : HtmlM Html := do
|
||||
return <a href={← declNameToLink name}>{name.toString}</a>
|
||||
|
||||
/--
|
||||
Returns the LeanInk link to a declaration name.
|
||||
-/
|
||||
def declNameToInkLink (name : Name) : HtmlM String := do
|
||||
let res ← getResult
|
||||
let module := res.moduleNames[res.name2ModIdx.find! name |>.toNat]!
|
||||
return (← moduleNameToInkLink module) ++ "#" ++ name.toString
|
||||
|
||||
/--
|
||||
Returns a name split into parts.
|
||||
Together with the "break_within" CSS class, this helps the browser break a name
|
||||
nicely.
|
||||
-/
|
||||
def breakWithin (name: String) : (Array Html) :=
|
||||
name.splitOn "."
|
||||
|> .map (fun (s: String) => <span class="name">{s}</span>)
|
||||
|> .intersperse "."
|
||||
|> List.toArray
|
||||
|
||||
/--
|
||||
Returns the HTML doc-gen4 link to a declaration name with "break_within"
|
||||
set as class.
|
||||
-/
|
||||
def declNameToHtmlBreakWithinLink (name : Name) : HtmlM Html := do
|
||||
return <a class="break_within" href={← declNameToLink name}>
|
||||
[breakWithin name.toString]
|
||||
</a>
|
||||
|
||||
/--
|
||||
In Lean syntax declarations the following pattern is quite common:
|
||||
```
|
||||
syntax term " + " term : term
|
||||
```
|
||||
that is, we place spaces around the operator in the middle. When the
|
||||
`InfoTree` framework provides us with information about what source token
|
||||
corresponds to which identifier it will thus say that `" + "` corresponds to
|
||||
`HAdd.hadd`. This is however not the way we want this to be linked, in the HTML
|
||||
only `+` should be linked, taking care of this is what this function is
|
||||
responsible for.
|
||||
-/
|
||||
def splitWhitespaces (s : String) : (String × String × String) := Id.run do
|
||||
let front := "".pushn ' ' <| s.offsetOfPos (s.find (!Char.isWhitespace ·))
|
||||
let mut s := s.trimLeft
|
||||
let back := "".pushn ' ' (s.length - s.offsetOfPos (s.find Char.isWhitespace))
|
||||
s := s.trimRight
|
||||
(front, s, back)
|
||||
|
||||
/--
|
||||
Turns a `CodeWithInfos` object, that is basically a Lean syntax tree with
|
||||
information about what the identifiers mean, into an HTML object that links
|
||||
to as much information as possible.
|
||||
-/
|
||||
partial def infoFormatToHtml (i : CodeWithInfos) : HtmlM (Array Html) := do
|
||||
match i with
|
||||
| .text t => return #[Html.escape t]
|
||||
| .append tt => tt.foldlM (fun acc t => do return acc ++ (← infoFormatToHtml t)) #[]
|
||||
| .tag a t =>
|
||||
match a.info.val.info with
|
||||
| Info.ofTermInfo i =>
|
||||
let cleanExpr := i.expr.consumeMData
|
||||
match cleanExpr with
|
||||
| .const name _ =>
|
||||
-- TODO: this is some very primitive blacklisting but real Blacklisting needs MetaM
|
||||
-- find a better solution
|
||||
if (← getResult).name2ModIdx.contains name then
|
||||
match t with
|
||||
| .text t =>
|
||||
let (front, t, back) := splitWhitespaces <| Html.escape t
|
||||
let elem := <a href={← declNameToLink name}>{t}</a>
|
||||
return #[Html.text front, elem, Html.text back]
|
||||
| _ =>
|
||||
return #[<a href={← declNameToLink name}>[← infoFormatToHtml t]</a>]
|
||||
else
|
||||
return #[<span class="fn">[← infoFormatToHtml t]</span>]
|
||||
| .sort _ =>
|
||||
match t with
|
||||
| .text t =>
|
||||
let mut sortPrefix :: rest := t.splitOn " " | unreachable!
|
||||
let sortLink := <a href={s!"{← getRoot}foundational_types.html"}>{sortPrefix}</a>
|
||||
if rest != [] then
|
||||
rest := " " :: rest
|
||||
return #[sortLink, Html.text <| String.join rest]
|
||||
| _ =>
|
||||
return #[<a href={s!"{← getRoot}foundational_types.html"}>[← infoFormatToHtml t]</a>]
|
||||
| _ =>
|
||||
return #[<span class="fn">[← infoFormatToHtml t]</span>]
|
||||
| _ => return #[<span class="fn">[← infoFormatToHtml t]</span>]
|
||||
|
||||
def baseHtmlHeadDeclarations : BaseHtmlM (Array Html) := do
|
||||
return #[
|
||||
<meta charset="UTF-8"/>,
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1"/>,
|
||||
<link rel="stylesheet" href={s!"{← getRoot}style.css"}/>,
|
||||
<link rel="stylesheet" href={s!"{← getRoot}src/pygments.css"}/>,
|
||||
<link rel="shortcut icon" href={s!"{← getRoot}favicon.ico"}/>,
|
||||
<link rel="prefetch" href={s!"{← getRoot}/declarations/declaration-data.bmp"} as="image"/>
|
||||
]
|
||||
|
||||
end DocGen4.Output
|
|
@ -1,22 +0,0 @@
|
|||
import DocGen4.Output.Template
|
||||
import DocGen4.Output.Structure
|
||||
import DocGen4.Process
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
open scoped DocGen4.Jsx
|
||||
open Lean
|
||||
|
||||
def classInstancesToHtml (className : Name) : HtmlM Html := do
|
||||
pure
|
||||
<details «class»="instances">
|
||||
<summary>Instances</summary>
|
||||
<ul id={s!"instances-list-{className}"} class="instances-list"></ul>
|
||||
</details>
|
||||
|
||||
def classToHtml (i : Process.ClassInfo) : HtmlM (Array Html) := do
|
||||
structureToHtml i
|
||||
|
||||
end Output
|
||||
end DocGen4
|
|
@ -1,14 +0,0 @@
|
|||
import DocGen4.Output.Template
|
||||
import DocGen4.Output.Class
|
||||
import DocGen4.Output.Inductive
|
||||
import DocGen4.Process
|
||||
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
def classInductiveToHtml (i : Process.ClassInductiveInfo) : HtmlM (Array Html) := do
|
||||
inductiveToHtml i
|
||||
|
||||
end Output
|
||||
end DocGen4
|
|
@ -1,51 +0,0 @@
|
|||
import DocGen4.Output.Template
|
||||
import DocGen4.Output.DocString
|
||||
import DocGen4.Process
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
open scoped DocGen4.Jsx
|
||||
open Lean Widget
|
||||
|
||||
/-- This is basically an arbitrary number that seems to work okay. -/
|
||||
def equationLimit : Nat := 200
|
||||
|
||||
def equationToHtml (c : CodeWithInfos) : HtmlM Html := do
|
||||
return <li class="equation">[← infoFormatToHtml c]</li>
|
||||
|
||||
/--
|
||||
Attempt to render all `simp` equations for this definition. At a size
|
||||
defined in `equationLimit` we stop trying since they:
|
||||
- are too ugly to read most of the time
|
||||
- take too long
|
||||
-/
|
||||
def equationsToHtml (i : Process.DefinitionInfo) : HtmlM (Array Html) := do
|
||||
if let some eqs := i.equations then
|
||||
let equationsHtml ← eqs.mapM equationToHtml
|
||||
let filteredEquationsHtml := equationsHtml.filter (·.textLength < equationLimit)
|
||||
if equationsHtml.size ≠ filteredEquationsHtml.size then
|
||||
return #[
|
||||
<details>
|
||||
<summary>Equations</summary>
|
||||
<ul class="equations">
|
||||
<li class="equation">One or more equations did not get rendered due to their size.</li>
|
||||
[filteredEquationsHtml]
|
||||
</ul>
|
||||
</details>
|
||||
]
|
||||
else
|
||||
return #[
|
||||
<details>
|
||||
<summary>Equations</summary>
|
||||
<ul class="equations">
|
||||
[filteredEquationsHtml]
|
||||
</ul>
|
||||
</details>
|
||||
]
|
||||
else
|
||||
return #[]
|
||||
|
||||
end Output
|
||||
end DocGen4
|
||||
|
|
@ -1,216 +0,0 @@
|
|||
import CMark
|
||||
import DocGen4.Output.Template
|
||||
import Lean.Data.Parsec
|
||||
import UnicodeBasic
|
||||
|
||||
open Lean Xml Parser Parsec DocGen4.Process
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
/-- Auxiliary function for `splitAround`. -/
|
||||
@[specialize] partial def splitAroundAux (s : String) (p : Char → Bool) (b i : String.Pos) (r : List String) : List String :=
|
||||
if s.atEnd i then
|
||||
let r := (s.extract b i)::r
|
||||
r.reverse
|
||||
else
|
||||
let c := s.get i
|
||||
if p c then
|
||||
let i := s.next i
|
||||
splitAroundAux s p i i (c.toString::s.extract b (i-⟨1⟩)::r)
|
||||
else
|
||||
splitAroundAux s p b (s.next i) r
|
||||
|
||||
/--
|
||||
Similar to `String.split` in Lean core, but keeps the separator.
|
||||
e.g. `splitAround "a,b,c" (fun c => c = ',') = ["a", ",", "b", ",", "c"]`
|
||||
-/
|
||||
def splitAround (s : String) (p : Char → Bool) : List String := splitAroundAux s p 0 0 []
|
||||
|
||||
instance : Inhabited Element := ⟨"", Lean.RBMap.empty, #[]⟩
|
||||
|
||||
/-- Parse an array of Xml/Html document from String. -/
|
||||
def manyDocument : Parsec (Array Element) := many (prolog *> element <* many Misc) <* eof
|
||||
|
||||
/--
|
||||
Generate id for heading elements, with the following rules:
|
||||
|
||||
1. Characters in `letter`, `mark`, `number` and `symbol` unicode categories are preserved.
|
||||
2. Any sequences of characters in the `punctuation`, `separator` and `other` categories are replaced by a single dash.
|
||||
3. Cases (upper and lower) are preserved.
|
||||
4. Xml/Html tags are ignored.
|
||||
-/
|
||||
partial def xmlGetHeadingId (el : Xml.Element) : String :=
|
||||
elementToPlainText el |> replaceCharSeq unicodeToDrop "-"
|
||||
where
|
||||
elementToPlainText el := match el with
|
||||
| (Element.Element _ _ contents) =>
|
||||
"".intercalate (contents.toList.map contentToPlainText)
|
||||
contentToPlainText c := match c with
|
||||
| Content.Element el => elementToPlainText el
|
||||
| Content.Comment _ => ""
|
||||
| Content.Character s => s
|
||||
replaceCharSeq pattern replacement s :=
|
||||
s.split pattern
|
||||
|>.filter (!·.isEmpty)
|
||||
|> replacement.intercalate
|
||||
unicodeToDrop (c : Char) : Bool :=
|
||||
let cats := [
|
||||
Unicode.GeneralCategory.P, -- punctuation
|
||||
Unicode.GeneralCategory.Z, -- separator
|
||||
Unicode.GeneralCategory.C -- other
|
||||
]
|
||||
cats.any (Unicode.isInGeneralCategory c)
|
||||
|
||||
/--
|
||||
This function tries to find the given name, both globally and in the current module.
|
||||
|
||||
For a global search, a precise name is needed. If the global search fails, the function
|
||||
tries to find a local one that ends with the given search name.
|
||||
-/
|
||||
def nameToLink? (s : String) : HtmlM (Option String) := do
|
||||
let res ← getResult
|
||||
if let some name := Lean.Syntax.decodeNameLit ("`" ++ s) then
|
||||
-- with exactly the same name
|
||||
if res.name2ModIdx.contains name then
|
||||
declNameToLink name
|
||||
-- module name
|
||||
else if res.moduleNames.contains name then
|
||||
moduleNameToHtmlLink name
|
||||
-- find similar name in the same module
|
||||
else
|
||||
match (← getCurrentName) with
|
||||
| some currentName =>
|
||||
match res.moduleInfo.find! currentName |>.members |> filterDocInfo |>.find? (sameEnd ·.getName name) with
|
||||
| some info =>
|
||||
declNameToLink info.getName
|
||||
| _ => return none
|
||||
| _ => return none
|
||||
else
|
||||
return none
|
||||
where
|
||||
-- check if two names have the same ending components
|
||||
sameEnd n1 n2 :=
|
||||
List.zip n1.componentsRev n2.componentsRev
|
||||
|>.all fun ⟨a, b⟩ => a == b
|
||||
|
||||
/--
|
||||
Extend links with following rules:
|
||||
|
||||
1. if the link starts with `##`, a name search is used and will panic if not found
|
||||
2. if the link starts with `#`, it's treated as id link, no modification
|
||||
3. if the link starts with `http`, it's an absolute one, no modification
|
||||
4. otherwise it's a relative link, extend it with base url
|
||||
-/
|
||||
def extendLink (s : String) : HtmlM String := do
|
||||
-- for intra doc links
|
||||
if s.startsWith "##" then
|
||||
if let some link ← nameToLink? (s.drop 2) then
|
||||
return link
|
||||
else
|
||||
panic! s!"Cannot find {s.drop 2}, only full name and abbrev in current module is supported"
|
||||
-- for id
|
||||
else if s.startsWith "#" then
|
||||
return s
|
||||
-- for absolute and relative urls
|
||||
else if s.startsWith "http" then
|
||||
return s
|
||||
else return ((← getRoot) ++ s)
|
||||
|
||||
/-- Add attributes for heading. -/
|
||||
def addHeadingAttributes (el : Element) (modifyElement : Element → HtmlM Element) : HtmlM Element := do
|
||||
match el with
|
||||
| Element.Element name attrs contents => do
|
||||
let id := xmlGetHeadingId el
|
||||
let anchorAttributes := Lean.RBMap.empty
|
||||
|>.insert "class" "hover-link"
|
||||
|>.insert "href" s!"#{id}"
|
||||
let anchor := Element.Element "a" anchorAttributes #[Content.Character "#"]
|
||||
let newAttrs := attrs
|
||||
|>.insert "id" id
|
||||
|>.insert "class" "markdown-heading"
|
||||
let newContents := (←
|
||||
contents.mapM (fun c => match c with
|
||||
| Content.Element e => return Content.Element (← modifyElement e)
|
||||
| _ => pure c))
|
||||
|>.push (Content.Character " ")
|
||||
|>.push (Content.Element anchor)
|
||||
return ⟨ name, newAttrs, newContents⟩
|
||||

/-- Extend anchor links. -/
def extendAnchor (el : Element) : HtmlM Element := do
  match el with
  | Element.Element name attrs contents =>
    let newAttrs ← match attrs.find? "href" with
      | some href => pure (attrs.insert "href" (← extendLink href))
      | none => pure attrs
    return ⟨name, newAttrs, contents⟩

/-- Automatically add intra documentation link for inline code span. -/
def autoLink (el : Element) : HtmlM Element := do
  match el with
  | Element.Element name attrs contents =>
    let mut newContents := #[]
    for c in contents do
      match c with
      | Content.Character s =>
        newContents := newContents ++ (← splitAround s unicodeToSplit |>.mapM linkify).join
      | _ => newContents := newContents.push c
    return ⟨name, attrs, newContents⟩
where
  linkify s := do
    let link? ← nameToLink? s
    match link? with
    | some link =>
      let attributes := Lean.RBMap.empty.insert "href" link
      return [Content.Element <| Element.Element "a" attributes #[Content.Character s]]
    | none =>
      let sHead := s.dropRightWhile (· != '.')
      let sTail := s.takeRightWhile (· != '.')
      let link'? ← nameToLink? sTail
      match link'? with
      | some link' =>
        let attributes := Lean.RBMap.empty.insert "href" link'
        return [
          Content.Character sHead,
          Content.Element <| Element.Element "a" attributes #[Content.Character sTail]
        ]
      | none =>
        return [Content.Character s]
  unicodeToSplit (c : Char) : Bool :=
    let cats := [
      Unicode.GeneralCategory.Z, -- separator
      Unicode.GeneralCategory.C -- other
    ]
    cats.any (Unicode.isInGeneralCategory c)
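-- A hedged worked example of the fallback above: for a code span that does not resolve
-- as a full name, only the final dot-separated component is retried, e.g. for "Nat.succ":
--   "Nat.succ".dropRightWhile (· != '.') = "Nat."   (kept as plain text)
--   "Nat.succ".takeRightWhile (· != '.') = "succ"   (looked up again via `nameToLink?`)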

/-- Core function of modifying the cmark rendered docstring html. -/
partial def modifyElement (element : Element) : HtmlM Element :=
  match element with
  | el@(Element.Element name attrs contents) => do
    -- add id and class to <h_></h_>
    if name = "h1" ∨ name = "h2" ∨ name = "h3" ∨ name = "h4" ∨ name = "h5" ∨ name = "h6" then
      addHeadingAttributes el modifyElement
    -- extend relative href for <a></a>
    else if name = "a" then
      extendAnchor el
    -- auto link for inline <code></code>
    else if name = "code" then
      autoLink el
    -- recursively modify
    else
      let newContents ← contents.mapM fun c => match c with
        | Content.Element e => return Content.Element (← modifyElement e)
        | _ => pure c
      return ⟨name, attrs, newContents⟩

/-- Convert docstring to Html. -/
def docStringToHtml (s : String) : HtmlM (Array Html) := do
  let rendered := CMark.renderHtml s
  match manyDocument rendered.mkIterator with
  | Parsec.ParseResult.success _ res =>
    res.mapM fun x => do return Html.text <| toString (← modifyElement x)
  | _ => return #[Html.text rendered]

end Output
end DocGen4
@ -1,22 +0,0 @@
|
|||
import DocGen4.Output.Template

namespace DocGen4
namespace Output

open scoped DocGen4.Jsx
open Lean

def find : BaseHtmlM Html := do
  pure
    <html lang="en">
      <head>
        <link rel="preload" href={s!"{← getRoot}/declarations/declaration-data.bmp"} as="image"/>
        <script>{s!"const SITE_ROOT={String.quote (← getRoot)};"}</script>
        <script type="module" async="true" src="./find.js"></script>
      </head>
      <body></body>
    </html>

end Output
end DocGen4
@ -1,51 +0,0 @@
|
|||
import DocGen4.Output.Template
import DocGen4.Output.Inductive

namespace DocGen4.Output

open scoped DocGen4.Jsx

def foundationalTypes : BaseHtmlM Html := templateLiftExtends (baseHtml "Foundational Types") do
  pure <|
    <main>
      <a id="top"></a>
      <h1>Foundational Types</h1>

      <p>Some of Lean's types are not defined in any Lean source files (even the <code>prelude</code>) since they come from its foundational type theory. This page provides basic documentation for these types.</p>
      <p>For a more in-depth explanation of Lean's type theory, refer to
      <a href="https://leanprover.github.io/theorem_proving_in_lean4/dependent_type_theory.html">TPiL</a>.</p>

      <h2 id="codesort-ucode"><code>Sort u</code></h2>
      <p><code>Sort u</code> is the type of types in Lean, and <code>Sort u : Sort (u + 1)</code>.</p>
      {← instancesForToHtml `_builtin_sortu}

      <h2 id="codetype-ucode"><code>Type u</code></h2>
      <p><code>Type u</code> is notation for <code>Sort (u + 1)</code>.</p>
      {← instancesForToHtml `_builtin_typeu}

      <h2 id="codepropcode"><code>Prop</code></h2>
      <p><code>Prop</code> is notation for <code>Sort 0</code>.</p>
      {← instancesForToHtml `_builtin_prop}

      <h2 id="pi-types-codeπ-a--α-β-acode">Pi types, <code>{"(a : α) → β a"}</code></h2>
      <p>The type of dependent functions is known as a pi type.
      Non-dependent functions and implications are a special case.</p>
      <p>Note that these can also be written with the alternative notations:</p>
      <ul>
        <li><code>∀ a : α, β a</code>, conventionally used where <code>β a : Prop</code>.</li>
        <li><code>(a : α) → β a</code></li>
        <li><code>α → γ</code>, possible only if <code>β a = γ</code> for all <code>a</code>.</li>
      </ul>
      <p>Lean also permits ASCII-only spellings of the three variants:</p>
      <ul>
        <li><code>forall a : A, B a</code> for <code>{"∀ a : α, β a"}</code></li>
        <li><code>(a : A) -> B a</code>, for <code>(a : α) → β a</code></li>
        <li><code>A -> B</code>, for <code>α → β</code></li>
      </ul>
      <p>Note that despite not itself being a function, <code>(→)</code> is available as infix notation for
      <code>{"fun α β, α → β"}</code>.</p>
      -- TODO: instances for pi types
    </main>

end DocGen4.Output
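-- A minimal, hedged Lean illustration of the statements rendered above:
#check (Prop : Type)        -- `Prop` is `Sort 0`, and `Sort 0 : Sort 1 = Type`
#check (Type : Type 1)      -- `Type u` is notation for `Sort (u + 1)`
example (β : Nat → Prop) : ((n : Nat) → β n) = (∀ n : Nat, β n) := rfl  -- `∀` is a pi type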
@ -1,83 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2021 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import DocGen4.Output.ToHtmlFormat
|
||||
import DocGen4.Output.Template
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
open scoped DocGen4.Jsx
|
||||
|
||||
def index : BaseHtmlM Html := do templateExtends (baseHtml "Index") <|
|
||||
pure <|
|
||||
<main>
|
||||
<a id="top"></a>
|
||||
<h1>Bookshelf</h1>
|
||||
<p>
|
||||
A study of the books listed below. Most proofs are conducted in LaTeX.
|
||||
Where feasible, theorems are also formally proven in
|
||||
<a target="_blank" href="https://leanprover.github.io/">Lean</a>.
|
||||
</p>
|
||||
|
||||
<h2>In Progress</h2>
|
||||
<ul>
|
||||
<li>Apostol, Tom M. Calculus, Vol. 1: One-Variable Calculus, with an Introduction to Linear Algebra. 2nd ed. Vol. 1. 2 vols. Wiley, 1991.</li>
|
||||
<li>Enderton, Herbert B. A Mathematical Introduction to Logic. 2nd ed. San Diego: Harcourt/Academic Press, 2001.</li>
|
||||
<li>Enderton, Herbert B. Elements of Set Theory. New York: Academic Press, 1977.</li>
|
||||
</ul>
|
||||
|
||||
<h2>Complete</h2>
|
||||
<ul>
|
||||
<li>Avigad, Jeremy. ‘Theorem Proving in Lean’, n.d.</li>
|
||||
</ul>
|
||||
|
||||
<h2>Pending</h2>
|
||||
<ul>
|
||||
<li>Axler, Sheldon. Linear Algebra Done Right. Undergraduate Texts in Mathematics. Cham: Springer International Publishing, 2015.</li>
|
||||
<li>Cormen, Thomas H., Charles E. Leiserson, Ronald L. Rivest, and Clifford Stein. Introduction to Algorithms. 3rd ed. Cambridge, Mass: MIT Press, 2009.</li>
|
||||
<li>Gries, David. The Science of Programming. Texts and Monographs in Computer Science. New York: Springer-Verlag, 1981.</li>
|
||||
<li>Gustedt, Jens. Modern C. Shelter Island, NY: Manning Publications Co, 2020.</li>
|
||||
<li>Ross, Sheldon. A First Course in Probability Theory. 8th ed. Pearson Prentice Hall, n.d.</li>
|
||||
<li>Smullyan, Raymond M. To Mock a Mockingbird: And Other Logic Puzzles Including an Amazing Adventure in Combinatory Logic. Oxford: Oxford university press, 2000.</li>
|
||||
</ul>
|
||||
|
||||
<h2>Legend</h2>
|
||||
<p>
|
||||
A color/symbol code is used on generated PDF headers to indicate their
|
||||
status:
|
||||
<ul>
|
||||
<li>
|
||||
<span style="color:darkgray">Dark gray statements </span> are
|
||||
reserved for definitions and axioms that have been encoded in LaTeX.
|
||||
A reference to a definition in Lean may also be provided.
|
||||
</li>
|
||||
<li>
|
||||
<span style="color:teal">Teal statements </span> are reserved for
|
||||
statements, theorems, lemmas, etc. that have been proven in LaTeX
|
||||
and have a corresponding proof in Lean.
|
||||
</li>
|
||||
<li>
|
||||
<span style="color:olive">Olive statements </span> are reserved for
|
||||
statements, theorems, lemmas, etc. that have been proven in LaTeX.
|
||||
A reference to a statement in Lean may also be provided.
|
||||
</li>
|
||||
<li>
|
||||
<span style="color:fuchsia">Fuchsia statements </span> are reserved
|
||||
for statements, theorems, lemmas, etc. that have been proven in
|
||||
LaTeX and <i>will </i> have a corresponding proof in Lean.
|
||||
</li>
|
||||
<li>
|
||||
<span style="color:maroon">Maroon </span> serves as a catch-all for
|
||||
statements that don't fit the above categorizations. Incomplete
|
||||
definitions, statements without proof, etc. belong here.
|
||||
</li>
|
||||
</ul>
|
||||
</p>
|
||||
<p>This was built using Lean 4 at commit <a href={s!"https://github.com/leanprover/lean4/tree/{Lean.githash}"}>{Lean.githash}</a></p>
|
||||
</main>
|
||||
|
||||
end Output
|
||||
end DocGen4
|
|
@ -1,39 +0,0 @@
|
|||
import DocGen4.Output.Template
|
||||
import DocGen4.Output.DocString
|
||||
import DocGen4.Process
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
open scoped DocGen4.Jsx
|
||||
open Lean
|
||||
|
||||
def instancesForToHtml (typeName : Name) : BaseHtmlM Html := do
|
||||
pure
|
||||
<details «class»="instances">
|
||||
<summary>Instances For</summary>
|
||||
<ul id={s!"instances-for-list-{typeName}"} class="instances-for-list"></ul>
|
||||
</details>
|
||||
|
||||
def ctorToHtml (c : Process.NameInfo) : HtmlM Html := do
|
||||
let shortName := c.name.componentsRev.head!.toString
|
||||
let name := c.name.toString
|
||||
if let some doc := c.doc then
|
||||
let renderedDoc ← docStringToHtml doc
|
||||
pure
|
||||
<li class="constructor" id={name}>
|
||||
{shortName} : [← infoFormatToHtml c.type]
|
||||
<div class="inductive_ctor_doc">[renderedDoc]</div>
|
||||
</li>
|
||||
else
|
||||
pure
|
||||
<li class="constructor" id={name}>
|
||||
{shortName} : [← infoFormatToHtml c.type]
|
||||
</li>
|
||||
|
||||
def inductiveToHtml (i : Process.InductiveInfo) : HtmlM (Array Html) := do
|
||||
let constructorsHtml := <ul class="constructors">[← i.ctors.toArray.mapM ctorToHtml]</ul>
|
||||
return #[constructorsHtml]
|
||||
|
||||
end Output
|
||||
end DocGen4
|
|
@ -1,8 +0,0 @@
|
|||
import DocGen4.Output.Template
import DocGen4.Output.Definition

namespace DocGen4
namespace Output

end Output
end DocGen4
@ -1,216 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2021 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import DocGen4.Output.Template
|
||||
import DocGen4.Output.Inductive
|
||||
import DocGen4.Output.Structure
|
||||
import DocGen4.Output.Class
|
||||
import DocGen4.Output.Definition
|
||||
import DocGen4.Output.Instance
|
||||
import DocGen4.Output.ClassInductive
|
||||
import DocGen4.Output.DocString
|
||||
import DocGen4.Process
|
||||
import Lean.Data.Xml.Parser
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
open scoped DocGen4.Jsx
|
||||
open Lean Process
|
||||
|
||||
/--
|
||||
Render an `Arg` as HTML, adding opacity effects etc. depending on what
|
||||
type of binder it has.
|
||||
-/
|
||||
def argToHtml (arg : Arg) : HtmlM Html := do
|
||||
let (l, r, implicit) := match arg.binderInfo with
|
||||
| BinderInfo.default => ("(", ")", false)
|
||||
| BinderInfo.implicit => ("{", "}", true)
|
||||
| BinderInfo.strictImplicit => ("⦃", "⦄", true)
|
||||
| BinderInfo.instImplicit => ("[", "]", true)
|
||||
let mut nodes := #[Html.text s!"{l}{arg.name.toString} : "]
|
||||
nodes := nodes.append (← infoFormatToHtml arg.type)
|
||||
nodes := nodes.push r
|
||||
let inner := <span class="fn">[nodes]</span>
|
||||
let html := Html.element "span" false #[("class", "decl_args")] #[inner]
|
||||
if implicit then
|
||||
return <span class="impl_arg">{html}</span>
|
||||
else
|
||||
return html
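-- A hedged sketch of the binder rendering above, for hypothetical arguments:
--   default        (x : Nat)             → plain <span class="decl_args">…</span>
--   implicit       {α : Type}            → additionally wrapped in <span class="impl_arg">…</span>
--   inst-implicit  [inst : Decidable p]  → likewise treated as implicit for display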
|
||||
|
||||
/--
|
||||
Render the structures this structure extends from as HTML so it can be
|
||||
added to the top level.
|
||||
-/
|
||||
def structureInfoHeader (s : Process.StructureInfo) : HtmlM (Array Html) := do
|
||||
let mut nodes := #[]
|
||||
if s.parents.size > 0 then
|
||||
nodes := nodes.push <span class="decl_extends">extends</span>
|
||||
let mut parents := #[]
|
||||
for parent in s.parents do
|
||||
let link ← declNameToHtmlBreakWithinLink parent
|
||||
let inner := <span class="fn">{link}</span>
|
||||
let html:= Html.element "span" false #[("class", "decl_parent")] #[inner]
|
||||
parents := parents.push html
|
||||
nodes := nodes.append (parents.toList.intersperse (Html.text ", ")).toArray
|
||||
return nodes
|
||||
|
||||
/--
|
||||
Render the general header of a declaration containing its declaration type
|
||||
and name.
|
||||
-/
|
||||
def docInfoHeader (doc : DocInfo) : HtmlM Html := do
|
||||
let mut nodes := #[]
|
||||
nodes := nodes.push <| Html.element "span" false #[("class", "decl_kind")] #[doc.getKindDescription]
|
||||
nodes := nodes.push
|
||||
<span class="decl_name">
|
||||
{← declNameToHtmlBreakWithinLink doc.getName}
|
||||
</span>
|
||||
for arg in doc.getArgs do
|
||||
nodes := nodes.push (← argToHtml arg)
|
||||
|
||||
match doc with
|
||||
| DocInfo.structureInfo i => nodes := nodes.append (← structureInfoHeader i)
|
||||
| DocInfo.classInfo i => nodes := nodes.append (← structureInfoHeader i)
|
||||
| _ => nodes := nodes
|
||||
|
||||
nodes := nodes.push <| Html.element "span" true #[("class", "decl_args")] #[" :"]
|
||||
nodes := nodes.push <div class="decl_type">[← infoFormatToHtml doc.getType]</div>
|
||||
return <div class="decl_header"> [nodes] </div>
|
||||
|
||||
/--
|
||||
The main entry point for rendering a single declaration inside a given module.
|
||||
-/
|
||||
def docInfoToHtml (module : Name) (doc : DocInfo) : HtmlM Html := do
|
||||
-- basic info like headers, types, structure fields, etc.
|
||||
let docInfoHtml ← match doc with
|
||||
| DocInfo.inductiveInfo i => inductiveToHtml i
|
||||
| DocInfo.structureInfo i => structureToHtml i
|
||||
| DocInfo.classInfo i => classToHtml i
|
||||
| DocInfo.classInductiveInfo i => classInductiveToHtml i
|
||||
| _ => pure #[]
|
||||
-- rendered doc string
|
||||
let docStringHtml ← match doc.getDocString with
|
||||
| some s => docStringToHtml s
|
||||
| none => pure #[]
|
||||
-- extra information like equations and instances
|
||||
let extraInfoHtml ← match doc with
|
||||
| DocInfo.classInfo i => pure #[← classInstancesToHtml i.name]
|
||||
| DocInfo.definitionInfo i => pure ((← equationsToHtml i) ++ #[← instancesForToHtml i.name])
|
||||
| DocInfo.instanceInfo i => equationsToHtml i.toDefinitionInfo
|
||||
| DocInfo.classInductiveInfo i => pure #[← classInstancesToHtml i.name]
|
||||
| DocInfo.inductiveInfo i => pure #[← instancesForToHtml i.name]
|
||||
| DocInfo.structureInfo i => pure #[← instancesForToHtml i.name]
|
||||
| _ => pure #[]
|
||||
let attrs := doc.getAttrs
|
||||
let attrsHtml :=
|
||||
if attrs.size > 0 then
|
||||
let attrStr := "@[" ++ String.intercalate ", " doc.getAttrs.toList ++ "]"
|
||||
#[Html.element "div" false #[("class", "attributes")] #[attrStr]]
|
||||
else
|
||||
#[]
|
||||
let leanInkHtml :=
|
||||
if ← leanInkEnabled? then
|
||||
#[
|
||||
<div class="ink_link">
|
||||
<a href={← declNameToInkLink doc.getName}>ink</a>
|
||||
</div>
|
||||
]
|
||||
else
|
||||
#[]
|
||||
|
||||
pure
|
||||
<div class="decl" id={doc.getName.toString}>
|
||||
<div class={doc.getKind}>
|
||||
<div class="gh_link">
|
||||
<a href={← getSourceUrl module doc.getDeclarationRange}>source</a>
|
||||
</div>
|
||||
[leanInkHtml]
|
||||
[attrsHtml]
|
||||
{← docInfoHeader doc}
|
||||
[docInfoHtml]
|
||||
[docStringHtml]
|
||||
[extraInfoHtml]
|
||||
</div>
|
||||
</div>
|
||||
|
||||
/--
|
||||
Render a module doc string (one written with `/-!`, i.e. an `!` after the comment opener)
as HTML.
|
||||
-/
|
||||
def modDocToHtml (mdoc : ModuleDoc) : HtmlM Html := do
|
||||
pure
|
||||
<div class="mod_doc">
|
||||
[← docStringToHtml mdoc.doc]
|
||||
</div>
|
||||
|
||||
/--
|
||||
Render a module member, that is either a module doc string or a declaration
|
||||
as HTML.
|
||||
-/
|
||||
def moduleMemberToHtml (module : Name) (member : ModuleMember) : HtmlM Html := do
|
||||
match member with
|
||||
| ModuleMember.docInfo d => docInfoToHtml module d
|
||||
| ModuleMember.modDoc d => modDocToHtml d
|
||||
|
||||
def declarationToNavLink (module : Name) : Html :=
|
||||
<div class="nav_link">
|
||||
<a class="break_within" href={s!"#{module.toString}"}>
|
||||
[breakWithin module.toString]
|
||||
</a>
|
||||
</div>
|
||||
|
||||
/--
|
||||
Return the list of all modules that this module imports.
|
||||
-/
|
||||
def getImports (module : Name) : HtmlM (Array Name) := do
|
||||
let res ← getResult
|
||||
return res.moduleInfo.find! module |>.imports
|
||||
|
||||
/--
|
||||
Sort the list of all modules this one is importing, linkify it
|
||||
and return the HTML.
|
||||
-/
|
||||
def importsHtml (moduleName : Name) : HtmlM (Array Html) := do
|
||||
let imports := (← getImports moduleName).qsort Name.lt
|
||||
imports.mapM (fun i => do return <li>{← moduleToHtmlLink i}</li>)
|
||||
|
||||
/--
|
||||
Render the internal nav bar (the thing on the right on all module pages).
|
||||
-/
|
||||
def internalNav (members : Array Name) (moduleName : Name) : HtmlM Html := do
|
||||
pure
|
||||
<nav class="internal_nav">
|
||||
<h3><a class="break_within" href="#top">[breakWithin moduleName.toString]</a></h3>
|
||||
<p class="gh_nav_link"><a href={← getSourceUrl moduleName none}>source</a></p>
|
||||
<div class="imports">
|
||||
<details>
|
||||
<summary>Imports</summary>
|
||||
<ul>
|
||||
[← importsHtml moduleName]
|
||||
</ul>
|
||||
</details>
|
||||
<details>
|
||||
<summary>Imported by</summary>
|
||||
<ul id={s!"imported-by-{moduleName}"} class="imported-by-list"> </ul>
|
||||
</details>
|
||||
</div>
|
||||
[members.map declarationToNavLink]
|
||||
</nav>
|
||||
|
||||
/--
|
||||
The main entry point to rendering the HTML for an entire module.
|
||||
-/
|
||||
def moduleToHtml (module : Process.Module) : HtmlM Html := withTheReader SiteBaseContext (setCurrentName module.name) do
|
||||
let relevantMembers := module.members.filter Process.ModuleMember.shouldRender
|
||||
let memberDocs ← relevantMembers.mapM (moduleMemberToHtml module.name)
|
||||
let memberNames := filterDocInfo relevantMembers |>.map DocInfo.getName
|
||||
templateLiftExtends (baseHtmlGenerator module.name.toString) <| pure #[
|
||||
← internalNav memberNames module.name,
|
||||
Html.element "main" false #[] memberDocs
|
||||
]
|
||||
|
||||
end Output
|
||||
end DocGen4
|
|
@ -1,111 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2021 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
import DocGen4.Output.ToHtmlFormat
|
||||
import DocGen4.Output.Base
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
open Lean
|
||||
open scoped DocGen4.Jsx
|
||||
|
||||
def moduleListFile (file : NameExt) : BaseHtmlM Html := do
|
||||
let contents :=
|
||||
if file.ext == .pdf then
|
||||
<span>{s!"🗎 {file.getString!} (<a class=\"pdf\" target=\"_blank\" href={← moduleNameExtToLink file}>pdf</a>)"}</span>
|
||||
else
|
||||
<a href={← moduleNameExtToLink file}>{file.getString!}</a>
|
||||
return <div class={if (← getCurrentName) == file.name then "nav_link visible" else "nav_link"}>
|
||||
{contents}
|
||||
</div>
|
||||
|
||||
/--
|
||||
Build the HTML tree representing the module hierarchy.
|
||||
-/
|
||||
partial def moduleListDir (h : Hierarchy) : BaseHtmlM Html := do
|
||||
let children := Array.mk (h.getChildren.toList.map Prod.snd)
|
||||
let nodes ← children.mapM (fun c =>
|
||||
if c.getChildren.toList.length != 0 then
|
||||
moduleListDir c
|
||||
else if Hierarchy.isFile c && c.getChildren.toList.length = 0 then
|
||||
moduleListFile (Hierarchy.getNameExt c)
|
||||
else
|
||||
pure ""
|
||||
)
|
||||
let moduleLink ← moduleNameToHtmlLink h.getName
|
||||
let summary :=
|
||||
if h.isFile then
|
||||
<summary>{s!"{h.getName.getString!} ({<a href={← moduleNameToHtmlLink h.getName}>file</a>})"} </summary>
|
||||
else
|
||||
<summary>{h.getName.getString!}</summary>
|
||||
|
||||
pure
|
||||
<details class="nav_sect" "data-path"={moduleLink} [if (← getCurrentName).any (h.getName.isPrefixOf ·) then #[("open", "")] else #[]]>
|
||||
{summary}
|
||||
[nodes]
|
||||
</details>
|
||||
|
||||
/--
|
||||
Return a list of top level modules, linkified and rendered as HTML
|
||||
-/
|
||||
def moduleList : BaseHtmlM Html := do
|
||||
let hierarchy ← getHierarchy
|
||||
let mut list := Array.empty
|
||||
for (_, cs) in hierarchy.getChildren do
|
||||
list := list.push <| ← moduleListDir cs
|
||||
return <div class="module_list">[list]</div>
|
||||
|
||||
/--
|
||||
The main entry point to rendering the navbar on the left hand side.
|
||||
-/
|
||||
def navbar : BaseHtmlM Html := do
|
||||
pure
|
||||
<html lang="en">
|
||||
<head>
|
||||
[← baseHtmlHeadDeclarations]
|
||||
|
||||
<script type="module" src={s!"{← getRoot}nav.js"}></script>
|
||||
<script type="module" src={s!"{← getRoot}color-scheme.js"}></script>
|
||||
<base target="_parent" />
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class="navframe">
|
||||
<nav class="nav">
|
||||
<h3>General documentation</h3>
|
||||
<div class="nav_link"><a href={s!"{← getRoot}"}>index</a></div>
|
||||
<div class="nav_link"><a href={s!"{← getRoot}foundational_types.html"}>foundational types</a></div>
|
||||
/-
|
||||
TODO: Add these in later
|
||||
<div class="nav_link"><a href={s!"{← getRoot}tactics.html"}>tactics</a></div>
|
||||
<div class="nav_link"><a href={s!"{← getRoot}commands.html"}>commands</a></div>
|
||||
<div class="nav_link"><a href={s!"{← getRoot}hole_commands.html"}>hole commands</a></div>
|
||||
<div class="nav_link"><a href={s!"{← getRoot}attributes.html"}>attributes</a></div>
|
||||
<div class="nav_link"><a href={s!"{← getRoot}notes.html"}>notes</a></div>
|
||||
<div class="nav_link"><a href={s!"{← getRoot}references.html"}>references</a></div>
|
||||
-/
|
||||
<h3>Library</h3>
|
||||
{← moduleList}
|
||||
<div id="settings" hidden="hidden">
|
||||
-- `input` is a void tag, but can be self-closed to make parsing easier.
|
||||
<h3>Color scheme</h3>
|
||||
<form id="color-theme-switcher">
|
||||
<label for="color-theme-dark">
|
||||
<input type="radio" name="color_theme" id="color-theme-dark" value="dark" autocomplete="off"/>dark</label>
|
||||
<label for="color-theme-system" title="Match system theme settings">
|
||||
<input type="radio" name="color_theme" id="color-theme-system" value="system" autocomplete="off"/>system</label>
|
||||
<label for="color-theme-light">
|
||||
<input type="radio" name="color_theme" id="color-theme-light" value="light" autocomplete="off"/>light</label>
|
||||
</form>
|
||||
</div>
|
||||
</nav>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
end Output
|
||||
end DocGen4
|
|
@ -1,26 +0,0 @@
|
|||
/-
Copyright (c) 2021 Henrik Böving. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Henrik Böving
-/
import DocGen4.Output.ToHtmlFormat
import DocGen4.Output.Template

namespace DocGen4
namespace Output

open scoped DocGen4.Jsx

/--
Render the 404 page.
-/
def notFound : BaseHtmlM Html := do templateExtends (baseHtml "404") <|
  pure <|
    <main>
      <h1>404 Not Found</h1>
      <p> Unfortunately, the page you were looking for is no longer here. </p>
      <div id="howabout"></div>
    </main>

end Output
end DocGen4
@ -1,48 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2023 Jeremy Salwen. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Jeremy Salwen
|
||||
-/
|
||||
import DocGen4.Output.ToHtmlFormat
|
||||
import DocGen4.Output.Template
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
open scoped DocGen4.Jsx
|
||||
|
||||
def search : BaseHtmlM Html := do templateExtends (baseHtml "Search") <|
|
||||
pure <|
|
||||
<main>
|
||||
<h1> Search Results </h1>
|
||||
<label for="search_page_query">Query:</label>
|
||||
<input id="search_page_query" />
|
||||
<div id="kinds">
|
||||
Allowed Kinds:
|
||||
<input type="checkbox" id="def_checkbox" class="kind_checkbox" value="def" checked="checked" />
|
||||
<label for="def_checkbox">def</label>
|
||||
<input type="checkbox" id="theorem_checkbox" class="kind_checkbox" value="theorem" checked="checked" />
|
||||
<label for="theorem_checkbox">theorem</label>
|
||||
<input type="checkbox" id="inductive_checkbox" class="kind_checkbox" value="inductive" checked="checked" />
|
||||
<label for="inductive_checkbox">inductive</label>
|
||||
<input type="checkbox" id="structure_checkbox" class="kind_checkbox" value="structure" checked="checked" />
|
||||
<label for="structure_checkbox">structure</label>
|
||||
<input type="checkbox" id="class_checkbox" class="kind_checkbox" value="class" checked="checked" />
|
||||
<label for="class_checkbox">class</label>
|
||||
<input type="checkbox" id="instance_checkbox" class="kind_checkbox" value="instance" checked="checked" />
|
||||
<label for="instance_checkbox">instance</label>
|
||||
<input type="checkbox" id="axiom_checkbox" class="axiom_checkbox" value="axiom" checked="checked" />
|
||||
<label for="axiom_checkbox">axiom</label>
|
||||
<input type="checkbox" id="opaque_checkbox" class="kind_checkbox" value="opaque" checked="checked" />
|
||||
<label for="opaque_checkbox">opaque</label>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
document.getElementById("search_page_query").value = new URL(window.location.href).searchParams.get("q")
|
||||
</script>
|
||||
<div id="search_results">
|
||||
</div>
|
||||
</main>
|
||||
|
||||
end Output
|
||||
end DocGen4
|
|
@ -1,103 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
import Lake.Load
|
||||
|
||||
namespace DocGen4.Output.SourceLinker
|
||||
|
||||
open Lean
|
||||
|
||||
/--
|
||||
Turns a Github git remote URL into an HTTPS Github URL.
|
||||
Three link types from git supported:
|
||||
- https://github.com/org/repo
|
||||
- https://github.com/org/repo.git
|
||||
- git@github.com:org/repo.git
|
||||
|
||||
TODO: This function is quite brittle and very GitHub-specific; we can
probably do better.
|
||||
-/
|
||||
def getGithubBaseUrl (gitUrl : String) : String := Id.run do
|
||||
let mut url := gitUrl
|
||||
if url.startsWith "git@" then
|
||||
url := url.drop 15
|
||||
url := url.dropRight 4
|
||||
return s!"https://github.com/{url}"
|
||||
else if url.endsWith ".git" then
|
||||
return url.dropRight 4
|
||||
else
|
||||
return url
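-- Hedged examples of the URL normalization above (hypothetical remotes):
--   "git@github.com:org/repo.git"      ↦ "https://github.com/org/repo"
--   "https://github.com/org/repo.git"  ↦ "https://github.com/org/repo"
--   "https://github.com/org/repo"      ↦ unchanged
#eval "git@github.com:org/repo.git".drop 15 |>.dropRight 4  -- "org/repo"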
|
||||
|
||||
/--
|
||||
Obtain the Github URL of a project by parsing the origin remote.
|
||||
-/
|
||||
def getProjectGithubUrl (directory : System.FilePath := "." ) : IO String := do
|
||||
let out ← IO.Process.output {
|
||||
cmd := "git",
|
||||
args := #["remote", "get-url", "origin"],
|
||||
cwd := directory
|
||||
}
|
||||
if out.exitCode != 0 then
|
||||
throw <| IO.userError <| "git exited with code " ++ toString out.exitCode
|
||||
return out.stdout.trimRight
|
||||
|
||||
/--
|
||||
Obtain the git commit hash of the project that is currently getting analyzed.
|
||||
-/
|
||||
def getProjectCommit (directory : System.FilePath := "." ) : IO String := do
|
||||
let out ← IO.Process.output {
|
||||
cmd := "git",
|
||||
args := #["rev-parse", "HEAD"]
|
||||
cwd := directory
|
||||
}
|
||||
if out.exitCode != 0 then
|
||||
throw <| IO.userError <| "git exited with code " ++ toString out.exitCode
|
||||
return out.stdout.trimRight
|
||||
|
||||
/--
|
||||
Given a Lake workspace with all of its dependencies, as well as the hash of the
compiler release to work with, this provides a function that turns declaration
names into (optionally position-annotated) GitHub URLs.
|
||||
-/
|
||||
def sourceLinker (ws : Lake.Workspace) : IO (Name → Option DeclarationRange → String) := do
|
||||
let leanHash := ws.lakeEnv.lean.githash
|
||||
-- Compute a map from package names to source URL
|
||||
let mut gitMap := Lean.mkHashMap
|
||||
let projectBaseUrl := getGithubBaseUrl (← getProjectGithubUrl)
|
||||
let projectCommit ← getProjectCommit
|
||||
gitMap := gitMap.insert ws.root.name (projectBaseUrl, projectCommit)
|
||||
let manifest ← Lake.Manifest.loadOrEmpty ws.root.manifestFile
|
||||
|>.run (Lake.MonadLog.eio .normal)
|
||||
|>.toIO (fun _ => IO.userError "Failed to load lake manifest")
|
||||
for pkg in manifest.entryArray do
|
||||
match pkg with
|
||||
| .git _ _ _ url rev .. => gitMap := gitMap.insert pkg.name (getGithubBaseUrl url, rev)
|
||||
| .path _ _ _ path =>
|
||||
let pkgBaseUrl := getGithubBaseUrl (← getProjectGithubUrl path)
|
||||
let pkgCommit ← getProjectCommit path
|
||||
gitMap := gitMap.insert pkg.name (pkgBaseUrl, pkgCommit)
|
||||
|
||||
return fun module range =>
|
||||
let parts := module.components.map Name.toString
|
||||
let path := (parts.intersperse "/").foldl (· ++ ·) ""
|
||||
let root := module.getRoot
|
||||
let basic := if root == `Lean ∨ root == `Init then
|
||||
s!"https://github.com/leanprover/lean4/blob/{leanHash}/src/{path}.lean"
|
||||
else if root == `Lake then
|
||||
s!"https://github.com/leanprover/lean4/blob/{leanHash}/src/lake/{path}.lean"
|
||||
else
|
||||
match ws.packageArray.find? (·.isLocalModule module) with
|
||||
| some pkg =>
|
||||
match gitMap.find? pkg.name with
|
||||
| some (baseUrl, commit) => s!"{baseUrl}/blob/{commit}/{path}.lean"
|
||||
| none => "https://example.com"
|
||||
| none => "https://example.com"
|
||||
|
||||
match range with
|
||||
| some range => s!"{basic}#L{range.pos.line}-L{range.endPos.line}"
|
||||
| none => basic
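-- A hedged example of the kind of URL the linker above produces for a core declaration
-- defined in `Init.Data.Nat.Basic` on lines 10–20:
--   s!"https://github.com/leanprover/lean4/blob/{leanHash}/src/Init/Data/Nat/Basic.lean#L10-L20"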
|
||||
|
||||
end DocGen4.Output.SourceLinker
|
|
@ -1,51 +0,0 @@
|
|||
import DocGen4.Output.Template
|
||||
import DocGen4.Output.DocString
|
||||
import DocGen4.Process
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
open scoped DocGen4.Jsx
|
||||
open Lean
|
||||
|
||||
/--
|
||||
Render a single field consisting of its documentation, its name and its type as HTML.
|
||||
-/
|
||||
def fieldToHtml (f : Process.NameInfo) : HtmlM Html := do
|
||||
let shortName := f.name.componentsRev.head!.toString
|
||||
let name := f.name.toString
|
||||
if let some doc := f.doc then
|
||||
let renderedDoc ← docStringToHtml doc
|
||||
pure
|
||||
<li id={name} class="structure_field">
|
||||
<div class="structure_field_info">{s!"{shortName} "} : [← infoFormatToHtml f.type]</div>
|
||||
<div class="structure_field_doc">[renderedDoc]</div>
|
||||
</li>
|
||||
else
|
||||
pure
|
||||
<li id={name} class="structure_field">
|
||||
<div class="structure_field_info">{s!"{shortName} "} : [← infoFormatToHtml f.type]</div>
|
||||
</li>
|
||||
|
||||
/--
|
||||
Render all information about a structure as HTML.
|
||||
-/
|
||||
def structureToHtml (i : Process.StructureInfo) : HtmlM (Array Html) := do
|
||||
let structureHtml :=
|
||||
if Name.isSuffixOf `mk i.ctor.name then
|
||||
(<ul class="structure_fields" id={i.ctor.name.toString}>
|
||||
[← i.fieldInfo.mapM fieldToHtml]
|
||||
</ul>)
|
||||
else
|
||||
let ctorShortName := i.ctor.name.componentsRev.head!.toString
|
||||
(<ul class="structure_ext">
|
||||
<li id={i.ctor.name.toString} class="structure_ext_ctor">{s!"{ctorShortName} "} :: (</li>
|
||||
<ul class="structure_ext_fields">
|
||||
[← i.fieldInfo.mapM fieldToHtml]
|
||||
</ul>
|
||||
<li class="structure_ext_ctor">)</li>
|
||||
</ul>)
|
||||
return #[structureHtml]
|
||||
|
||||
end Output
|
||||
end DocGen4
|
|
@ -1,70 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2021 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import DocGen4.Output.ToHtmlFormat
|
||||
import DocGen4.Output.Navbar
|
||||
|
||||
namespace DocGen4
|
||||
namespace Output
|
||||
|
||||
open scoped DocGen4.Jsx
|
||||
|
||||
/--
|
||||
The HTML template used for all pages.
|
||||
-/
|
||||
def baseHtmlGenerator (title : String) (site : Array Html) : BaseHtmlM Html := do
|
||||
let moduleConstant :=
|
||||
if let some module := ← getCurrentName then
|
||||
#[<script>{s!"const MODULE_NAME={String.quote module.toString};"}</script>]
|
||||
else
|
||||
#[]
|
||||
pure
|
||||
<html lang="en">
|
||||
<head>
|
||||
[← baseHtmlHeadDeclarations]
|
||||
|
||||
<title>{title}</title>
|
||||
<script defer="true" src={s!"{← getRoot}mathjax-config.js"}></script>
|
||||
<script defer="true" src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script>
|
||||
<script defer="true" src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script>
|
||||
|
||||
<script>{s!"const SITE_ROOT={String.quote (← getRoot)};"}</script>
|
||||
[moduleConstant]
|
||||
<script type="module" src={s!"{← getRoot}search.js"}></script>
|
||||
<script type="module" src={s!"{← getRoot}how-about.js"}></script>
|
||||
<script type="module" src={s!"{← getRoot}instances.js"}></script>
|
||||
<script type="module" src={s!"{← getRoot}importedBy.js"}></script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<input id="nav_toggle" type="checkbox"/>
|
||||
|
||||
<header>
|
||||
<h1><label for="nav_toggle"></label>Documentation</h1>
|
||||
<p class="header_filename break_within">[breakWithin title]</p>
|
||||
<form action="https://google.com/search" method="get" id="search_form">
|
||||
<input type="hidden" name="sitesearch" value="https://leanprover-community.github.io/mathlib4_docs"/>
|
||||
<input type="text" name="q" autocomplete="off"/> 
|
||||
<button id="search_button" onclick={s!"javascript: form.action='{← getRoot}search.html';"}>Search</button>
|
||||
<button>Google site search</button>
|
||||
</form>
|
||||
</header>
|
||||
|
||||
[site]
|
||||
|
||||
<nav class="nav">
|
||||
<iframe src={s!"{← getRoot}navbar.html"} class="navframe" frameBorder="0"></iframe>
|
||||
</nav>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
/--
|
||||
A convenience wrapper around `baseHtmlGenerator`.
|
||||
-/
|
||||
def baseHtml (title : String) (site : Html) : BaseHtmlM Html := baseHtmlGenerator title #[site]
|
||||
|
||||
end Output
|
||||
end DocGen4
|
|
@ -1,149 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2021 Wojciech Nawrocki. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
|
||||
Authors: Wojciech Nawrocki, Sebastian Ullrich, Henrik Böving
|
||||
-/
|
||||
import Lean.Data.Json
|
||||
import Lean.Parser
|
||||
|
||||
/-! This module defines:
|
||||
- a representation of HTML trees
|
||||
- together with a JSX-like DSL for writing them
|
||||
- and widget support for visualizing any type as HTML. -/
|
||||
|
||||
namespace DocGen4
|
||||
|
||||
open Lean
|
||||
|
||||
inductive Html where
|
||||
-- TODO(WN): it's nameless for shorter JSON; re-add names when we have deriving strategies for From/ToJson
|
||||
-- element (tag : String) (flatten : Bool) (attrs : Array HtmlAttribute) (children : Array Html)
|
||||
| element : String → Bool → Array (String × String) → Array Html → Html
|
||||
| text : String → Html
|
||||
deriving Repr, BEq, Inhabited, FromJson, ToJson
|
||||
|
||||
instance : Coe String Html :=
|
||||
⟨Html.text⟩
|
||||
|
||||
namespace Html
|
||||
|
||||
def attributesToString (attrs : Array (String × String)) :String :=
|
||||
attrs.foldl (fun acc (k, v) => acc ++ " " ++ k ++ "=\"" ++ v ++ "\"") ""
|
||||
|
||||
-- TODO: Termination proof
|
||||
partial def toStringAux : Html → String
|
||||
| element tag false attrs #[text s] => s!"<{tag}{attributesToString attrs}>{s}</{tag}>\n"
|
||||
| element tag false attrs #[child] => s!"<{tag}{attributesToString attrs}>\n{child.toStringAux}</{tag}>\n"
|
||||
| element tag false attrs children => s!"<{tag}{attributesToString attrs}>\n{children.foldl (· ++ toStringAux ·) ""}</{tag}>\n"
|
||||
| element tag true attrs children => s!"<{tag}{attributesToString attrs}>{children.foldl (· ++ toStringAux ·) ""}</{tag}>"
|
||||
| text s => s
|
||||
|
||||
def toString (html : Html) : String :=
|
||||
html.toStringAux.trimRight
|
||||
|
||||
instance : ToString Html :=
|
||||
⟨toString⟩
|
||||
|
||||
partial def textLength : Html → Nat
|
||||
| text s => s.length
|
||||
| element _ _ _ children =>
|
||||
let lengths := children.map textLength
|
||||
lengths.foldl Nat.add 0
|
||||
|
||||
def escapePairs : Array (String × String) :=
  #[
    ("&", "&amp;"),
    ("<", "&lt;"),
    (">", "&gt;"),
    ("\"", "&quot;")
  ]
|
||||
|
||||
def escape (s : String) : String :=
|
||||
escapePairs.foldl (fun acc (o, r) => acc.replace o r) s
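-- Hedged usage sketch (with the escape pairs above restored to their HTML entities):
--   escape "a < b & \"c\""  =  "a &lt; b &amp; &quot;c&quot;"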
|
||||
|
||||
end Html
|
||||
|
||||
namespace Jsx
|
||||
open Parser PrettyPrinter
|
||||
|
||||
declare_syntax_cat jsxElement
|
||||
declare_syntax_cat jsxChild
|
||||
|
||||
-- JSXTextCharacter : SourceCharacter but not one of {, <, > or }
|
||||
def jsxText : Parser :=
|
||||
withAntiquot (mkAntiquot "jsxText" `jsxText) {
|
||||
fn := fun c s =>
|
||||
let startPos := s.pos
|
||||
let s := takeWhile1Fn (not ∘ "[{<>}]$".contains) "expected JSX text" c s
|
||||
mkNodeToken `jsxText startPos c s }
|
||||
|
||||
@[combinator_formatter DocGen4.Jsx.jsxText] def jsxText.formatter : Formatter := pure ()
|
||||
@[combinator_parenthesizer DocGen4.Jsx.jsxText] def jsxText.parenthesizer : Parenthesizer := pure ()
|
||||
|
||||
syntax jsxAttrName := rawIdent <|> str
|
||||
syntax jsxAttrVal := str <|> group("{" term "}")
|
||||
syntax jsxSimpleAttr := jsxAttrName "=" jsxAttrVal
|
||||
syntax jsxAttrSpread := "[" term "]"
|
||||
syntax jsxAttr := jsxSimpleAttr <|> jsxAttrSpread
|
||||
|
||||
syntax "<" rawIdent jsxAttr* "/>" : jsxElement
|
||||
syntax "<" rawIdent jsxAttr* ">" jsxChild* "</" rawIdent ">" : jsxElement
|
||||
|
||||
syntax jsxText : jsxChild
|
||||
syntax "{" term "}" : jsxChild
|
||||
syntax "[" term "]" : jsxChild
|
||||
syntax jsxElement : jsxChild
|
||||
|
||||
scoped syntax:max jsxElement : term
|
||||
|
||||
def translateAttrs (attrs : Array (TSyntax `DocGen4.Jsx.jsxAttr)) : MacroM (TSyntax `term) := do
|
||||
let mut as ← `(#[])
|
||||
for attr in attrs.map TSyntax.raw do
|
||||
as ← match attr with
|
||||
| `(jsxAttr| $n:jsxAttrName=$v:jsxAttrVal) =>
|
||||
let n ← match n with
|
||||
| `(jsxAttrName| $n:str) => pure n
|
||||
| `(jsxAttrName| $n:ident) => pure <| quote (toString n.getId)
|
||||
| _ => Macro.throwUnsupported
|
||||
let v ← match v with
|
||||
| `(jsxAttrVal| {$v}) => pure v
|
||||
| `(jsxAttrVal| $v:str) => pure v
|
||||
| _ => Macro.throwUnsupported
|
||||
`(($as).push ($n, ($v : String)))
|
||||
| `(jsxAttr| [$t]) => `($as ++ ($t : Array (String × String)))
|
||||
| _ => Macro.throwUnsupported
|
||||
return as
|
||||
|
||||
private def htmlHelper (n : Syntax) (children : Array Syntax) (m : Syntax) : MacroM (String × (TSyntax `term)):= do
|
||||
unless n.getId == m.getId do
|
||||
withRef m <| Macro.throwError s!"Leading and trailing part of tags don't match: '{n}', '{m}'"
|
||||
let mut cs ← `(#[])
|
||||
for child in children do
|
||||
cs ← match child with
|
||||
| `(jsxChild|$t:jsxText) => `(($cs).push (Html.text $(quote t.raw[0]!.getAtomVal)))
|
||||
-- TODO(WN): elab as list of children if type is `t Html` where `Foldable t`
|
||||
| `(jsxChild|{$t}) => `(($cs).push ($t : Html))
|
||||
| `(jsxChild|[$t]) => `($cs ++ ($t : Array Html))
|
||||
| `(jsxChild|$e:jsxElement) => `(($cs).push ($e:jsxElement : Html))
|
||||
| _ => Macro.throwUnsupported
|
||||
let tag := toString n.getId
|
||||
pure <| (tag, cs)
|
||||
|
||||
macro_rules
|
||||
| `(<$n $attrs* />) => do
|
||||
let kind := quote (toString n.getId)
|
||||
let attrs ← translateAttrs attrs
|
||||
`(Html.element $kind true $attrs #[])
|
||||
| `(<$n $attrs* >$children*</$m>) => do
|
||||
let (tag, children) ← htmlHelper n children m
|
||||
`(Html.element $(quote tag) true $(← translateAttrs attrs) $children)
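-- A hedged usage sketch of the DSL above (with `open scoped DocGen4.Jsx`):
--   (<a href="#top">back</a> : Html)
-- elaborates to
--   Html.element "a" true #[("href", "#top")] #[Html.text "back"]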
|
||||
|
||||
end Jsx
|
||||
|
||||
/-- A type which implements `ToHtmlFormat` will be visualized
|
||||
as the resulting HTML in editors which support it. -/
|
||||
class ToHtmlFormat (α : Type u) where
|
||||
formatHtml : α → Html
|
||||
|
||||
end DocGen4
|
|
@ -1,126 +0,0 @@
|
|||
import Lean
|
||||
import DocGen4.Process
|
||||
import DocGen4.Output.Base
|
||||
import DocGen4.Output.Module
|
||||
import Lean.Data.RBMap
|
||||
|
||||
namespace DocGen4.Output
|
||||
|
||||
open Lean
|
||||
|
||||
structure JsonDeclarationInfo where
|
||||
name : String
|
||||
kind : String
|
||||
doc : String
|
||||
docLink : String
|
||||
sourceLink : String
|
||||
line : Nat
|
||||
deriving FromJson, ToJson
|
||||
|
||||
structure JsonDeclaration where
|
||||
info : JsonDeclarationInfo
|
||||
header : String
|
||||
deriving FromJson, ToJson
|
||||
|
||||
structure JsonInstance where
|
||||
name : String
|
||||
className : String
|
||||
typeNames : Array String
|
||||
deriving FromJson, ToJson
|
||||
|
||||
structure JsonModule where
|
||||
name : String
|
||||
declarations : List JsonDeclaration
|
||||
instances : Array JsonInstance
|
||||
imports : Array String
|
||||
deriving FromJson, ToJson
|
||||
|
||||
structure JsonHeaderIndex where
|
||||
headers : List (String × String) := []
|
||||
|
||||
structure JsonIndex where
|
||||
declarations : List (String × JsonDeclarationInfo) := []
|
||||
instances : HashMap String (RBTree String Ord.compare) := .empty
|
||||
importedBy : HashMap String (Array String) := .empty
|
||||
modules : List (String × String) := []
|
||||
instancesFor : HashMap String (RBTree String Ord.compare) := .empty
|
||||
|
||||
instance : ToJson JsonHeaderIndex where
|
||||
toJson idx := Json.mkObj <| idx.headers.map (fun (k, v) => (k, toJson v))
|
||||
|
||||
instance : ToJson JsonIndex where
|
||||
toJson idx := Id.run do
|
||||
let jsonDecls := Json.mkObj <| idx.declarations.map (fun (k, v) => (k, toJson v))
|
||||
let jsonInstances := Json.mkObj <| idx.instances.toList.map (fun (k, v) => (k, toJson v.toArray))
|
||||
let jsonImportedBy := Json.mkObj <| idx.importedBy.toList.map (fun (k, v) => (k, toJson v))
|
||||
let jsonModules := Json.mkObj <| idx.modules.map (fun (k, v) => (k, toJson v))
|
||||
let jsonInstancesFor := Json.mkObj <| idx.instancesFor.toList.map (fun (k, v) => (k, toJson v.toArray))
|
||||
let finalJson := Json.mkObj [
|
||||
("declarations", jsonDecls),
|
||||
("instances", jsonInstances),
|
||||
("importedBy", jsonImportedBy),
|
||||
("modules", jsonModules),
|
||||
("instancesFor", jsonInstancesFor)
|
||||
]
|
||||
return finalJson
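-- A hedged sketch of the JSON object assembled above (hypothetical names, values abbreviated):
--   { "declarations": { "Nat.succ":  { … } },
--     "instances":    { "Add":       ["instAddNat", …] },
--     "importedBy":   { "Init.Core": ["Init.Data.Nat.Basic", …] },
--     "modules":      { "Init.Core": "…/Init/Core.html" },
--     "instancesFor": { "Nat":       ["instAddNat", …] } }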
|
||||
|
||||
def JsonHeaderIndex.addModule (index : JsonHeaderIndex) (module : JsonModule) : JsonHeaderIndex :=
|
||||
let merge idx decl := { idx with headers := (decl.info.name, decl.header) :: idx.headers }
|
||||
module.declarations.foldl merge index
|
||||
|
||||
def JsonIndex.addModule (index : JsonIndex) (module : JsonModule) : BaseHtmlM JsonIndex := do
|
||||
let mut index := index
|
||||
let newModule := (module.name, ← moduleNameToHtmlLink (String.toName module.name))
|
||||
let newDecls := module.declarations.map (fun d => (d.info.name, d.info))
|
||||
index := { index with
|
||||
modules := newModule :: index.modules
|
||||
declarations := newDecls ++ index.declarations
|
||||
}
|
||||
-- TODO: In theory one could sort instances and imports by name and batch the writes
|
||||
for inst in module.instances do
|
||||
let mut insts := index.instances.findD inst.className {}
|
||||
insts := insts.insert inst.name
|
||||
index := { index with instances := index.instances.insert inst.className insts }
|
||||
for typeName in inst.typeNames do
|
||||
let mut instsFor := index.instancesFor.findD typeName {}
|
||||
instsFor := instsFor.insert inst.name
|
||||
index := { index with instancesFor := index.instancesFor.insert typeName instsFor }
|
||||
|
||||
for imp in module.imports do
|
||||
let mut impBy := index.importedBy.findD imp #[]
|
||||
impBy := impBy.push module.name
|
||||
index := { index with importedBy := index.importedBy.insert imp impBy }
|
||||
return index
|
||||
|
||||
def DocInfo.toJson (module : Name) (info : Process.DocInfo) : HtmlM JsonDeclaration := do
|
||||
let name := info.getName.toString
|
||||
let kind := info.getKind
|
||||
let doc := info.getDocString.getD ""
|
||||
let docLink ← declNameToLink info.getName
|
||||
let sourceLink ← getSourceUrl module info.getDeclarationRange
|
||||
let line := info.getDeclarationRange.pos.line
|
||||
let header := (← docInfoHeader info).toString
|
||||
let info := { name, kind, doc, docLink, sourceLink, line }
|
||||
return { info, header }
|
||||
|
||||
def Process.Module.toJson (module : Process.Module) : HtmlM Json := do
|
||||
let mut jsonDecls := []
|
||||
let mut instances := #[]
|
||||
let declInfo := Process.filterDocInfo module.members
|
||||
for decl in declInfo do
|
||||
jsonDecls := (← DocInfo.toJson module.name decl) :: jsonDecls
|
||||
if let .instanceInfo i := decl then
|
||||
instances := instances.push {
|
||||
name := i.name.toString,
|
||||
className := i.className.toString
|
||||
typeNames := i.typeNames.map Name.toString
|
||||
}
|
||||
let jsonMod : JsonModule := {
|
||||
name := module.name.toString,
|
||||
declarations := jsonDecls,
|
||||
instances,
|
||||
imports := module.imports.map Name.toString
|
||||
}
|
||||
return ToJson.toJson jsonMod
|
||||
|
||||
end DocGen4.Output
|
|
@ -1,21 +0,0 @@
|
|||
/-
Copyright (c) 2021 Henrik Böving. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Henrik Böving
-/

import DocGen4.Process.Analyze
import DocGen4.Process.Attributes
import DocGen4.Process.AxiomInfo
import DocGen4.Process.Base
import DocGen4.Process.ClassInfo
import DocGen4.Process.DefinitionInfo
import DocGen4.Process.DocInfo
import DocGen4.Process.Hierarchy
import DocGen4.Process.InductiveInfo
import DocGen4.Process.InstanceInfo
import DocGen4.Process.NameExt
import DocGen4.Process.NameInfo
import DocGen4.Process.OpaqueInfo
import DocGen4.Process.StructureInfo
import DocGen4.Process.TheoremInfo
@ -1,158 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
|
||||
import Lean
|
||||
import Lean.Data.HashMap
|
||||
import Lean.Data.HashSet
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.Hierarchy
|
||||
import DocGen4.Process.DocInfo
|
||||
|
||||
namespace DocGen4.Process
|
||||
|
||||
open Lean Meta
|
||||
|
||||
/--
|
||||
Member of a module, either a declaration or some module doc string.
|
||||
-/
|
||||
inductive ModuleMember where
|
||||
| docInfo (info : DocInfo) : ModuleMember
|
||||
| modDoc (doc : ModuleDoc) : ModuleMember
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
A Lean module.
|
||||
-/
|
||||
structure Module where
|
||||
/--
|
||||
Name of the module.
|
||||
-/
|
||||
name : Name
|
||||
/--
|
||||
All members of the module, sorted according to their line numbers.
|
||||
-/
|
||||
members : Array ModuleMember
|
||||
imports : Array Name
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
The result of running a full doc-gen analysis on a project.
|
||||
-/
|
||||
structure AnalyzerResult where
|
||||
/--
|
||||
The map from module names to indices of the `moduleNames` array.
|
||||
-/
|
||||
name2ModIdx : HashMap Name ModuleIdx
|
||||
/--
|
||||
The list of all modules, accessible nicely via `name2ModIdx`.
|
||||
-/
|
||||
moduleNames : Array Name
|
||||
/--
|
||||
A map from module names to information about these modules.
|
||||
-/
|
||||
moduleInfo : HashMap Name Module
|
||||
deriving Inhabited
|
||||
|
||||
namespace ModuleMember
|
||||
|
||||
def getDeclarationRange : ModuleMember → DeclarationRange
|
||||
| docInfo i => i.getDeclarationRange
|
||||
| modDoc i => i.declarationRange
|
||||
|
||||
/--
|
||||
An order for module members, based on their declaration range.
|
||||
-/
|
||||
def order (l r : ModuleMember) : Bool :=
|
||||
Position.lt l.getDeclarationRange.pos r.getDeclarationRange.pos
|
||||
|
||||
def getName : ModuleMember → Name
|
||||
| docInfo i => i.getName
|
||||
| modDoc _ => Name.anonymous
|
||||
|
||||
def getDocString : ModuleMember → Option String
|
||||
| docInfo i => i.getDocString
|
||||
| modDoc i => i.doc
|
||||
|
||||
def shouldRender : ModuleMember → Bool
|
||||
| docInfo i => i.shouldRender
|
||||
| modDoc _ => true
|
||||
|
||||
end ModuleMember
|
||||
|
||||
inductive AnalyzeTask where
|
||||
| loadAll (load : List Name) : AnalyzeTask
|
||||
| loadAllLimitAnalysis (analyze : List Name) : AnalyzeTask
|
||||
|
||||
def AnalyzeTask.getLoad : AnalyzeTask → List Name
|
||||
| loadAll load => load
|
||||
| loadAllLimitAnalysis load => load
|
||||
|
||||
def getAllModuleDocs (relevantModules : Array Name) : MetaM (HashMap Name Module) := do
|
||||
let env ← getEnv
|
||||
let mut res := mkHashMap relevantModules.size
|
||||
for module in relevantModules do
|
||||
let modDocs := getModuleDoc? env module |>.getD #[] |>.map .modDoc
|
||||
let some modIdx := env.getModuleIdx? module | unreachable!
|
||||
let moduleData := env.header.moduleData.get! modIdx
|
||||
let imports := moduleData.imports.map Import.module
|
||||
res := res.insert module <| Module.mk module modDocs imports
|
||||
return res
|
||||
|
||||
/--
|
||||
Run the doc-gen analysis on all modules that are loaded into the `Environment`
|
||||
of this `MetaM` run and mentioned by the `AnalyzeTask`.
|
||||
-/
|
||||
def process (task : AnalyzeTask) : MetaM (AnalyzerResult × Hierarchy) := do
|
||||
let env ← getEnv
|
||||
let relevantModules := match task with
|
||||
| .loadAll _ => HashSet.fromArray env.header.moduleNames
|
||||
| .loadAllLimitAnalysis analysis => HashSet.fromArray analysis.toArray
|
||||
let allModules := env.header.moduleNames
|
||||
|
||||
let mut res ← getAllModuleDocs relevantModules.toArray
|
||||
|
||||
for (name, cinfo) in env.constants.toList do
|
||||
let some modidx := env.getModuleIdxFor? name | unreachable!
|
||||
let moduleName := env.allImportedModuleNames.get! modidx
|
||||
if !relevantModules.contains moduleName then
|
||||
continue
|
||||
|
||||
try
|
||||
let config := {
|
||||
maxHeartbeats := 5000000,
|
||||
options := ← getOptions,
|
||||
fileName := ← getFileName,
|
||||
fileMap := ← getFileMap
|
||||
}
|
||||
let analysis ← Prod.fst <$> Meta.MetaM.toIO (DocInfo.ofConstant (name, cinfo)) config { env := env } {} {}
|
||||
if let some dinfo := analysis then
|
||||
let moduleName := env.allImportedModuleNames.get! modidx
|
||||
let module := res.find! moduleName
|
||||
res := res.insert moduleName {module with members := module.members.push (ModuleMember.docInfo dinfo)}
|
||||
catch e =>
|
||||
IO.println s!"WARNING: Failed to obtain information for: {name}: {← e.toMessageData.toString}"
|
||||
|
||||
-- TODO: This could probably be faster if we did sorted insert above instead
|
||||
for (moduleName, module) in res.toArray do
|
||||
res := res.insert moduleName {module with members := module.members.qsort ModuleMember.order}
|
||||
|
||||
let hierarchy := Hierarchy.fromArray allModules
|
||||
let analysis := {
|
||||
name2ModIdx := env.const2ModIdx,
|
||||
moduleNames := allModules,
|
||||
moduleInfo := res,
|
||||
}
|
||||
return (analysis, hierarchy)
|
||||
|
||||
def filterDocInfo (ms : Array ModuleMember) : Array DocInfo :=
|
||||
ms.filterMap filter
|
||||
where
|
||||
filter : ModuleMember → Option DocInfo
|
||||
| ModuleMember.docInfo i => some i
|
||||
| _ => none
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,174 +0,0 @@
|
|||
import Lean
|
||||
|
||||
namespace DocGen4
|
||||
|
||||
open Lean Meta
|
||||
-- The following is probably completely overengineered but I love it
|
||||
|
||||
/--
|
||||
Captures the notion of a value based attributes, `attrKind` is things like
|
||||
`EnumAttributes`.
|
||||
-/
|
||||
class ValueAttr (attrKind : Type → Type) where
|
||||
/--
|
||||
Given a certain value based attribute, an `Environment` and the `Name` of
|
||||
a declaration returns the value of the attribute on this declaration if present.
|
||||
-/
|
||||
getValue {α : Type} [Inhabited α] [ToString α] : attrKind α → Environment → Name → Option String
|
||||
|
||||
/--
|
||||
Contains a specific attribute declaration of a certain attribute kind (enum based, parametric etc.).
|
||||
-/
|
||||
structure ValueAttrWrapper (attrKind : Type → Type) [ValueAttr attrKind] where
|
||||
{α : Type}
|
||||
attr : attrKind α
|
||||
[str : ToString α]
|
||||
[inhab : Inhabited α]
|
||||
|
||||
/--
|
||||
Obtain the value of an enum attribute for a certain name.
|
||||
-/
|
||||
def enumGetValue {α : Type} [Inhabited α] [ToString α] (attr : EnumAttributes α) (env : Environment) (decl : Name) : Option String := do
|
||||
let val ← EnumAttributes.getValue attr env decl
|
||||
some (toString val)
|
||||
|
||||
instance : ValueAttr EnumAttributes where
|
||||
getValue := enumGetValue
|
||||
|
||||
/--
|
||||
Obtain the value of a parametric attribute for a certain name.
|
||||
-/
|
||||
def parametricGetValue {α : Type} [Inhabited α] [ToString α] (attr : ParametricAttribute α) (env : Environment) (decl : Name) : Option String := do
|
||||
let val ← ParametricAttribute.getParam? attr env decl
|
||||
some (attr.attr.name.toString ++ " " ++ toString val)
|
||||
|
||||
instance : ValueAttr ParametricAttribute where
|
||||
getValue := parametricGetValue
|
||||
|
||||
abbrev EnumAttrWrapper := ValueAttrWrapper EnumAttributes
|
||||
abbrev ParametricAttrWrapper := ValueAttrWrapper ParametricAttribute
|
||||
|
||||
/--
|
||||
The list of all tag based attributes doc-gen knows about and can recover.
|
||||
-/
|
||||
def tagAttributes : Array TagAttribute :=
|
||||
#[IR.UnboxResult.unboxAttr, neverExtractAttr,
|
||||
Elab.Term.elabWithoutExpectedTypeAttr, matchPatternAttr]
|
||||
|
||||
deriving instance Repr for Compiler.InlineAttributeKind
|
||||
deriving instance Repr for Compiler.SpecializeAttributeKind
|
||||
|
||||
open Compiler in
|
||||
instance : ToString InlineAttributeKind where
|
||||
toString kind :=
|
||||
match kind with
|
||||
| .inline => "inline"
|
||||
| .noinline => "noinline"
|
||||
| .macroInline => "macro_inline"
|
||||
| .inlineIfReduce => "inline_if_reduce"
|
||||
| .alwaysInline => "always_inline"
|
||||
|
||||
open Compiler in
|
||||
instance : ToString SpecializeAttributeKind where
|
||||
toString kind :=
|
||||
match kind with
|
||||
| .specialize => "specialize"
|
||||
| .nospecialize => "nospecialize"
|
||||
|
||||
instance : ToString ReducibilityStatus where
|
||||
toString kind :=
|
||||
match kind with
|
||||
| .reducible => "reducible"
|
||||
| .semireducible => "semireducible"
|
||||
| .irreducible => "irreducible"
|
||||
|
||||
/--
|
||||
The list of all enum based attributes doc-gen knows about and can recover.
|
||||
-/
|
||||
@[reducible]
|
||||
def enumAttributes : Array EnumAttrWrapper := #[⟨Compiler.inlineAttrs⟩, ⟨reducibilityAttrs⟩]
|
||||
|
||||
instance : ToString ExternEntry where
|
||||
toString entry :=
|
||||
match entry with
|
||||
| .adhoc `all => ""
|
||||
| .adhoc backend => s!"{backend} adhoc"
|
||||
| .standard `all fn => fn
|
||||
| .standard backend fn => s!"{backend} {fn}"
|
||||
| .inline backend pattern => s!"{backend} inline {String.quote pattern}"
|
||||
-- TODO: The docs in the module don't specify how to render this
|
||||
| .foreign backend fn => s!"{backend} foreign {fn}"
|
||||
|
||||
instance : ToString ExternAttrData where
|
||||
toString data := (data.arity?.map toString |>.getD "") ++ " " ++ String.intercalate " " (data.entries.map toString)
|
||||
|
||||
/--
|
||||
The list of all parametric attributes (that is, attributes with any kind of information attached)
|
||||
doc-gen knows about and can recover.
|
||||
-/
|
||||
def parametricAttributes : Array ParametricAttrWrapper := #[⟨externAttr⟩, ⟨Compiler.implementedByAttr⟩, ⟨exportAttr⟩, ⟨Compiler.specializeAttr⟩]
|
||||
|
||||
def getTags (decl : Name) : MetaM (Array String) := do
|
||||
let env ← getEnv
|
||||
return tagAttributes.filter (TagAttribute.hasTag · env decl) |>.map (·.attr.name.toString)
|
||||
|
||||
def getValuesAux {α : Type} {attrKind : Type → Type} [va : ValueAttr attrKind] [Inhabited α] [ToString α] (decl : Name) (attr : attrKind α) : MetaM (Option String) := do
|
||||
let env ← getEnv
|
||||
return va.getValue attr env decl
|
||||
|
||||
def getValues {attrKind : Type → Type} [ValueAttr attrKind] (decl : Name) (attrs : Array (ValueAttrWrapper attrKind)) : MetaM (Array String) := do
|
||||
let mut res := #[]
|
||||
for attr in attrs do
|
||||
if let some val ← @getValuesAux attr.α attrKind _ attr.inhab attr.str decl attr.attr then
|
||||
res := res.push val
|
||||
return res
|
||||
|
||||
def getEnumValues (decl : Name) : MetaM (Array String) := getValues decl enumAttributes
|
||||
def getParametricValues (decl : Name) : MetaM (Array String) := getValues decl parametricAttributes
|
||||
|
||||
def getDefaultInstance (decl : Name) (className : Name) : MetaM (Option String) := do
|
||||
let insts ← getDefaultInstances className
|
||||
for (inst, prio) in insts do
|
||||
if inst == decl then
|
||||
return some s!"defaultInstance {prio}"
|
||||
return none
|
||||
|
||||
def hasSimp (decl : Name) : MetaM (Option String) := do
|
||||
let thms ← simpExtension.getTheorems
|
||||
if thms.isLemma (.decl decl) then
|
||||
return "simp"
|
||||
else
|
||||
return none
|
||||
|
||||
def hasCsimp (decl : Name) : MetaM (Option String) := do
|
||||
let env ← getEnv
|
||||
if Compiler.hasCSimpAttribute env decl then
|
||||
return some "csimp"
|
||||
else
|
||||
return none
|
||||
|
||||
/--
|
||||
The list of custom attributes (those that don't fit the parametric or enum
|
||||
attribute kinds) that doc-gen knows about and can recover.
|
||||
-/
|
||||
def customAttrs := #[hasSimp, hasCsimp]
|
||||
|
||||
def getCustomAttrs (decl : Name) : MetaM (Array String) := do
|
||||
let mut values := #[]
|
||||
for attr in customAttrs do
|
||||
if let some value ← attr decl then
|
||||
values := values.push value
|
||||
return values
|
||||
|
||||
/--
|
||||
The main entry point for recovering all attribute values for a given
|
||||
declaration.
|
||||
-/
|
||||
def getAllAttributes (decl : Name) : MetaM (Array String) := do
|
||||
let tags ← getTags decl
|
||||
let enums ← getEnumValues decl
|
||||
let parametric ← getParametricValues decl
|
||||
let customs ← getCustomAttrs decl
|
||||
return customs ++ tags ++ enums ++ parametric
|
||||
|
||||
end DocGen4
|
|
@ -1,22 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.NameInfo
|
||||
|
||||
namespace DocGen4.Process
|
||||
|
||||
open Lean Meta
|
||||
|
||||
def AxiomInfo.ofAxiomVal (v : AxiomVal) : MetaM AxiomInfo := do
|
||||
let info ← Info.ofConstantVal v.toConstantVal
|
||||
return {
|
||||
toInfo := info,
|
||||
isUnsafe := v.isUnsafe
|
||||
}
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,196 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
|
||||
import Lean
|
||||
|
||||
namespace DocGen4.Process
|
||||
open Lean Widget Meta
|
||||
|
||||
/--
|
||||
Stores information about a typed name.
|
||||
-/
|
||||
structure NameInfo where
|
||||
/--
|
||||
The name that has this info attached.
|
||||
-/
|
||||
name : Name
|
||||
/--
|
||||
The pretty printed type of this name.
|
||||
-/
|
||||
type : CodeWithInfos
|
||||
/--
|
||||
The doc string of the name if it exists.
|
||||
-/
|
||||
doc : Option String
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
An argument to a declaration, e.g. the `(x : Nat)` in `def foo (x : Nat) := x`.
|
||||
-/
|
||||
structure Arg where
|
||||
/--
|
||||
The name of the argument.
|
||||
-/
|
||||
name : Name
|
||||
/--
|
||||
The pretty printed type of the argument.
|
||||
-/
|
||||
type : CodeWithInfos
|
||||
/--
|
||||
What kind of binder was used for the argument.
|
||||
-/
|
||||
binderInfo : BinderInfo
|
||||
|
||||
/--
|
||||
A base structure for information about a declaration.
|
||||
-/
|
||||
structure Info extends NameInfo where
|
||||
/--
|
||||
The list of arguments to the declaration.
|
||||
-/
|
||||
args : Array Arg
|
||||
/--
|
||||
The range of lines in which the declaration was created.
|
||||
-/
|
||||
declarationRange : DeclarationRange
|
||||
/--
|
||||
A list of (known) attributes that are attached to the declaration.
|
||||
-/
|
||||
attrs : Array String
|
||||
/--
|
||||
Whether this info item should be rendered.
|
||||
-/
|
||||
render : Bool := true
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Information about an `axiom` declaration.
|
||||
-/
|
||||
structure AxiomInfo extends Info where
|
||||
isUnsafe : Bool
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Information about a `theorem` declaration.
|
||||
-/
|
||||
structure TheoremInfo extends Info
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Information about an `opaque` declaration.
|
||||
-/
|
||||
structure OpaqueInfo extends Info where
|
||||
/--
|
||||
The pretty printed value of the declaration.
|
||||
-/
|
||||
value : CodeWithInfos
|
||||
/--
|
||||
A value of `partial` is interpreted as this opaque constant being part of a `partial def`,
|
||||
since the actual definition for a partial def is hidden behind an inaccessible value.
|
||||
-/
|
||||
definitionSafety : DefinitionSafety
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Information about a `def` declaration. Note that partial defs are handled by `OpaqueInfo`.
|
||||
-/
|
||||
structure DefinitionInfo extends Info where
|
||||
isUnsafe : Bool
|
||||
hints : ReducibilityHints
|
||||
equations : Option (Array CodeWithInfos)
|
||||
isNonComputable : Bool
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Information about an `instance` declaration.
|
||||
-/
|
||||
structure InstanceInfo extends DefinitionInfo where
|
||||
className : Name
|
||||
typeNames : Array Name
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Information about an `inductive` declaration.
|
||||
-/
|
||||
structure InductiveInfo extends Info where
|
||||
/--
|
||||
List of all constructors of this inductive type.
|
||||
-/
|
||||
ctors : List NameInfo
|
||||
isUnsafe : Bool
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Information about a `structure` declaration.
|
||||
-/
|
||||
structure StructureInfo extends Info where
|
||||
/--
|
||||
Information about all the fields of the structure.
|
||||
-/
|
||||
fieldInfo : Array NameInfo
|
||||
/--
|
||||
All the structures this one inherited from.
|
||||
-/
|
||||
parents : Array Name
|
||||
/--
|
||||
The constructor of the structure.
|
||||
-/
|
||||
ctor : NameInfo
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Information about a `class` declaration.
|
||||
-/
|
||||
abbrev ClassInfo := StructureInfo
|
||||
|
||||
/--
|
||||
Information about a `class inductive` declaration.
|
||||
-/
|
||||
abbrev ClassInductiveInfo := InductiveInfo
|
||||
|
||||
|
||||
/--
|
||||
Information about a constructor of an inductive type.
|
||||
-/
|
||||
abbrev ConstructorInfo := Info
|
||||
|
||||
/--
|
||||
A general type for information about declarations.
|
||||
-/
|
||||
inductive DocInfo where
|
||||
| axiomInfo (info : AxiomInfo) : DocInfo
|
||||
| theoremInfo (info : TheoremInfo) : DocInfo
|
||||
| opaqueInfo (info : OpaqueInfo) : DocInfo
|
||||
| definitionInfo (info : DefinitionInfo) : DocInfo
|
||||
| instanceInfo (info : InstanceInfo) : DocInfo
|
||||
| inductiveInfo (info : InductiveInfo) : DocInfo
|
||||
| structureInfo (info : StructureInfo) : DocInfo
|
||||
| classInfo (info : ClassInfo) : DocInfo
|
||||
| classInductiveInfo (info : ClassInductiveInfo) : DocInfo
|
||||
| ctorInfo (info : ConstructorInfo) : DocInfo
|
||||
deriving Inhabited
|
||||
|
||||
/--
|
||||
Turns an `Expr` into a pretty printed `CodeWithInfos`.
|
||||
-/
|
||||
def prettyPrintTerm (expr : Expr) : MetaM CodeWithInfos := do
|
||||
let ⟨fmt, infos⟩ ← PrettyPrinter.ppExprWithInfos expr
|
||||
let tt := TaggedText.prettyTagged fmt
|
||||
let ctx := {
|
||||
env := ← getEnv
|
||||
mctx := ← getMCtx
|
||||
options := ← getOptions
|
||||
currNamespace := ← getCurrNamespace
|
||||
openDecls := ← getOpenDecls
|
||||
fileMap := default,
|
||||
ngen := ← getNGen
|
||||
}
|
||||
return tagCodeInfos ctx infos tt
|
||||
|
||||
def isInstance (declName : Name) : MetaM Bool := do
|
||||
return (instanceExtension.getState (← getEnv)).instanceNames.contains declName
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,24 +0,0 @@
|
|||
|
||||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.NameInfo
|
||||
import DocGen4.Process.StructureInfo
|
||||
import DocGen4.Process.InductiveInfo
|
||||
|
||||
namespace DocGen4.Process
|
||||
|
||||
open Lean Meta
|
||||
|
||||
def ClassInfo.ofInductiveVal (v : InductiveVal) : MetaM ClassInfo := do
|
||||
StructureInfo.ofInductiveVal v
|
||||
|
||||
def ClassInductiveInfo.ofInductiveVal (v : InductiveVal) : MetaM ClassInductiveInfo := do
|
||||
InductiveInfo.ofInductiveVal v
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,75 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.NameInfo
|
||||
|
||||
namespace DocGen4.Process
|
||||
|
||||
open Lean Meta Widget
|
||||
|
||||
partial def stripArgs (e : Expr) : Expr :=
|
||||
match e.consumeMData with
|
||||
| Expr.lam name _ body _ =>
|
||||
let name := name.eraseMacroScopes
|
||||
stripArgs (Expr.instantiate1 body (mkFVar ⟨name⟩))
|
||||
| Expr.forallE name _ body _ =>
|
||||
let name := name.eraseMacroScopes
|
||||
stripArgs (Expr.instantiate1 body (mkFVar ⟨name⟩))
|
||||
| _ => e
|
||||
|
||||
def processEq (eq : Name) : MetaM CodeWithInfos := do
|
||||
let type ← (mkConstWithFreshMVarLevels eq >>= inferType)
|
||||
let final := stripArgs type
|
||||
prettyPrintTerm final
|
||||
|
||||
def valueToEq (v : DefinitionVal) : MetaM Expr := withLCtx {} {} do
|
||||
withOptions (tactic.hygienic.set . false) do
|
||||
lambdaTelescope v.value fun xs body => do
|
||||
let us := v.levelParams.map mkLevelParam
|
||||
let type ← mkEq (mkAppN (Lean.mkConst v.name us) xs) body
|
||||
let type ← mkForallFVars xs type
|
||||
return type
|
||||
|
||||
def DefinitionInfo.ofDefinitionVal (v : DefinitionVal) : MetaM DefinitionInfo := do
|
||||
let info ← Info.ofConstantVal v.toConstantVal
|
||||
let isUnsafe := v.safety == DefinitionSafety.unsafe
|
||||
let isNonComputable := isNoncomputable (← getEnv) v.name
|
||||
try
|
||||
let eqs? ← getEqnsFor? v.name
|
||||
match eqs? with
|
||||
| some eqs =>
|
||||
let equations ← eqs.mapM processEq
|
||||
return {
|
||||
toInfo := info,
|
||||
isUnsafe,
|
||||
hints := v.hints,
|
||||
equations,
|
||||
isNonComputable
|
||||
}
|
||||
| none =>
|
||||
let equations := #[← prettyPrintTerm <| stripArgs (← valueToEq v)]
|
||||
return {
|
||||
toInfo := info,
|
||||
isUnsafe,
|
||||
hints := v.hints,
|
||||
equations,
|
||||
isNonComputable
|
||||
}
|
||||
catch err =>
|
||||
IO.println s!"WARNING: Failed to calculate equational lemmata for {v.name}: {← err.toMessageData.toString}"
|
||||
return {
|
||||
toInfo := info,
|
||||
isUnsafe,
|
||||
hints := v.hints,
|
||||
equations := none,
|
||||
isNonComputable
|
||||
}
|
||||
|
||||
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,223 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.AxiomInfo
|
||||
import DocGen4.Process.TheoremInfo
|
||||
import DocGen4.Process.OpaqueInfo
|
||||
import DocGen4.Process.InstanceInfo
|
||||
import DocGen4.Process.DefinitionInfo
|
||||
import DocGen4.Process.ClassInfo
|
||||
import DocGen4.Process.StructureInfo
|
||||
import DocGen4.Process.InductiveInfo
|
||||
|
||||
|
||||
namespace DocGen4.Process
|
||||
namespace DocInfo
|
||||
|
||||
open Lean Meta Widget
|
||||
|
||||
def getDeclarationRange : DocInfo → DeclarationRange
|
||||
| axiomInfo i => i.declarationRange
|
||||
| theoremInfo i => i.declarationRange
|
||||
| opaqueInfo i => i.declarationRange
|
||||
| definitionInfo i => i.declarationRange
|
||||
| instanceInfo i => i.declarationRange
|
||||
| inductiveInfo i => i.declarationRange
|
||||
| structureInfo i => i.declarationRange
|
||||
| classInfo i => i.declarationRange
|
||||
| classInductiveInfo i => i.declarationRange
|
||||
| ctorInfo i => i.declarationRange
|
||||
|
||||
def getName : DocInfo → Name
|
||||
| axiomInfo i => i.name
|
||||
| theoremInfo i => i.name
|
||||
| opaqueInfo i => i.name
|
||||
| definitionInfo i => i.name
|
||||
| instanceInfo i => i.name
|
||||
| inductiveInfo i => i.name
|
||||
| structureInfo i => i.name
|
||||
| classInfo i => i.name
|
||||
| classInductiveInfo i => i.name
|
||||
| ctorInfo i => i.name
|
||||
|
||||
def getKind : DocInfo → String
|
||||
| axiomInfo _ => "axiom"
|
||||
| theoremInfo _ => "theorem"
|
||||
| opaqueInfo _ => "opaque"
|
||||
| definitionInfo _ => "def"
|
||||
| instanceInfo _ => "instance"
|
||||
| inductiveInfo _ => "inductive"
|
||||
| structureInfo _ => "structure"
|
||||
| classInfo _ => "class"
|
||||
| classInductiveInfo _ => "class"
|
||||
| ctorInfo _ => "ctor" -- TODO: kind ctor support in js
|
||||
|
||||
def getType : DocInfo → CodeWithInfos
|
||||
| axiomInfo i => i.type
|
||||
| theoremInfo i => i.type
|
||||
| opaqueInfo i => i.type
|
||||
| definitionInfo i => i.type
|
||||
| instanceInfo i => i.type
|
||||
| inductiveInfo i => i.type
|
||||
| structureInfo i => i.type
|
||||
| classInfo i => i.type
|
||||
| classInductiveInfo i => i.type
|
||||
| ctorInfo i => i.type
|
||||
|
||||
def getArgs : DocInfo → Array Arg
|
||||
| axiomInfo i => i.args
|
||||
| theoremInfo i => i.args
|
||||
| opaqueInfo i => i.args
|
||||
| definitionInfo i => i.args
|
||||
| instanceInfo i => i.args
|
||||
| inductiveInfo i => i.args
|
||||
| structureInfo i => i.args
|
||||
| classInfo i => i.args
|
||||
| classInductiveInfo i => i.args
|
||||
| ctorInfo i => i.args
|
||||
|
||||
def getAttrs : DocInfo → Array String
|
||||
| axiomInfo i => i.attrs
|
||||
| theoremInfo i => i.attrs
|
||||
| opaqueInfo i => i.attrs
|
||||
| definitionInfo i => i.attrs
|
||||
| instanceInfo i => i.attrs
|
||||
| inductiveInfo i => i.attrs
|
||||
| structureInfo i => i.attrs
|
||||
| classInfo i => i.attrs
|
||||
| classInductiveInfo i => i.attrs
|
||||
| ctorInfo i => i.attrs
|
||||
|
||||
def getDocString : DocInfo → Option String
|
||||
| axiomInfo i => i.doc
|
||||
| theoremInfo i => i.doc
|
||||
| opaqueInfo i => i.doc
|
||||
| definitionInfo i => i.doc
|
||||
| instanceInfo i => i.doc
|
||||
| inductiveInfo i => i.doc
|
||||
| structureInfo i => i.doc
|
||||
| classInfo i => i.doc
|
||||
| classInductiveInfo i => i.doc
|
||||
| ctorInfo i => i.doc
|
||||
|
||||
def shouldRender : DocInfo → Bool
|
||||
| axiomInfo i => i.render
|
||||
| theoremInfo i => i.render
|
||||
| opaqueInfo i => i.render
|
||||
| definitionInfo i => i.render
|
||||
| instanceInfo i => i.render
|
||||
| inductiveInfo i => i.render
|
||||
| structureInfo i => i.render
|
||||
| classInfo i => i.render
|
||||
| classInductiveInfo i => i.render
|
||||
| ctorInfo i => i.render
|
||||
|
||||
def isBlackListed (declName : Name) : MetaM Bool := do
|
||||
match ← findDeclarationRanges? declName with
|
||||
| some _ =>
|
||||
let env ← getEnv
|
||||
pure (declName.isInternal)
|
||||
<||> (pure <| isAuxRecursor env declName)
|
||||
<||> (pure <| isNoConfusion env declName)
|
||||
<||> isRec declName
|
||||
<||> isMatcher declName
|
||||
-- TODO: Evaluate whether filtering out declarations without range is sensible
|
||||
| none => return true
|
||||
|
||||
-- TODO: Is this actually the best way?
|
||||
def isProjFn (declName : Name) : MetaM Bool := do
|
||||
let env ← getEnv
|
||||
match declName with
|
||||
| Name.str parent name =>
|
||||
if isStructure env parent then
|
||||
match getStructureInfo? env parent with
|
||||
| some i =>
|
||||
match i.fieldNames.find? (· == name) with
|
||||
| some _ => return true
|
||||
| none => return false
|
||||
| none => panic! s!"{parent} is not a structure"
|
||||
else
|
||||
return false
|
||||
| _ => return false
|
||||
|
||||
def ofConstant : (Name × ConstantInfo) → MetaM (Option DocInfo) := fun (name, info) => do
|
||||
if ← isBlackListed name then
|
||||
return none
|
||||
match info with
|
||||
| ConstantInfo.axiomInfo i => return some <| axiomInfo (← AxiomInfo.ofAxiomVal i)
|
||||
| ConstantInfo.thmInfo i => return some <| theoremInfo (← TheoremInfo.ofTheoremVal i)
|
||||
| ConstantInfo.opaqueInfo i => return some <| opaqueInfo (← OpaqueInfo.ofOpaqueVal i)
|
||||
| ConstantInfo.defnInfo i =>
|
||||
if ← isProjFn i.name then
|
||||
let info ← DefinitionInfo.ofDefinitionVal i
|
||||
return some <| definitionInfo { info with render := false }
|
||||
else
|
||||
if ← isInstance i.name then
|
||||
let info ← InstanceInfo.ofDefinitionVal i
|
||||
return some <| instanceInfo info
|
||||
else
|
||||
let info ← DefinitionInfo.ofDefinitionVal i
|
||||
return some <| definitionInfo info
|
||||
| ConstantInfo.inductInfo i =>
|
||||
let env ← getEnv
|
||||
if isStructure env i.name then
|
||||
if isClass env i.name then
|
||||
return some <| classInfo (← ClassInfo.ofInductiveVal i)
|
||||
else
|
||||
return some <| structureInfo (← StructureInfo.ofInductiveVal i)
|
||||
else
|
||||
if isClass env i.name then
|
||||
return some <| classInductiveInfo (← ClassInductiveInfo.ofInductiveVal i)
|
||||
else
|
||||
return some <| inductiveInfo (← InductiveInfo.ofInductiveVal i)
|
||||
| ConstantInfo.ctorInfo i =>
|
||||
let info ← Info.ofConstantVal i.toConstantVal
|
||||
return some <| ctorInfo { info with render := false }
|
||||
-- we ignore these for now
|
||||
| ConstantInfo.recInfo _ | ConstantInfo.quotInfo _ => return none
|
||||
|
||||
def getKindDescription : DocInfo → String
|
||||
| axiomInfo i => if i.isUnsafe then "unsafe axiom" else "axiom"
|
||||
| theoremInfo _ => "theorem"
|
||||
| opaqueInfo i =>
|
||||
match i.definitionSafety with
|
||||
| DefinitionSafety.safe => "opaque"
|
||||
| DefinitionSafety.unsafe => "unsafe opaque"
|
||||
| DefinitionSafety.partial => "partial def"
|
||||
| definitionInfo i => Id.run do
|
||||
let mut modifiers := #[]
|
||||
if i.isUnsafe then
|
||||
modifiers := modifiers.push "unsafe"
|
||||
if i.isNonComputable then
|
||||
modifiers := modifiers.push "noncomputable"
|
||||
|
||||
let defKind :=
|
||||
if i.hints.isAbbrev then
|
||||
"abbrev"
|
||||
else
|
||||
"def"
|
||||
modifiers := modifiers.push defKind
|
||||
return String.intercalate " " modifiers.toList
|
||||
| instanceInfo i => Id.run do
|
||||
let mut modifiers := #[]
|
||||
if i.isUnsafe then
|
||||
modifiers := modifiers.push "unsafe"
|
||||
if i.isNonComputable then
|
||||
modifiers := modifiers.push "noncomputable"
|
||||
|
||||
modifiers := modifiers.push "instance"
|
||||
return String.intercalate " " modifiers.toList
|
||||
| inductiveInfo i => if i.isUnsafe then "unsafe inductive" else "inductive"
|
||||
| structureInfo _ => "structure"
|
||||
| classInfo _ => "class"
|
||||
| classInductiveInfo _ => "class inductive"
|
||||
| ctorInfo _ => "constructor"
|
||||
|
||||
end DocInfo
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,136 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2021 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
import Lean.Data.HashMap
|
||||
|
||||
import DocGen4.Process.NameExt
|
||||
|
||||
def Lean.HashSet.fromArray [BEq α] [Hashable α] (xs : Array α) : Lean.HashSet α :=
|
||||
xs.foldr (flip .insert) .empty
|
||||
|
||||
namespace DocGen4
|
||||
|
||||
open Lean Name
|
||||
|
||||
def getNLevels (name : Name) (levels: Nat) : Name :=
|
||||
let components := name.componentsRev
|
||||
(components.drop (components.length - levels)).reverse.foldl (· ++ ·) Name.anonymous
|
||||
|
||||
inductive Hierarchy where
|
||||
| node (name : NameExt) (isFile : Bool) (children : RBNode NameExt (fun _ => Hierarchy)) : Hierarchy
|
||||
|
||||
instance : Inhabited Hierarchy := ⟨Hierarchy.node ⟨.anonymous, .html⟩ false RBNode.leaf⟩
|
||||
|
||||
abbrev HierarchyMap := RBNode NameExt (fun _ => Hierarchy)
|
||||
|
||||
-- Everything in this namespace is adapted from stdlib's RBNode
|
||||
namespace HierarchyMap
|
||||
|
||||
def toList : HierarchyMap → List (NameExt × Hierarchy)
|
||||
| t => t.revFold (fun ps k v => (k, v)::ps) []
|
||||
|
||||
def toArray : HierarchyMap → Array (NameExt × Hierarchy)
|
||||
| t => t.fold (fun ps k v => ps ++ #[(k, v)] ) #[]
|
||||
|
||||
def hForIn [Monad m] (t : HierarchyMap) (init : σ) (f : (NameExt × Hierarchy) → σ → m (ForInStep σ)) : m σ :=
|
||||
t.forIn init (fun a b acc => f (a, b) acc)
|
||||
|
||||
instance : ForIn m HierarchyMap (NameExt × Hierarchy) where
|
||||
forIn := HierarchyMap.hForIn
|
||||
|
||||
end HierarchyMap
|
||||
|
||||
namespace Hierarchy
|
||||
|
||||
def empty (n : NameExt) (isFile : Bool) : Hierarchy :=
|
||||
node n isFile RBNode.leaf
|
||||
|
||||
def getName : Hierarchy → Name
|
||||
| node n _ _ => n.name
|
||||
|
||||
def getNameExt : Hierarchy → NameExt
|
||||
| node n _ _ => n
|
||||
|
||||
def getChildren : Hierarchy → HierarchyMap
|
||||
| node _ _ c => c
|
||||
|
||||
def isFile : Hierarchy → Bool
|
||||
| node _ f _ => f
|
||||
|
||||
partial def insert! (h : Hierarchy) (n : NameExt) : Hierarchy := Id.run do
|
||||
let hn := h.getNameExt
|
||||
let mut cs := h.getChildren
|
||||
|
||||
if getNumParts hn.name + 1 == getNumParts n.name then
|
||||
match cs.find NameExt.cmp n with
|
||||
| none =>
|
||||
node hn h.isFile (cs.insert NameExt.cmp n <| empty n true)
|
||||
| some (node _ true _) => h
|
||||
| some (node _ false ccs) =>
|
||||
cs := cs.erase NameExt.cmp n
|
||||
node hn h.isFile (cs.insert NameExt.cmp n <| node n true ccs)
|
||||
else
|
||||
let leveled := ⟨getNLevels n.name (getNumParts hn.name + 1), .html⟩
|
||||
match cs.find NameExt.cmp leveled with
|
||||
| some nextLevel =>
|
||||
cs := cs.erase NameExt.cmp leveled
|
||||
-- BUG?
|
||||
node hn h.isFile <| cs.insert NameExt.cmp leveled (nextLevel.insert! n)
|
||||
| none =>
|
||||
let child := (insert! (empty leveled false) n)
|
||||
node hn h.isFile <| cs.insert NameExt.cmp leveled child
|
||||
|
||||
partial def fromArray (names : Array Name) : Hierarchy :=
|
||||
(names.map (fun n => NameExt.mk n .html)).foldl insert! (empty ⟨anonymous, .html⟩ false)
|
||||
|
||||
partial def fromArrayExt (names : Array NameExt) : Hierarchy :=
|
||||
names.foldl insert! (empty ⟨anonymous, .html⟩ false)
|
||||
|
||||
def baseDirBlackList : HashSet String :=
|
||||
HashSet.fromArray #[
|
||||
"404.html",
|
||||
"color-scheme.js",
|
||||
"declaration-data.js",
|
||||
"declarations",
|
||||
"find",
|
||||
"how-about.js",
|
||||
"index.html",
|
||||
"search.html",
|
||||
"foundational_types.html",
|
||||
"mathjax-config.js",
|
||||
"navbar.html",
|
||||
"nav.js",
|
||||
"search.js",
|
||||
"src",
|
||||
"style.css"
|
||||
]
|
||||
|
||||
partial def fromDirectoryAux (dir : System.FilePath) (previous : Name) : IO (Array NameExt) := do
|
||||
let mut children := #[]
|
||||
for entry in ← System.FilePath.readDir dir do
|
||||
if ← entry.path.isDir then
|
||||
children := children ++ (← fromDirectoryAux entry.path (.str previous entry.fileName))
|
||||
else if entry.path.extension = some "html" then
|
||||
children := children.push <| ⟨.str previous (entry.fileName.dropRight ".html".length), .html⟩
|
||||
else if entry.path.extension = some "pdf" then
|
||||
children := children.push <| ⟨.str previous (entry.fileName.dropRight ".pdf".length), .pdf⟩
|
||||
return children
|
||||
|
||||
def fromDirectory (dir : System.FilePath) : IO Hierarchy := do
|
||||
let mut children := #[]
|
||||
for entry in ← System.FilePath.readDir dir do
|
||||
if baseDirBlackList.contains entry.fileName then
|
||||
continue
|
||||
else if ← entry.path.isDir then
|
||||
children := children ++ (← fromDirectoryAux entry.path (.mkSimple entry.fileName))
|
||||
else if entry.path.extension = some "html" then
|
||||
children := children.push <| ⟨.mkSimple (entry.fileName.dropRight ".html".length), .html⟩
|
||||
else if entry.path.extension = some "pdf" then
|
||||
children := children.push <| ⟨.mkSimple (entry.fileName.dropRight ".pdf".length), .pdf⟩
|
||||
return Hierarchy.fromArrayExt children
|
||||
|
||||
end Hierarchy
|
||||
end DocGen4
|
|
@ -1,30 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.NameInfo
|
||||
|
||||
namespace DocGen4.Process
|
||||
|
||||
open Lean Meta
|
||||
|
||||
def getConstructorType (ctor : Name) : MetaM Expr := do
|
||||
let env ← getEnv
|
||||
match env.find? ctor with
|
||||
| some (ConstantInfo.ctorInfo i) => pure i.type
|
||||
| _ => panic! s!"Constructor {ctor} was requested but does not exist"
|
||||
|
||||
def InductiveInfo.ofInductiveVal (v : InductiveVal) : MetaM InductiveInfo := do
|
||||
let info ← Info.ofConstantVal v.toConstantVal
|
||||
let ctors ← v.ctors.mapM (fun name => do NameInfo.ofTypedName name (← getConstructorType name))
|
||||
return {
|
||||
toInfo := info,
|
||||
ctors,
|
||||
isUnsafe := v.isUnsafe
|
||||
}
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,41 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.NameInfo
|
||||
import DocGen4.Process.DefinitionInfo
|
||||
|
||||
namespace DocGen4.Process
|
||||
|
||||
open Lean Meta
|
||||
|
||||
def getInstanceTypes (typ : Expr) : MetaM (Array Name) := do
|
||||
let (_, _, tail) ← forallMetaTelescopeReducing typ
|
||||
let args := tail.getAppArgs
|
||||
let (_, names) ← args.mapM (Expr.forEach · findName) |>.run .empty
|
||||
return names
|
||||
where
|
||||
findName : Expr → StateRefT (Array Name) MetaM Unit
|
||||
| .const name _ => modify (·.push name)
|
||||
| .sort .zero => modify (·.push "_builtin_prop")
|
||||
| .sort (.succ _) => modify (·.push "_builtin_typeu")
|
||||
| .sort _ => modify (·.push "_builtin_sortu")
|
||||
| _ => return ()
|
||||
|
||||
def InstanceInfo.ofDefinitionVal (v : DefinitionVal) : MetaM InstanceInfo := do
|
||||
let mut info ← DefinitionInfo.ofDefinitionVal v
|
||||
let some className ← isClass? v.type | unreachable!
|
||||
if let some instAttr ← getDefaultInstance v.name className then
|
||||
info := { info with attrs := info.attrs.push instAttr }
|
||||
let typeNames ← getInstanceTypes v.type
|
||||
return {
|
||||
toDefinitionInfo := info,
|
||||
className,
|
||||
typeNames
|
||||
}
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,48 +0,0 @@
|
|||
/-
|
||||
A generalization of `Lean.Name` that includes a file extension.
|
||||
-/
|
||||
import Lean
|
||||
|
||||
open Lean Name
|
||||
|
||||
inductive Extension where
|
||||
| html
|
||||
| pdf
|
||||
deriving Repr
|
||||
|
||||
namespace Extension
|
||||
|
||||
def cmp : Extension → Extension → Ordering
|
||||
| html, html => Ordering.eq
|
||||
| html, _ => Ordering.lt
|
||||
| pdf, pdf => Ordering.eq
|
||||
| pdf, _ => Ordering.gt
|
||||
|
||||
instance : BEq Extension where
|
||||
beq e1 e2 :=
|
||||
match cmp e1 e2 with
|
||||
| Ordering.eq => true
|
||||
| _ => false
|
||||
|
||||
def toString : Extension → String
|
||||
| html => "html"
|
||||
| pdf => "pdf"
|
||||
|
||||
end Extension
|
||||
|
||||
structure NameExt where
|
||||
name : Name
|
||||
ext : Extension
|
||||
|
||||
namespace NameExt
|
||||
|
||||
def cmp (n₁ n₂ : NameExt) : Ordering :=
|
||||
match Name.cmp n₁.name n₂.name with
|
||||
| Ordering.eq => Extension.cmp n₁.ext n₂.ext
|
||||
| ord => ord
|
||||
|
||||
def getString! : NameExt → String
|
||||
| ⟨str _ s, _⟩ => s
|
||||
| _ => unreachable!
|
||||
|
||||
end NameExt
|
|
@ -1,50 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.Attributes
|
||||
|
||||
namespace DocGen4.Process
|
||||
open Lean Meta
|
||||
|
||||
def NameInfo.ofTypedName (n : Name) (t : Expr) : MetaM NameInfo := do
|
||||
let env ← getEnv
|
||||
return { name := n, type := ← prettyPrintTerm t, doc := ← findDocString? env n}
|
||||
|
||||
partial def typeToArgsType (e : Expr) : (Array (Name × Expr × BinderInfo) × Expr) :=
|
||||
let helper := fun name type body data =>
|
||||
-- Once we hit a name with a macro scope we stop traversing the expression
|
||||
-- and print what is left after the : instead. The only exception
|
||||
-- to this is instances since these almost never have a name
|
||||
-- but should still be printed as arguments instead of after the :.
|
||||
if name.hasMacroScopes && !data.isInstImplicit then
|
||||
(#[], e)
|
||||
else
|
||||
let name := name.eraseMacroScopes
|
||||
let arg := (name, type, data)
|
||||
let (args, final) := typeToArgsType (Expr.instantiate1 body (mkFVar ⟨name⟩))
|
||||
(#[arg] ++ args, final)
|
||||
match e.consumeMData with
|
||||
| Expr.lam name type body binderInfo => helper name type body binderInfo
|
||||
| Expr.forallE name type body binderInfo => helper name type body binderInfo
|
||||
| _ => (#[], e)
|
||||
|
||||
def Info.ofConstantVal (v : ConstantVal) : MetaM Info := do
|
||||
let (args, type) := typeToArgsType v.type
|
||||
let args ← args.mapM (fun (n, e, b) => do return Arg.mk n (← prettyPrintTerm e) b)
|
||||
let nameInfo ← NameInfo.ofTypedName v.name type
|
||||
match ← findDeclarationRanges? v.name with
|
||||
-- TODO: Maybe selection range is more relevant? Figure this out in the future
|
||||
| some range => return {
|
||||
toNameInfo := nameInfo,
|
||||
args,
|
||||
declarationRange := range.range,
|
||||
attrs := ← getAllAttributes v.name
|
||||
}
|
||||
| none => panic! s!"{v.name} is a declaration without position"
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,33 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.NameInfo
|
||||
|
||||
namespace DocGen4.Process
|
||||
|
||||
open Lean Meta
|
||||
|
||||
def OpaqueInfo.ofOpaqueVal (v : OpaqueVal) : MetaM OpaqueInfo := do
|
||||
let info ← Info.ofConstantVal v.toConstantVal
|
||||
let value ← prettyPrintTerm v.value
|
||||
let env ← getEnv
|
||||
let isPartial := env.find? (Compiler.mkUnsafeRecName v.name) |>.isSome
|
||||
let definitionSafety :=
|
||||
if isPartial then
|
||||
DefinitionSafety.partial
|
||||
else if v.isUnsafe then
|
||||
DefinitionSafety.unsafe
|
||||
else
|
||||
DefinitionSafety.safe
|
||||
return {
|
||||
toInfo := info,
|
||||
value,
|
||||
definitionSafety
|
||||
}
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,58 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.NameInfo
|
||||
|
||||
namespace DocGen4.Process
|
||||
|
||||
open Lean Meta
|
||||
|
||||
-- TODO: replace with Leo's variant from Zulip
|
||||
def dropArgs (type : Expr) (n : Nat) : (Expr × List (Name × Expr)) :=
|
||||
match type, n with
|
||||
| e, 0 => (e, [])
|
||||
| Expr.forallE name type body _, x + 1 =>
|
||||
let body := body.instantiate1 <| mkFVar ⟨name⟩
|
||||
let next := dropArgs body x
|
||||
{ next with snd := (name, type) :: next.snd}
|
||||
| _, _ + 1 => panic! s!"No forallE left"
|
||||
|
||||
def getFieldTypes (struct : Name) (ctor : ConstructorVal) (parents : Nat) : MetaM (Array NameInfo) := do
|
||||
let type := ctor.type
|
||||
let (fieldFunction, _) := dropArgs type (ctor.numParams + parents)
|
||||
let (_, fields) := dropArgs fieldFunction (ctor.numFields - parents)
|
||||
let mut fieldInfos := #[]
|
||||
for (name, type) in fields do
|
||||
fieldInfos := fieldInfos.push <| ← NameInfo.ofTypedName (struct.append name) type
|
||||
return fieldInfos
|
||||
|
||||
def StructureInfo.ofInductiveVal (v : InductiveVal) : MetaM StructureInfo := do
|
||||
let info ← Info.ofConstantVal v.toConstantVal
|
||||
let env ← getEnv
|
||||
let parents := getParentStructures env v.name
|
||||
let ctorVal := getStructureCtor env v.name
|
||||
let ctor ← NameInfo.ofTypedName ctorVal.name ctorVal.type
|
||||
match getStructureInfo? env v.name with
|
||||
| some i =>
|
||||
if i.fieldNames.size - parents.size > 0 then
|
||||
return {
|
||||
toInfo := info,
|
||||
fieldInfo := ← getFieldTypes v.name ctorVal parents.size,
|
||||
parents,
|
||||
ctor
|
||||
}
|
||||
else
|
||||
return {
|
||||
toInfo := info,
|
||||
fieldInfo := #[],
|
||||
parents,
|
||||
ctor
|
||||
}
|
||||
| none => panic! s!"{v.name} is not a structure"
|
||||
|
||||
end DocGen4.Process
|
|
@ -1,19 +0,0 @@
|
|||
/-
|
||||
Copyright (c) 2022 Henrik Böving. All rights reserved.
|
||||
Released under Apache 2.0 license as described in the file LICENSE.
|
||||
Authors: Henrik Böving
|
||||
-/
|
||||
import Lean
|
||||
|
||||
import DocGen4.Process.Base
|
||||
import DocGen4.Process.NameInfo
|
||||
|
||||
namespace DocGen4.Process
|
||||
|
||||
open Lean Meta
|
||||
|
||||
def TheoremInfo.ofTheoremVal (v : TheoremVal) : MetaM TheoremInfo := do
|
||||
let info ← Info.ofConstantVal v.toConstantVal
|
||||
return { toInfo := info }
|
||||
|
||||
end DocGen4.Process
|
18
Main.lean
|
@ -11,16 +11,12 @@ def getTopLevelModules (p : Parsed) : IO (List String) := do
|
|||
return topLevelModules
|
||||
|
||||
def runSingleCmd (p : Parsed) : IO UInt32 := do
|
||||
let relevantModules := [p.positionalArg! "module" |>.as! String |> String.toName]
|
||||
let res ← lakeSetup
|
||||
match res with
|
||||
| Except.ok ws =>
|
||||
let relevantModules := #[p.positionalArg! "module" |>.as! String |> String.toName]
|
||||
let gitUrl := p.positionalArg! "gitUrl" |>.as! String
|
||||
let (doc, hierarchy) ← load <| .loadAllLimitAnalysis relevantModules
|
||||
IO.println "Outputting HTML"
|
||||
let baseConfig ← getSimpleBaseContext hierarchy
|
||||
htmlOutputResults baseConfig doc ws (p.hasFlag "ink")
|
||||
htmlOutputResults baseConfig doc (some gitUrl) (p.hasFlag "ink")
|
||||
return 0
|
||||
| Except.error rc => pure rc
|
||||
|
||||
def runIndexCmd (_p : Parsed) : IO UInt32 := do
|
||||
let hierarchy ← Hierarchy.fromDirectory Output.basePath
|
||||
|
@ -29,15 +25,10 @@ def runIndexCmd (_p : Parsed) : IO UInt32 := do
|
|||
return 0
|
||||
|
||||
def runGenCoreCmd (_p : Parsed) : IO UInt32 := do
|
||||
let res ← lakeSetup
|
||||
match res with
|
||||
| Except.ok ws =>
|
||||
let (doc, hierarchy) ← loadCore
|
||||
IO.println "Outputting HTML"
|
||||
let baseConfig ← getSimpleBaseContext hierarchy
|
||||
htmlOutputResults baseConfig doc ws (ink := False)
|
||||
htmlOutputResults baseConfig doc none (ink := False)
|
||||
return 0
|
||||
| Except.error rc => pure rc
|
||||
|
||||
def runDocGenCmd (_p : Parsed) : IO UInt32 := do
|
||||
IO.println "You most likely want to use me via Lake now, check my README on Github on how to:"
|
||||
|
@ -53,6 +44,7 @@ def singleCmd := `[Cli|
|
|||
|
||||
ARGS:
|
||||
module : String; "The module to generate the HTML for. Does not have to be part of topLevelModules."
|
||||
gitUrl : String; "The gitUrl as computed by the Lake facet"
|
||||
]
|
||||
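As a rough sketch (purely illustrative values; in practice the Lake facet supplies these arguments), the `single` subcommand would now be invoked with the module name and git URL as positional arguments:

```bash
# Hypothetical invocation of the doc-gen4 executable with the new gitUrl argument.
$ doc-gen4 single Bookshelf https://github.com/jrpotter/bookshelf.git
```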
|
||||
def indexCmd := `[Cli|
|
||||
|
|
14
Makefile
|
@ -1,14 +0,0 @@
|
|||
all:
|
||||
@echo "Please specify a build target."
|
||||
|
||||
docs:
|
||||
-ls build/doc | \
|
||||
grep -v -E 'Init|Lean|Mathlib' | \
|
||||
xargs -I {} rm -r "build/doc/{}"
|
||||
-./scripts/run_pdflatex.sh build > /dev/null
|
||||
lake build Bookshelf:docs
|
||||
|
||||
docs!:
|
||||
-rm -r build/doc
|
||||
-./scripts/run_pdflatex.sh build > /dev/null
|
||||
lake build Bookshelf:docs
|
40
README.md
|
@ -1,7 +1,11 @@
|
|||
# bookshelf
|
||||
|
||||
A study of the books listed below. Most proofs are conducted in LaTeX. Where
|
||||
feasible, theorems are also formally proven in [Lean](https://leanprover.github.io/).
|
||||
A study of the books listed below.
|
||||
|
||||
## Overview
|
||||
|
||||
Most proofs are conducted in LaTeX. Where feasible, theorems are also formally
|
||||
proven in [Lean](https://leanprover.github.io/).
|
||||
|
||||
- [ ] Apostol, Tom M. Calculus, Vol. 1: One-Variable Calculus, with an Introduction to Linear Algebra. 2nd ed. Vol. 1. 2 vols. Wiley, 1991.
|
||||
- [x] Avigad, Jeremy. ‘Theorem Proving in Lean’, n.d.
|
||||
|
@ -14,23 +18,21 @@ feasible, theorems are also formally proven in [Lean](https://leanprover.github.
|
|||
- [ ] Ross, Sheldon. A First Course in Probability Theory. 8th ed. Pearson Prentice Hall, n.d.
|
||||
- [ ] Smullyan, Raymond M. To Mock a Mockingbird: And Other Logic Puzzles Including an Amazing Adventure in Combinatory Logic. Oxford: Oxford university press, 2000.
|
||||
|
||||
## Documentation
|
||||
|
||||
This project has absorbed [doc-gen4](https://github.com/leanprover/doc-gen4) to
|
||||
ease customization. In particular, the `DocGen4` module found in this project
|
||||
allows generating PDFs and including them in the navbar. To generate
|
||||
documentation and serve files locally, run the following:
|
||||
## Building
|
||||
|
||||
[direnv](https://direnv.net/) can be used to launch a dev shell upon entering
|
||||
this directory (refer to `.envrc`). Otherwise, enter the shell manually via:
|
||||
```bash
|
||||
> make docs[!]
|
||||
> lake run server
|
||||
$ nix develop
|
||||
```
|
||||
|
||||
The `docs` build target avoids cleaning files that are expected to not change
|
||||
often (e.g. `Lean`, `Init`, and `Mathlib` related content). If you've upgraded
|
||||
Lean or Mathlib, run `make docs!` instead to generate documentation from
|
||||
scratch.
|
||||
|
||||
Both assume you have `pdflatex` and `python3` available in your `$PATH`. To
|
||||
change how the server behaves, refer to the `.env` file located in the root
|
||||
directory of this project.
|
||||
If you prefer not to use `nix`, you can also use the [elan](https://github.com/leanprover/elan)
|
||||
package manager as usual. Afterward, build the project by running:
|
||||
```bash
|
||||
$ lake build
|
||||
```
|
||||
Optionally build documentation by running:
|
||||
```bash
|
||||
$ lake build Bookshelf:docs
|
||||
```
|
||||
Afterward, you can view the generated files by running `python3 -m http.server`
|
||||
from within the `.lake/build/doc` directory.
|
||||
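For example, assuming the default port:

```bash
$ cd .lake/build/doc
$ python3 -m http.server
```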
|
|
|
@ -0,0 +1,76 @@
|
|||
{
|
||||
"nodes": {
|
||||
"flake-compat": {
|
||||
"locked": {
|
||||
"lastModified": 1696426674,
|
||||
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
|
||||
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
|
||||
"revCount": 57,
|
||||
"type": "tarball",
|
||||
"url": "https://api.flakehub.com/f/pinned/edolstra/flake-compat/1.0.1/018afb31-abd1-7bff-a5e4-cff7e18efb7a/source.tar.gz"
|
||||
},
|
||||
"original": {
|
||||
"type": "tarball",
|
||||
"url": "https://flakehub.com/f/edolstra/flake-compat/1.tar.gz"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1701680307,
|
||||
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1702312524,
|
||||
"narHash": "sha256-gkZJRDBUCpTPBvQk25G0B7vfbpEYM5s5OZqghkjZsnE=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "a9bf124c46ef298113270b1f84a164865987a91c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"flake-compat": "flake-compat",
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": "nixpkgs"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
|
@ -0,0 +1,79 @@
|
|||
{
|
||||
description = "";
|
||||
|
||||
inputs = {
|
||||
flake-compat.url = "https://flakehub.com/f/edolstra/flake-compat/1.tar.gz";
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
};
|
||||
|
||||
outputs = { self, nixpkgs, flake-utils, ... }:
|
||||
flake-utils.lib.eachDefaultSystem (system:
|
||||
let
|
||||
pkgs = nixpkgs.legacyPackages.${system};
|
||||
manifest = import ./lake-manifest.nix { inherit pkgs; };
|
||||
scheme-custom = with pkgs; (texlive.combine {
|
||||
inherit (texlive) scheme-basic
|
||||
bigfoot
|
||||
comment
|
||||
enumitem
|
||||
environ
|
||||
etoolbox
|
||||
fontawesome5
|
||||
jknapltx
|
||||
mathabx
|
||||
mathabx-type1
|
||||
metafont
|
||||
ncctools
|
||||
pgf
|
||||
rsfs
|
||||
soul
|
||||
stmaryrd
|
||||
tcolorbox
|
||||
xcolor;
|
||||
});
|
||||
in
|
||||
{
|
||||
packages = {
|
||||
app = pkgs.stdenv.mkDerivation {
|
||||
pname = "bookshelf";
|
||||
version = "0.1.0";
|
||||
src = ./.;
|
||||
nativeBuildInputs = with pkgs; [
|
||||
git
|
||||
lean4
|
||||
scheme-custom
|
||||
];
|
||||
buildPhase = ''
|
||||
mkdir -p .lake/packages
|
||||
${pkgs.lib.foldlAttrs (s: key: val: s + ''
|
||||
cp -a ${val}/src .lake/packages/${key}
|
||||
chmod 755 .lake/packages/${key}/{,.git}
|
||||
'') "" manifest}
|
||||
|
||||
export GIT_ORIGIN_URL="https://github.com/jrpotter/bookshelf.git"
|
||||
export GIT_REVISION="${self.rev or "dirty"}"
|
||||
lake build Bookshelf:docs
|
||||
|
||||
find .lake/build/doc \
|
||||
\( -name "*.html.trace" -o -name "*.html.hash" \) \
|
||||
-exec rm {} +
|
||||
'';
|
||||
installPhase = ''
|
||||
mkdir $out
|
||||
cp -a .lake/build/doc/* $out
|
||||
'';
|
||||
};
|
||||
|
||||
default = self.packages.${system}.app;
|
||||
};
|
||||
|
||||
devShells.default = pkgs.mkShell {
|
||||
packages = with pkgs; [
|
||||
lean4
|
||||
python3
|
||||
scheme-custom
|
||||
];
|
||||
};
|
||||
});
|
||||
}
|
|
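A minimal sketch of how these flake outputs might be consumed, assuming Nix flakes are enabled:

```bash
$ nix develop   # dev shell with lean4, python3, and the combined TeX scheme
$ nix build     # builds the default package, i.e. the generated documentation
```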
@ -1,75 +1,95 @@
|
|||
{"version": 5,
|
||||
"packagesDir": "lake-packages",
|
||||
{"version": 7,
|
||||
"packagesDir": ".lake/packages",
|
||||
"packages":
|
||||
[{"git":
|
||||
{"url": "https://github.com/xubaiw/CMark.lean",
|
||||
"subDir?": null,
|
||||
"rev": "0077cbbaa92abf855fc1c0413e158ffd8195ec77",
|
||||
"opts": {},
|
||||
"name": "CMark",
|
||||
"inputRev?": "main",
|
||||
"inherited": false}},
|
||||
{"git":
|
||||
{"url": "https://github.com/EdAyers/ProofWidgets4",
|
||||
"subDir?": null,
|
||||
"rev": "a0c2cd0ac3245a0dade4f925bcfa97e06dd84229",
|
||||
"opts": {},
|
||||
"name": "proofwidgets",
|
||||
"inputRev?": "v0.0.13",
|
||||
"inherited": true}},
|
||||
{"git":
|
||||
{"url": "https://github.com/fgdorais/lean4-unicode-basic",
|
||||
"subDir?": null,
|
||||
"rev": "2491e781ae478b6e6f1d86a7157f1c58fc50f895",
|
||||
"opts": {},
|
||||
"name": "«lean4-unicode-basic»",
|
||||
"inputRev?": "main",
|
||||
"inherited": false}},
|
||||
{"git":
|
||||
{"url": "https://github.com/mhuisi/lean4-cli",
|
||||
"subDir?": null,
|
||||
"rev": "21dac2e9cc7e3cf7da5800814787b833e680b2fd",
|
||||
"opts": {},
|
||||
"name": "Cli",
|
||||
"inputRev?": "nightly",
|
||||
"inherited": false}},
|
||||
{"git":
|
||||
{"url": "https://github.com/leanprover-community/mathlib4.git",
|
||||
"subDir?": null,
|
||||
"rev": "43718b8bde561e133edfd4ab8b5def9fb438c18d",
|
||||
"opts": {},
|
||||
"name": "mathlib",
|
||||
"inputRev?": "master",
|
||||
"inherited": false}},
|
||||
{"git":
|
||||
{"url": "https://github.com/gebner/quote4",
|
||||
"subDir?": null,
|
||||
"rev": "e75daed95ad1c92af4e577fea95e234d7a8401c1",
|
||||
"opts": {},
|
||||
"name": "Qq",
|
||||
"inputRev?": "master",
|
||||
"inherited": true}},
|
||||
{"git":
|
||||
{"url": "https://github.com/JLimperg/aesop",
|
||||
"subDir?": null,
|
||||
"rev": "1a0cded2be292b5496e659b730d2accc742de098",
|
||||
"opts": {},
|
||||
"name": "aesop",
|
||||
"inputRev?": "master",
|
||||
"inherited": true}},
|
||||
{"git":
|
||||
{"url": "https://github.com/hargonix/LeanInk",
|
||||
"subDir?": null,
|
||||
"rev": "2447df5cc6e48eb965c3c3fba87e46d353b5e9f1",
|
||||
"opts": {},
|
||||
"name": "leanInk",
|
||||
"inputRev?": "doc-gen",
|
||||
"inherited": false}},
|
||||
{"git":
|
||||
{"url": "https://github.com/leanprover/std4.git",
|
||||
"subDir?": null,
|
||||
"rev": "28459f72f3190b0f540b49ab769745819eeb1c5e",
|
||||
"opts": {},
|
||||
[{"url": "https://github.com/leanprover/std4.git",
|
||||
"type": "git",
|
||||
"subDir": null,
|
||||
"rev": "2e4a3586a8f16713f16b2d2b3af3d8e65f3af087",
|
||||
"name": "std",
|
||||
"inputRev?": "main",
|
||||
"inherited": false}}]}
|
||||
"manifestFile": "lake-manifest.json",
|
||||
"inputRev": "v4.3.0",
|
||||
"inherited": false,
|
||||
"configFile": "lakefile.lean"},
|
||||
{"url": "https://github.com/leanprover-community/quote4",
|
||||
"type": "git",
|
||||
"subDir": null,
|
||||
"rev": "d3a1d25f3eba0d93a58d5d3d027ffa78ece07755",
|
||||
"name": "Qq",
|
||||
"manifestFile": "lake-manifest.json",
|
||||
"inputRev": "master",
|
||||
"inherited": true,
|
||||
"configFile": "lakefile.lean"},
|
||||
{"url": "https://github.com/leanprover-community/aesop",
|
||||
"type": "git",
|
||||
"subDir": null,
|
||||
"rev": "c7cff4551258d31c0d2d453b3f9cbca757d445f1",
|
||||
"name": "aesop",
|
||||
"manifestFile": "lake-manifest.json",
|
||||
"inputRev": "master",
|
||||
"inherited": true,
|
||||
"configFile": "lakefile.lean"},
|
||||
{"url": "https://github.com/leanprover-community/ProofWidgets4",
|
||||
"type": "git",
|
||||
"subDir": null,
|
||||
"rev": "909febc72b4f64628f8d35cd0554f8a90b6e0749",
|
||||
"name": "proofwidgets",
|
||||
"manifestFile": "lake-manifest.json",
|
||||
"inputRev": "v0.0.23",
|
||||
"inherited": true,
|
||||
"configFile": "lakefile.lean"},
|
||||
{"url": "https://github.com/leanprover/lean4-cli",
|
||||
"type": "git",
|
||||
"subDir": null,
|
||||
"rev": "a751d21d4b68c999accb6fc5d960538af26ad5ec",
|
||||
"name": "Cli",
|
||||
"manifestFile": "lake-manifest.json",
|
||||
"inputRev": "main",
|
||||
"inherited": true,
|
||||
"configFile": "lakefile.lean"},
|
||||
{"url": "https://github.com/leanprover-community/mathlib4.git",
|
||||
"type": "git",
|
||||
"subDir": null,
|
||||
"rev": "f04afed5ac9fea0e1355bc6f6bee2bd01f4a888d",
|
||||
"name": "mathlib",
|
||||
"manifestFile": "lake-manifest.json",
|
||||
"inputRev": "v4.3.0",
|
||||
"inherited": false,
|
||||
"configFile": "lakefile.lean"},
|
||||
{"url": "https://github.com/xubaiw/CMark.lean",
|
||||
"type": "git",
|
||||
"subDir": null,
|
||||
"rev": "0077cbbaa92abf855fc1c0413e158ffd8195ec77",
|
||||
"name": "CMark",
|
||||
"manifestFile": "lake-manifest.json",
|
||||
"inputRev": "main",
|
||||
"inherited": true,
|
||||
"configFile": "lakefile.lean"},
|
||||
{"url": "https://github.com/fgdorais/lean4-unicode-basic",
|
||||
"type": "git",
|
||||
"subDir": null,
|
||||
"rev": "dc62b29a26fcc3da545472ab8ad2c98ef3433634",
|
||||
"name": "UnicodeBasic",
|
||||
"manifestFile": "lake-manifest.json",
|
||||
"inputRev": "main",
|
||||
"inherited": true,
|
||||
"configFile": "lakefile.lean"},
|
||||
{"url": "https://github.com/hargonix/LeanInk",
|
||||
"type": "git",
|
||||
"subDir": null,
|
||||
"rev": "2447df5cc6e48eb965c3c3fba87e46d353b5e9f1",
|
||||
"name": "leanInk",
|
||||
"manifestFile": "lake-manifest.json",
|
||||
"inputRev": "doc-gen",
|
||||
"inherited": true,
|
||||
"configFile": "lakefile.lean"},
|
||||
{"url": "https://github.com/jrpotter/bookshelf-doc",
|
||||
"type": "git",
|
||||
"subDir": null,
|
||||
"rev": "9bd217dc37ea79a3f118a313583f539cdbc762e6",
|
||||
"name": "«doc-gen4»",
|
||||
"manifestFile": "lake-manifest.json",
|
||||
"inputRev": "main",
|
||||
"inherited": false,
|
||||
"configFile": "lakefile.lean"}],
|
||||
"name": "bookshelf",
|
||||
"lakeDir": ".lake"}
|
||||
|
|
|
@ -0,0 +1,119 @@
|
|||
{ pkgs }:
|
||||
let
|
||||
fetchGitPackage = { pname, version, owner, repo, rev, hash }:
|
||||
pkgs.stdenv.mkDerivation {
|
||||
inherit pname version;
|
||||
src = pkgs.fetchgit {
|
||||
inherit rev hash;
|
||||
url = "https://github.com/${owner}/${repo}";
|
||||
# We need to keep this attribute enabled to prevent Lake from trying to
|
||||
# update the package. This attribute ensures the specified commit is
|
||||
# accessible at HEAD:
|
||||
# https://github.com/leanprover/lean4/blob/cddc8089bc736a1532d6092f69476bd2d205a9eb/src/lake/Lake/Load/Materialize.lean#L22
|
||||
leaveDotGit = true;
|
||||
};
|
||||
nativeBuildInputs = with pkgs; [ git ];
|
||||
# Lake will perform a compulsory check that `git remote get-url origin`
|
||||
# returns the value we set here:
|
||||
# https://github.com/leanprover/lean4/blob/cddc8089bc736a1532d6092f69476bd2d205a9eb/src/lake/Lake/Load/Materialize.lean#L54
|
||||
buildPhase = ''
|
||||
git remote add origin https://github.com/${owner}/${repo}
|
||||
'';
|
||||
installPhase = ''
|
||||
shopt -s dotglob
|
||||
mkdir -p $out/src
|
||||
cp -a . $out/src
|
||||
'';
|
||||
};
|
||||
in
|
||||
{
|
||||
CMark = fetchGitPackage {
|
||||
pname = "CMark";
|
||||
version = "main";
|
||||
owner = "xubaiw";
|
||||
repo = "CMark.lean";
|
||||
rev = "0077cbbaa92abf855fc1c0413e158ffd8195ec77";
|
||||
hash = "sha256-ge+9V4IsMdPwjhYu66zUUN6CK70K2BdMT98BzBV3a4c=";
|
||||
};
|
||||
|
||||
Cli = fetchGitPackage {
|
||||
pname = "Cli";
|
||||
version = "main";
|
||||
owner = "leanprover";
|
||||
repo = "lean4-cli";
|
||||
rev = "a751d21d4b68c999accb6fc5d960538af26ad5ec";
|
||||
hash = "sha256-n+6x7ZhyKKiIMZ9cH9VV8zay3oTUlJojtxcLYsUwQPU=";
|
||||
};
|
||||
|
||||
Qq = fetchGitPackage {
|
||||
pname = "Qq";
|
||||
version = "master";
|
||||
owner = "leanprover-community";
|
||||
repo = "quote4";
|
||||
rev = "d3a1d25f3eba0d93a58d5d3d027ffa78ece07755";
|
||||
hash = "sha256-l+X+Mi4khC/xdwQmESz8Qzto6noYqhYN4UqC+TVt3cs=";
|
||||
};
|
||||
|
||||
UnicodeBasic = fetchGitPackage {
|
||||
pname = "UnicodeBasic";
|
||||
version = "main";
|
||||
owner = "fgdorais";
|
||||
repo = "lean4-unicode-basic";
|
||||
rev = "dc62b29a26fcc3da545472ab8ad2c98ef3433634";
|
||||
hash = "sha256-EimohrYMr01CnGx8xCg4q4XX663QuxKfpTDNnDnosO4=";
|
||||
};
|
||||
|
||||
aesop = fetchGitPackage {
|
||||
pname = "aesop";
|
||||
version = "master";
|
||||
owner = "leanprover-community";
|
||||
repo = "aesop";
|
||||
rev = "c7cff4551258d31c0d2d453b3f9cbca757d445f1";
|
||||
hash = "sha256-uzkxE9XJ4y3WMtmiNQn2Je1hNkQ2FgE1/0vqz8f98cw=";
|
||||
};
|
||||
|
||||
doc-gen4 = fetchGitPackage {
|
||||
pname = "doc-gen4";
|
||||
version = "main";
|
||||
owner = "jrpotter";
|
||||
repo = "bookshelf-doc";
|
||||
rev = "9bd217dc37ea79a3f118a313583f539cdbc762e6";
|
||||
hash = "sha256-L7Uca5hJV19/WHYG+MkFWX6BwXDInfSYsOrnZdM9ejY=";
|
||||
};
|
||||
|
||||
leanInk = fetchGitPackage {
|
||||
pname = "leanInk";
|
||||
version = "doc-gen";
|
||||
owner = "hargonix";
|
||||
repo = "LeanInk";
|
||||
rev = "2447df5cc6e48eb965c3c3fba87e46d353b5e9f1";
|
||||
hash = "sha256-asHVaa1uOxpz5arCvfllIrJmMC9VDm1F+bufsu3XwN0=";
|
||||
};
|
||||
|
||||
mathlib = fetchGitPackage {
|
||||
pname = "mathlib";
|
||||
version = "v4.3.0";
|
||||
owner = "leanprover-community";
|
||||
repo = "mathlib4.git";
|
||||
rev = "f04afed5ac9fea0e1355bc6f6bee2bd01f4a888d";
|
||||
hash = "sha256-B0pZ7HwJwOrEXTMMyJSzMLLyh66Bcs/CqNwC3EKZ60I=";
|
||||
};
|
||||
|
||||
proofwidgets = fetchGitPackage {
|
||||
pname = "proofwidgets";
|
||||
version = "v0.0.23";
|
||||
owner = "leanprover-community";
|
||||
repo = "ProofWidgets4";
|
||||
rev = "909febc72b4f64628f8d35cd0554f8a90b6e0749";
|
||||
hash = "sha256-twXXKXXONQpfzG+YLoXYY+3kTU0F40Tsv2+SKfF2Qsc=";
|
||||
};
|
||||
|
||||
std = fetchGitPackage {
|
||||
pname = "std";
|
||||
version = "v4.3.0";
|
||||
owner = "leanprover";
|
||||
repo = "std4";
|
||||
rev = "2e4a3586a8f16713f16b2d2b3af3d8e65f3af087";
|
||||
hash = "sha256-agWcsRIEJbHSjIdiA6z/HQHLZkb72ASW9SPnIM0voeo=";
|
||||
};
|
||||
}
|
156
lakefile.lean
|
@ -4,165 +4,17 @@ open System Lake DSL
|
|||
|
||||
package «bookshelf»
|
||||
|
||||
-- ========================================
|
||||
-- Imports
|
||||
-- ========================================
|
||||
|
||||
require Cli from git
|
||||
"https://github.com/mhuisi/lean4-cli" @
|
||||
"nightly"
|
||||
require CMark from git
|
||||
"https://github.com/xubaiw/CMark.lean" @
  "main"
require «lean4-unicode-basic» from git
  "https://github.com/fgdorais/lean4-unicode-basic" @
  "main"
require leanInk from git
  "https://github.com/hargonix/LeanInk" @
  "doc-gen"
require mathlib from git
  "https://github.com/leanprover-community/mathlib4.git" @
  "master"
  "v4.3.0"
require std from git
  "https://github.com/leanprover/std4.git" @
  "v4.3.0"
require «doc-gen4» from git
  "https://github.com/jrpotter/bookshelf-doc" @
  "main"

-- ========================================
-- Documentation Generator
-- ========================================

lean_lib DocGen4

lean_exe «doc-gen4» {
  root := `Main
  supportInterpreter := true
}

module_facet docs (mod) : FilePath := do
  let some docGen4 ← findLeanExe? `«doc-gen4»
    | error "no doc-gen4 executable configuration found in workspace"
  let exeJob ← docGen4.exe.fetch
  let modJob ← mod.leanBin.fetch
  let buildDir := (← getWorkspace).root.buildDir
  let docFile := mod.filePath (buildDir / "doc") "html"
  exeJob.bindAsync fun exeFile exeTrace => do
    modJob.bindSync fun _ modTrace => do
      let depTrace := exeTrace.mix modTrace
      let trace ← buildFileUnlessUpToDate docFile depTrace do
        logInfo s!"Documenting module: {mod.name}"
        proc {
          cmd := exeFile.toString
          args := #["single", mod.name.toString]
          env := #[("LEAN_PATH", (← getAugmentedLeanPath).toString)]
        }
      return (docFile, trace)

-- TODO: technically speaking this facet does not show all file dependencies
target coreDocs : FilePath := do
  let some docGen4 ← findLeanExe? `«doc-gen4»
    | error "no doc-gen4 executable configuration found in workspace"
  let exeJob ← docGen4.exe.fetch
  let basePath := (←getWorkspace).root.buildDir / "doc"
  let dataFile := basePath / "declarations" / "declaration-data-Lean.bmp"
  exeJob.bindSync fun exeFile exeTrace => do
    let trace ← buildFileUnlessUpToDate dataFile exeTrace do
      logInfo "Documenting Lean core: Init and Lean"
      proc {
        cmd := exeFile.toString
        args := #["genCore"]
        env := #[("LEAN_PATH", (← getAugmentedLeanPath).toString)]
      }
    return (dataFile, trace)

library_facet docs (lib) : FilePath := do
  -- Ordering is important. The index file is generated by walking through the
  -- filesystem directory. Files copied from the shell scripts need to exist
  -- prior to this.
  let mods ← lib.modules.fetch
  let moduleJobs ← BuildJob.mixArray <| ← mods.mapM (fetch <| ·.facet `docs)
  let coreJob : BuildJob FilePath ← coreDocs.fetch
  let exeJob ← «doc-gen4».fetch
  -- Shared with DocGen4.Output
  let basePath := (←getWorkspace).root.buildDir / "doc"
  let dataFile := basePath / "declarations" / "declaration-data.bmp"
  let staticFiles := #[
    basePath / "style.css",
    basePath / "declaration-data.js",
    basePath / "color-scheme.js",
    basePath / "nav.js",
    basePath / "how-about.js",
    basePath / "search.js",
    basePath / "mathjax-config.js",
    basePath / "instances.js",
    basePath / "importedBy.js",
    basePath / "index.html",
    basePath / "404.html",
    basePath / "navbar.html",
    basePath / "search.html",
    basePath / "find" / "index.html",
    basePath / "find" / "find.js",
    basePath / "src" / "alectryon.css",
    basePath / "src" / "alectryon.js",
    basePath / "src" / "docutils_basic.css",
    basePath / "src" / "pygments.css"
  ]
  coreJob.bindAsync fun _ coreInputTrace => do
    exeJob.bindAsync fun exeFile exeTrace => do
      moduleJobs.bindSync fun _ inputTrace => do
        let depTrace := mixTraceArray #[inputTrace, exeTrace, coreInputTrace]
        let trace ← buildFileUnlessUpToDate dataFile depTrace do
          logInfo "Documentation indexing"
          proc {
            cmd := exeFile.toString
            args := #["index"]
          }
        let traces ← staticFiles.mapM computeTrace
        let indexTrace := mixTraceArray traces

        return (dataFile, trace.mix indexTrace)

-- ========================================
-- Bookshelf
-- ========================================

@[default_target]
lean_lib «Bookshelf» {
  roots := #[`Bookshelf, `Common]
}

/--
The contents of our `.env` file.
-/
structure Config where
  port : Nat := 5555

/--
Read in the `.env` file into an in-memory structure.
-/
private def readConfig : StateT Config ScriptM Unit := do
  let env <- IO.FS.readFile ".env"
  for line in env.trim.split (fun c => c == '\n') do
    match line.split (fun c => c == '=') with
    | ["PORT", port] => modify (fun c => { c with port := String.toNat! port })
    | _ => error "Malformed `.env` file."
  return ()

/--
Start an HTTP server for locally serving documentation. It is expected the
documentation has already been generated prior via

```bash
> lake build Bookshelf:docs
```

USAGE:
  lake run server
-/
script server (_args) do
  let ((), config) <- StateT.run readConfig {}
  IO.println s!"Running Lean on `http://localhost:{config.port}`"
  _ <- IO.Process.run {
    cmd := "python3",
    args := #["-m", "http.server", toString config.port, "-d", "build/doc"],
  }
  return 0
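Taken together, the `docs` facets and the `server` script above suggest the following local workflow. This is a sketch based only on the definitions shown here; the `PORT` override via `.env` is optional and defaults to 5555.

```bash
# Build the doc-gen4 documentation for the Bookshelf library (and Lean core),
# which populates build/doc.
lake build Bookshelf:docs

# Optional: override the default port (5555) read by `readConfig`.
echo "PORT=8000" > .env

# Runs `python3 -m http.server` on the configured port, rooted at build/doc.
lake run server
```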
@@ -1 +1 @@
leanprover/lean4:4.0.0
leanprover/lean4:v4.3.0
@@ -170,7 +170,7 @@
\newcommand{\ceil}[1]{\left\lceil#1\right\rceil}
\newcommand{\ctuple}[2]{\left< #1, \cdots, #2 \right>}
\newcommand{\dom}[1]{\textop{dom}{#1}}
\newcommand{\equinumerous}[2]{#1 \approx #2}
\newcommand{\equin}{\approx}
\newcommand{\fld}[1]{\textop{fld}{#1}}
\newcommand{\floor}[1]{\left\lfloor#1\right\rfloor}
\newcommand{\icc}[2]{\left[#1, #2\right]}
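For illustration only (not part of the diff), the floor, ceiling, interval, and equinumerosity macros defined in this preamble are used like this:

```latex
% Illustrative usage of the preamble macros above.
\floor{\pi} = 3, \qquad \ceil{\pi} = 4, \qquad
x \in \icc{a}{b}, \qquad A \equin B
```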
@@ -1,26 +0,0 @@
#!/usr/bin/env bash

if ! command -v pdflatex > /dev/null; then
  >&2 echo 'pdflatex was not found in the current $PATH.'
  exit 1
fi

BUILD_DIR="$1"

function process_file () {
  REL_DIR=$(dirname "$1")
  REL_BASE=$(basename -s ".tex" "$1")
  mkdir -p "$BUILD_DIR/doc/$REL_DIR"
  (cd "$REL_DIR" && pdflatex "$REL_BASE.tex")
  cp "$REL_DIR/$REL_BASE.pdf" "$BUILD_DIR/doc/$REL_DIR/"
}

export BUILD_DIR
export -f process_file

# We run this command twice to allow any cross-references to resolve correctly.
# https://tex.stackexchange.com/questions/41539/does-hyperref-work-between-two-files
for _ in {1..2}; do
  find ./* \( -path build -o -path lake-packages \) -prune -o -name "*.tex" -print0 \
    | xargs -0 -I{} bash -c "process_file {}"
done
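For context, the removed script took the Lake build directory as its only argument; a hypothetical invocation would look like the sketch below (the script's actual filename is not shown in this hunk, so `compile_tex.sh` is a stand-in name).

```bash
# `compile_tex.sh` is a placeholder name; the removed script's real path is not shown here.
# It compiled every .tex file outside build/ and lake-packages/ twice (so hyperref
# cross-references resolve) and copied the PDFs into build/doc/, mirroring the layout.
./compile_tex.sh build
```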
@@ -1,777 +0,0 @@
(Removed in full: a 777-line vendored Alectryon stylesheet, © 2019 Clément Pit-Claudel, MIT-style license. It covered the `.alectryon-io` CSS reset, the goal/message/hypothesis rendering, hover and toggle behavior, the banner and keyboard hints, and the centered, floating, and windowed layout modes.)
@@ -1,172 +0,0 @@
(Removed in full: a 172-line vendored Alectryon JavaScript helper defining the `Alectryon.slideshow` navigation — sentence highlighting, scrolling, and Ctrl/Cmd keyboard and click handlers — and the `Alectryon.styles` switcher for the centered, floating, and windowed modes.)
@@ -1,593 +0,0 @@
(Removed in full: a 593-line vendored docutils base stylesheet, MIT license, © 2014 Matthias Eisen with further changes © 2020, 2021 Clément Pit-Claudel. It styled headings, images, literal blocks, tables, lists, sidebars, topics, headers/footers, admonitions, the table of contents, line blocks, and system messages.)
@@ -1,82 +0,0 @@
(Removed in full: an 82-line Pygments stylesheet generated by Alectryon (style=None), consisting of `.highlight`/`.code` token color rules.)
@@ -1,33 +0,0 @@
function getTheme() {
  return localStorage.getItem("theme") || "system";
}

function setTheme(themeName) {
  localStorage.setItem('theme', themeName);
  if (themeName == "system") {
    themeName = parent.matchMedia("(prefers-color-scheme: dark)").matches ? "dark" : "light";
  }
  // the navbar is in an iframe, so we need to set this variable in the parent document
  for (const win of [window, parent]) {
    win.document.documentElement.setAttribute('data-theme', themeName);
  }
}

setTheme(getTheme())

document.addEventListener("DOMContentLoaded", function() {
  document.querySelectorAll("#color-theme-switcher input").forEach((input) => {
    if (input.value == getTheme()) {
      input.checked = true;
    }
    input.addEventListener('change', e => setTheme(e.target.value));
  });

  // also check to see if the user changes their theme settings while the page is loaded.
  parent.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', event => {
    setTheme(getTheme());
  })
});

// un-hide the colorscheme picker
document.querySelector("#settings").removeAttribute('hidden');
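The switcher above ultimately just toggles a `data-theme` attribute that the stylesheet keys off. A minimal usage sketch, assuming the two functions above are in scope on a generated page:

// Persist an explicit choice, then fall back to the OS preference.
setTheme("dark");         // stores "dark" and sets <html data-theme="dark"> in window and parent
console.log(getTheme());  // -> "dark"
setTheme("system");       // stores "system", then resolves it via prefers-color-scheme
// static/style.css reacts through its [data-theme="dark"] / [data-theme="light"] variable blocks.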
@ -1,287 +0,0 @@
|
|||
/**
|
||||
* This module is a wrapper that facilitates manipulating the declaration data.
|
||||
*
|
||||
* Please see {@link DeclarationDataCenter} for more information.
|
||||
*/
|
||||
|
||||
const CACHE_DB_NAME = "declaration-data";
|
||||
const CACHE_DB_VERSION = 1;
|
||||
const CACHE_DB_KEY = "DECLARATIONS_KEY";
|
||||
|
||||
/**
|
||||
* The DeclarationDataCenter is used for declaration searching.
|
||||
*
|
||||
* For usage, see the {@link init} and {@link search} methods.
|
||||
*/
|
||||
export class DeclarationDataCenter {
|
||||
/**
|
||||
* The declaration data. Users should not interact directly with this field.
|
||||
*
|
||||
* *NOTE:* This is not made private to support legacy browsers.
|
||||
*/
|
||||
declarationData = null;
|
||||
|
||||
/**
|
||||
* Used to implement the singleton, in case we need to fetch data mutiple times in the same page.
|
||||
*/
|
||||
static requestSingleton = null;
|
||||
|
||||
/**
|
||||
* Construct a DeclarationDataCenter with given data.
|
||||
*
|
||||
* Please use {@link DeclarationDataCenter.init} instead, which automates the data fetching process.
|
||||
* @param {*} declarationData
|
||||
*/
|
||||
constructor(declarationData) {
|
||||
this.declarationData = declarationData;
|
||||
}
|
||||
|
||||
/**
|
||||
* The actual constructor of DeclarationDataCenter
|
||||
* @returns {Promise<DeclarationDataCenter>}
|
||||
*/
|
||||
static async init() {
|
||||
if (DeclarationDataCenter.requestSingleton === null) {
|
||||
DeclarationDataCenter.requestSingleton = DeclarationDataCenter.getData();
|
||||
}
|
||||
return await DeclarationDataCenter.requestSingleton;
|
||||
}
|
||||
|
||||
static async getData() {
|
||||
const dataListUrl = new URL(
|
||||
`${SITE_ROOT}/declarations/declaration-data.bmp`,
|
||||
window.location
|
||||
);
|
||||
|
||||
// try to use cache first
|
||||
const data = await fetchCachedDeclarationData().catch(_e => null);
|
||||
if (data) {
|
||||
// if data is defined, use the cached one.
|
||||
return new DeclarationDataCenter(data);
|
||||
} else {
|
||||
// undefined. then fetch the data from the server.
|
||||
const dataListRes = await fetch(dataListUrl);
|
||||
const data = await dataListRes.json();
|
||||
// TODO https://github.com/leanprover/doc-gen4/issues/133
|
||||
// await cacheDeclarationData(data);
|
||||
return new DeclarationDataCenter(data);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for a declaration.
|
||||
* @returns {Array<any>}
|
||||
*/
|
||||
search(pattern, strict = true, allowedKinds=undefined, maxResults=undefined) {
|
||||
if (!pattern) {
|
||||
return [];
|
||||
}
|
||||
if (strict) {
|
||||
let decl = this.declarationData.declarations[pattern];
|
||||
return decl ? [decl] : [];
|
||||
} else {
|
||||
return getMatches(this.declarationData.declarations, pattern, allowedKinds, maxResults);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for all instances of a certain typeclass
|
||||
* @returns {Array<String>}
|
||||
*/
|
||||
instancesForClass(className) {
|
||||
const instances = this.declarationData.instances[className];
|
||||
if (!instances) {
|
||||
return [];
|
||||
} else {
|
||||
return instances;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for all instances that involve a certain type
|
||||
* @returns {Array<String>}
|
||||
*/
|
||||
instancesForType(typeName) {
|
||||
const instances = this.declarationData.instancesFor[typeName];
|
||||
if (!instances) {
|
||||
return [];
|
||||
} else {
|
||||
return instances;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Analogous to Lean declNameToLink
|
||||
* @returns {String}
|
||||
*/
|
||||
declNameToLink(declName) {
|
||||
return this.declarationData.declarations[declName].docLink;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all modules that imported the given one.
|
||||
* @returns {Array<String>}
|
||||
*/
|
||||
moduleImportedBy(moduleName) {
|
||||
return this.declarationData.importedBy[moduleName];
|
||||
}
|
||||
|
||||
/**
|
||||
* Analogous to Lean moduleNameToLink
|
||||
* @returns {String}
|
||||
*/
|
||||
moduleNameToLink(moduleName) {
|
||||
return this.declarationData.modules[moduleName];
|
||||
}
|
||||
}
|
||||
|
||||
function isSeparater(char) {
|
||||
return char === "." || char === "_";
|
||||
}
|
||||
|
||||
// HACK: the fuzzy matching is quite hacky
|
||||
|
||||
function matchCaseSensitive(declName, lowerDeclName, pattern) {
|
||||
let i = 0,
|
||||
j = 0,
|
||||
err = 0,
|
||||
lastMatch = 0;
|
||||
while (i < declName.length && j < pattern.length) {
|
||||
if (pattern[j] === declName[i] || pattern[j] === lowerDeclName[i]) {
|
||||
err += (isSeparater(pattern[j]) ? 0.125 : 1) * (i - lastMatch);
|
||||
if (pattern[j] !== declName[i]) err += 0.5;
|
||||
lastMatch = i + 1;
|
||||
j++;
|
||||
} else if (isSeparater(declName[i])) {
|
||||
err += 0.125 * (i + 1 - lastMatch);
|
||||
lastMatch = i + 1;
|
||||
}
|
||||
i++;
|
||||
}
|
||||
err += 0.125 * (declName.length - lastMatch);
|
||||
if (j === pattern.length) {
|
||||
return err;
|
||||
}
|
||||
}
|
||||
|
||||
function getMatches(declarations, pattern, allowedKinds = undefined, maxResults = undefined) {
|
||||
const lowerPats = pattern.toLowerCase().split(/\s/g);
|
||||
const patNoSpaces = pattern.replace(/\s/g, "");
|
||||
const results = [];
|
||||
for (const [_, {
|
||||
name,
|
||||
kind,
|
||||
doc,
|
||||
docLink,
|
||||
sourceLink,
|
||||
}] of Object.entries(declarations)) {
|
||||
// Apply "kind" filter
|
||||
if (allowedKinds !== undefined) {
|
||||
if (!allowedKinds.has(kind)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
const lowerName = name.toLowerCase();
|
||||
const lowerDoc = doc.toLowerCase();
|
||||
let err = matchCaseSensitive(name, lowerName, patNoSpaces);
|
||||
// match all words as substrings of docstring
|
||||
if (
|
||||
err >= 3 &&
|
||||
pattern.length > 3 &&
|
||||
lowerPats.every((l) => lowerDoc.indexOf(l) != -1)
|
||||
) {
|
||||
err = 3;
|
||||
}
|
||||
if (err !== undefined) {
|
||||
results.push({
|
||||
name,
|
||||
kind,
|
||||
doc,
|
||||
err,
|
||||
lowerName,
|
||||
lowerDoc,
|
||||
docLink,
|
||||
sourceLink,
|
||||
});
|
||||
}
|
||||
}
|
||||
return results.sort(({ err: a }, { err: b }) => a - b).slice(0, maxResults);
|
||||
}
|
||||
|
||||
// TODO: refactor the indexedDB part to be more robust
|
||||
|
||||
/**
|
||||
* Get the indexedDB database, automatically initialized.
|
||||
* @returns {Promise<IDBDatabase>}
|
||||
*/
|
||||
async function getDeclarationDatabase() {
|
||||
return new Promise((resolve, reject) => {
|
||||
const request = indexedDB.open(CACHE_DB_NAME, CACHE_DB_VERSION);
|
||||
|
||||
request.onerror = function (event) {
|
||||
reject(
|
||||
new Error(
|
||||
`fail to open indexedDB ${CACHE_DB_NAME} of version ${CACHE_DB_VERSION}`
|
||||
)
|
||||
);
|
||||
};
|
||||
request.onupgradeneeded = function (event) {
|
||||
let db = event.target.result;
|
||||
// We only need to store one object, so no key path or increment is needed.
|
||||
db.createObjectStore("declaration");
|
||||
};
|
||||
request.onsuccess = function (event) {
|
||||
resolve(event.target.result);
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Store data in indexedDB object store.
|
||||
* @param {Map<string, any>} data
|
||||
*/
|
||||
async function cacheDeclarationData(data) {
|
||||
let db = await getDeclarationDatabase();
|
||||
let store = db
|
||||
.transaction("declaration", "readwrite")
|
||||
.objectStore("declaration");
|
||||
return new Promise((resolve, reject) => {
|
||||
let clearRequest = store.clear();
|
||||
let addRequest = store.add(data, CACHE_DB_KEY);
|
||||
|
||||
addRequest.onsuccess = function (event) {
|
||||
resolve();
|
||||
};
|
||||
addRequest.onerror = function (event) {
|
||||
reject(new Error(`fail to store declaration data`));
|
||||
};
|
||||
clearRequest.onerror = function (event) {
|
||||
reject(new Error("fail to clear object store"));
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve data from indexedDB database.
|
||||
* @returns {Promise<Map<string, any>|undefined>}
|
||||
*/
|
||||
async function fetchCachedDeclarationData() {
|
||||
let db = await getDeclarationDatabase();
|
||||
let store = db
|
||||
.transaction("declaration", "readonly")
|
||||
.objectStore("declaration");
|
||||
return new Promise((resolve, reject) => {
|
||||
let transactionRequest = store.get(CACHE_DB_KEY);
|
||||
transactionRequest.onsuccess = function (event) {
|
||||
// TODO: This API is not thought 100% through. If we have a DB cached
|
||||
// already it will not even ask the remote for a new one so we end up
|
||||
// with outdated declaration-data. This has to have some form of cache
|
||||
// invalidation: https://github.com/leanprover/doc-gen4/issues/133
|
||||
// resolve(event.target.result);
|
||||
resolve(undefined);
|
||||
};
|
||||
transactionRequest.onerror = function (event) {
|
||||
reject(new Error(`fail to store declaration data`));
|
||||
};
|
||||
});
|
||||
}
|
|
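The module deleted above exposes everything through `DeclarationDataCenter`. A minimal usage sketch, using only methods it defines (`init`, `search`); the pattern string and result limit are illustrative:

import { DeclarationDataCenter } from "./declaration-data.js";

// Fetch (or reuse) the declaration index, then run a fuzzy search capped at 10 hits.
const dataCenter = await DeclarationDataCenter.init();
const hits = dataCenter.search("Nat.add", false /* strict */, undefined /* allowedKinds */, 10 /* maxResults */);
for (const { name, kind, docLink } of hits) {
  console.log(kind, name, docLink);
}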
@@ -1,93 +0,0 @@
/**
 * This module is used for the `/find` endpoint.
 *
 * Two basic kinds of search syntax are supported:
 *
 * 1. Query-Fragment syntax: `/find?pattern=Nat.add#doc` for documentation and `/find?pattern=Nat.add#src` for source code.
 * 2. Fragment-Only syntax: `/find/#doc/Nat.add` for documentation and `/find/#src/Nat.add` for source code.
 *
 * Though both of them are valid, the first one is highly recommended; the second one exists for compatibility and taste.
 *
 * There is some extended usage for the QF syntax. For example, appending `strict=false` to the query part
 * (`/find?pattern=Nat.add&strict=false#doc`) will use fuzzy search rather than strict matching.
 * The fragment is extensible as well. For now only `#doc` and `#src` are implemented, and a plain query without
 * a hash (`/find?pattern=Nat.add`) is used for computer-friendly data (semantic web is great! :P), while all other fragments
 * fall back to the `#doc` view.
 */

import { DeclarationDataCenter } from "../declaration-data.js";

function leanFriendlyRegExp(c) {
  try {
    return new RegExp("(?<!«[^»]*)" + c);
  } catch (e) {
    if (e instanceof SyntaxError) {
      // Lookbehind is not implemented yet in WebKit: https://bugs.webkit.org/show_bug.cgi?id=174931
      // Fall back to less friendly regex.
      return new RegExp(c);
    }
    throw e;
  }
}

/**
 * We don't use the browser's default hash and searchParams because a Lean declaration name
 * can look like `«#»`; instead we manually handle `window.location.href` with a regex.
 */
const LEAN_FRIENDLY_URL_REGEX = /^[^?#]+(?:\?((?:[^«#»]|«.*»)*))?(?:#(.*))?$/;
const LEAN_FRIENDLY_AND_SEPARATOR = leanFriendlyRegExp("&");
const LEAN_FRIENDLY_EQUAL_SEPARATOR = leanFriendlyRegExp("=");
const LEAN_FRIENDLY_SLASH_SEPARATOR = leanFriendlyRegExp("/");

const [_, query, fragment] = LEAN_FRIENDLY_URL_REGEX.exec(window.location.href);
const queryParams = new Map(
  query
    ?.split(LEAN_FRIENDLY_AND_SEPARATOR)
    ?.map((p) => p.split(LEAN_FRIENDLY_EQUAL_SEPARATOR))
    ?.filter((l) => l.length == 2 && l[0].length > 0)
);
const fragmentPaths = fragment?.split(LEAN_FRIENDLY_SLASH_SEPARATOR) ?? [];

const encodedPattern = queryParams.get("pattern") ?? fragmentPaths[1]; // if the first fails, try the second; may be undefined
const pattern = decodeURIComponent(encodedPattern);
const strict = (queryParams.get("strict") ?? "true") === "true"; // default to true
const view = fragmentPaths[0];

findAndRedirect(pattern, strict, view);

/**
 * Find the result and redirect to the result page.
 * @param {string} pattern the pattern to search for
 * @param {string} view the view of the find result (`"doc"` or `"src"` for now)
 */
async function findAndRedirect(pattern, strict, view) {
  // if no pattern is provided, directly redirect to the 404 page
  if (!pattern) {
    window.location.replace(`${SITE_ROOT}404.html`);
  }
  // search for a result
  try {
    const dataCenter = await DeclarationDataCenter.init();
    let result = (dataCenter.search(pattern, strict) ?? [])[0]; // in case a non-array is returned
    // if no result is found, redirect to the 404 page
    if (!result) {
      // TODO: better url semantics for 404; the current implementation leads to a duplicate search for fuzzy match if not found.
      window.location.replace(`${SITE_ROOT}404.html#${pattern ?? ""}`);
    } else {
      result.docLink = SITE_ROOT + result.docLink;
      // success, redirect to the doc or source page, or to the semantic rdf.
      if (!view) {
        window.location.replace(result.link);
      } else if (view == "doc") {
        window.location.replace(result.docLink);
      } else if (view == "src") {
        window.location.replace(result.sourceLink);
      } else {
        // fall back to the doc page
        window.location.replace(result.docLink);
      }
    }
  } catch (e) {
    document.write(`Cannot fetch data, please check your network connection.\n${e}`);
  }
}
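For reference, the two URL shapes described in the module comment above can be built like this; this is a sketch only, and the `find` path prefix relative to `SITE_ROOT` is an assumption based on those comments:

// Query-Fragment form (recommended): fuzzy match, documentation view.
const byQuery = `${SITE_ROOT}find?pattern=Nat.add&strict=false#doc`;
// Fragment-Only form: strict match, source view.
const byFragment = `${SITE_ROOT}find/#src/Nat.add`;
window.location.assign(byQuery);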
@@ -1,39 +0,0 @@
/**
 * This module implements the `howabout` functionality in the 404 page.
 */

import { DeclarationDataCenter } from "./declaration-data.js";

const HOW_ABOUT = document.querySelector("#howabout");

// Show url of the missing page
if (HOW_ABOUT) {
  HOW_ABOUT.parentNode
    .insertBefore(document.createElement("pre"), HOW_ABOUT)
    .appendChild(document.createElement("code")).innerText =
    window.location.href.replace(/[/]/g, "/\u200b");

  // TODO: add how about functionality for similar page as well.
  const pattern = window.location.hash.replace("#", "");

  // try to search for similar declarations
  if (pattern) {
    HOW_ABOUT.innerText = "Please wait a second. I'll try to help you.";
    DeclarationDataCenter.init().then((dataCenter) => {
      let results = dataCenter.search(pattern, false);
      if (results.length > 0) {
        HOW_ABOUT.innerText = "How about one of these instead:";
        const ul = HOW_ABOUT.appendChild(document.createElement("ul"));
        for (const { name, docLink } of results) {
          const li = ul.appendChild(document.createElement("li"));
          const a = li.appendChild(document.createElement("a"));
          a.href = docLink;
          a.appendChild(document.createElement("code")).innerText = name;
        }
      } else {
        HOW_ABOUT.innerText =
          "Sorry, I cannot find any similar declarations. Check the link or use the module navigation to find what you want :P";
      }
    });
  }
}
@@ -1,19 +0,0 @@
import { DeclarationDataCenter } from "./declaration-data.js";

fillImportedBy();

async function fillImportedBy() {
  if (!MODULE_NAME) {
    return;
  }
  const dataCenter = await DeclarationDataCenter.init();
  const moduleName = MODULE_NAME;
  const importedByList = document.querySelector(".imported-by-list");
  const importedBy = dataCenter.moduleImportedBy(moduleName);
  var innerHTML = "";
  for (var module of importedBy) {
    const moduleLink = dataCenter.moduleNameToLink(module);
    innerHTML += `<li><a href="${SITE_ROOT}${moduleLink}">${module}</a></li>`
  }
  importedByList.innerHTML = innerHTML;
}
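The same reverse-import lookup can be driven directly from the data center; a sketch, where "Some.Module" is a placeholder name and `SITE_ROOT` is assumed to be in scope as it is in the module above:

import { DeclarationDataCenter } from "./declaration-data.js";

const dataCenter = await DeclarationDataCenter.init();
// moduleImportedBy returns undefined for unknown modules, so guard with ?? [].
for (const mod of dataCenter.moduleImportedBy("Some.Module") ?? []) {
  console.log(mod, "->", SITE_ROOT + dataCenter.moduleNameToLink(mod));
}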
@@ -1,36 +0,0 @@
import { DeclarationDataCenter } from "./declaration-data.js";

annotateInstances();
annotateInstancesFor()

async function annotateInstances() {
  const dataCenter = await DeclarationDataCenter.init();
  const instanceForLists = [...(document.querySelectorAll(".instances-list"))];

  for (const instanceForList of instanceForLists) {
    const className = instanceForList.id.slice("instances-list-".length);
    const instances = dataCenter.instancesForClass(className);
    var innerHTML = "";
    for (var instance of instances) {
      const instanceLink = dataCenter.declNameToLink(instance);
      innerHTML += `<li><a href="${SITE_ROOT}${instanceLink}">${instance}</a></li>`
    }
    instanceForList.innerHTML = innerHTML;
  }
}

async function annotateInstancesFor() {
  const dataCenter = await DeclarationDataCenter.init();
  const instanceForLists = [...(document.querySelectorAll(".instances-for-list"))];

  for (const instanceForList of instanceForLists) {
    const typeName = instanceForList.id.slice("instances-for-list-".length);
    const instances = dataCenter.instancesForType(typeName);
    var innerHTML = "";
    for (var instance of instances) {
      const instanceLink = dataCenter.declNameToLink(instance);
      innerHTML += `<li><a href="${SITE_ROOT}${instanceLink}">${instance}</a></li>`
    }
    instanceForList.innerHTML = innerHTML;
  }
}
@@ -1,41 +0,0 @@
/*
 * This file is for configuring MathJax behavior.
 * See https://docs.mathjax.org/en/latest/web/configuration.html
 *
 * This configuration is copied from old doc-gen3
 * https://github.com/leanprover-community/doc-gen
 */
MathJax = {
  tex: {
    inlineMath: [["$", "$"]],
    displayMath: [["$$", "$$"]],
  },
  options: {
    skipHtmlTags: [
      "script",
      "noscript",
      "style",
      "textarea",
      "pre",
      "code",
      "annotation",
      "annotation-xml",
      "decl",
      "decl_meta",
      "attributes",
      "decl_args",
      "decl_header",
      "decl_name",
      "decl_type",
      "equation",
      "equations",
      "structure_field",
      "structure_fields",
      "constructor",
      "constructors",
      "instances",
    ],
    ignoreHtmlClass: "tex2jax_ignore",
    processHtmlClass: "tex2jax_process",
  },
};
@@ -1,43 +0,0 @@
/**
 * This module is used to implement persistent navbar expansion.
 */

// The variable to store the expansion information.
let expanded = {};

// Load expansion information from sessionStorage.
for (const e of (sessionStorage.getItem("expanded") || "").split(",")) {
  if (e !== "") {
    expanded[e] = true;
  }
}

/**
 * Save expansion information to sessionStorage.
 */
function saveExpanded() {
  sessionStorage.setItem(
    "expanded",
    Object.getOwnPropertyNames(expanded)
      .filter((e) => expanded[e])
      .join(",")
  );
}

// Save expansion information when the user changes the expansion.
for (const elem of document.getElementsByClassName("nav_sect")) {
  const id = elem.getAttribute("data-path");
  if (!id) continue;
  if (expanded[id]) {
    elem.open = true;
  }
  elem.addEventListener("toggle", () => {
    expanded[id] = elem.open;
    saveExpanded();
  });
}

// Scroll to center.
for (const currentFileLink of document.getElementsByClassName("visible")) {
  currentFileLink.scrollIntoView({ block: "center" });
}
static/search.js (158 lines deleted)
@ -1,158 +0,0 @@
|
|||
/**
|
||||
* This module is used to handle user's interaction with the search form.
|
||||
*/
|
||||
|
||||
import { DeclarationDataCenter } from "./declaration-data.js";
|
||||
|
||||
// Search form and input in the upper right toolbar
|
||||
const SEARCH_FORM = document.querySelector("#search_form");
|
||||
const SEARCH_INPUT = SEARCH_FORM.querySelector("input[name=q]");
|
||||
|
||||
// Search form on the /search.html_page. These may be null.
|
||||
const SEARCH_PAGE_INPUT = document.querySelector("#search_page_query")
|
||||
const SEARCH_RESULTS = document.querySelector("#search_results")
|
||||
|
||||
// Max results to show for autocomplete or /search.html page.
|
||||
const AC_MAX_RESULTS = 30
|
||||
const SEARCH_PAGE_MAX_RESULTS = undefined
|
||||
|
||||
// Create an `div#autocomplete_results` to hold all autocomplete results.
|
||||
let ac_results = document.createElement("div");
|
||||
ac_results.id = "autocomplete_results";
|
||||
SEARCH_FORM.appendChild(ac_results);
|
||||
|
||||
/**
|
||||
* Attach `selected` class to the the selected autocomplete result.
|
||||
*/
|
||||
function handleSearchCursorUpDown(down) {
|
||||
const sel = ac_results.querySelector(`.selected`);
|
||||
if (sel) {
|
||||
sel.classList.remove("selected");
|
||||
const toSelect = down
|
||||
? sel.nextSibling
|
||||
: sel.previousSibling;
|
||||
toSelect && toSelect.classList.add("selected");
|
||||
} else {
|
||||
const toSelect = down ? ac_results.firstChild : ac_results.lastChild;
|
||||
toSelect && toSelect.classList.add("selected");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform search (when enter is pressed).
|
||||
*/
|
||||
function handleSearchEnter() {
|
||||
const sel = ac_results.querySelector(`.selected .result_link a`) || document.querySelector(`#search_button`);
|
||||
sel.click();
|
||||
}
|
||||
|
||||
/**
|
||||
* Allow user to navigate autocomplete results with up/down arrow keys, and choose with enter.
|
||||
*/
|
||||
SEARCH_INPUT.addEventListener("keydown", (ev) => {
|
||||
switch (ev.key) {
|
||||
case "Down":
|
||||
case "ArrowDown":
|
||||
ev.preventDefault();
|
||||
handleSearchCursorUpDown(true);
|
||||
break;
|
||||
case "Up":
|
||||
case "ArrowUp":
|
||||
ev.preventDefault();
|
||||
handleSearchCursorUpDown(false);
|
||||
break;
|
||||
case "Enter":
|
||||
ev.preventDefault();
|
||||
handleSearchEnter();
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Remove all children of a DOM node.
|
||||
*/
|
||||
function removeAllChildren(node) {
|
||||
while (node.firstChild) {
|
||||
node.removeChild(node.lastChild);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle user input and perform search.
|
||||
*/
|
||||
function handleSearch(dataCenter, err, ev, sr, maxResults, autocomplete) {
|
||||
const text = ev.target.value;
|
||||
|
||||
// If no input clear all.
|
||||
if (!text) {
|
||||
sr.removeAttribute("state");
|
||||
removeAllChildren(sr);
|
||||
return;
|
||||
}
|
||||
|
||||
// searching
|
||||
sr.setAttribute("state", "loading");
|
||||
|
||||
if (dataCenter) {
|
||||
var allowedKinds;
|
||||
if (!autocomplete) {
|
||||
allowedKinds = new Set();
|
||||
document.querySelectorAll(".kind_checkbox").forEach((checkbox) =>
|
||||
{
|
||||
if (checkbox.checked) {
|
||||
allowedKinds.add(checkbox.value);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
const result = dataCenter.search(text, false, allowedKinds, maxResults);
|
||||
|
||||
// in case user has updated the input.
|
||||
if (ev.target.value != text) return;
|
||||
|
||||
// update autocomplete results
|
||||
removeAllChildren(sr);
|
||||
for (const { name, kind, doc, docLink } of result) {
|
||||
const row = sr.appendChild(document.createElement("div"));
|
||||
row.classList.add("search_result")
|
||||
const linkdiv = row.appendChild(document.createElement("div"))
|
||||
linkdiv.classList.add("result_link")
|
||||
const link = linkdiv.appendChild(document.createElement("a"));
|
||||
link.innerText = name;
|
||||
link.title = name;
|
||||
link.href = SITE_ROOT + docLink;
|
||||
if (!autocomplete) {
|
||||
const doctext = row.appendChild(document.createElement("div"));
|
||||
doctext.innerText = doc
|
||||
doctext.classList.add("result_doc")
|
||||
}
|
||||
}
|
||||
}
|
||||
// handle error
|
||||
else {
|
||||
removeAllChildren(sr);
|
||||
const d = sr.appendChild(document.createElement("a"));
|
||||
d.innerText = `Cannot fetch data, please check your network connection.\n${err}`;
|
||||
}
|
||||
sr.setAttribute("state", "done");
|
||||
}
|
||||
|
||||
DeclarationDataCenter.init()
|
||||
.then((dataCenter) => {
|
||||
// Search autocompletion.
|
||||
SEARCH_INPUT.addEventListener("input", ev => handleSearch(dataCenter, null, ev, ac_results, AC_MAX_RESULTS, true));
|
||||
if(SEARCH_PAGE_INPUT) {
|
||||
SEARCH_PAGE_INPUT.addEventListener("input", ev => handleSearch(dataCenter, null, ev, SEARCH_RESULTS, SEARCH_PAGE_MAX_RESULTS, false))
|
||||
document.querySelectorAll(".kind_checkbox").forEach((checkbox) =>
|
||||
checkbox.addEventListener("input", ev => SEARCH_PAGE_INPUT.dispatchEvent(new Event("input")))
|
||||
);
|
||||
SEARCH_PAGE_INPUT.dispatchEvent(new Event("input"))
|
||||
};
|
||||
SEARCH_INPUT.dispatchEvent(new Event("input"))
|
||||
})
|
||||
.catch(e => {
|
||||
SEARCH_INPUT.addEventListener("input", ev => handleSearch(null, e, ev, ac_results, AC_MAX_RESULTS,true ));
|
||||
if(SEARCH_PAGE_INPUT) {
|
||||
SEARCH_PAGE_INPUT.addEventListener("input", ev => handleSearch(null, e, ev, SEARCH_RESULTS, SEARCH_PAGE_MAX_RESULTS, false));
|
||||
}
|
||||
});
|
|
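On the search page the `.kind_checkbox` inputs above become an `allowedKinds` set passed to `DeclarationDataCenter.search`. A standalone sketch of that call; the kind names and pattern are illustrative, not a fixed list from doc-gen4:

import { DeclarationDataCenter } from "./declaration-data.js";

const dataCenter = await DeclarationDataCenter.init();
// Fuzzy search restricted to a subset of declaration kinds, capped like AC_MAX_RESULTS above.
const allowedKinds = new Set(["theorem", "def"]);   // assumed kind values, for illustration
const results = dataCenter.search("floor", false, allowedKinds, 30);
console.log(results.map(({ name }) => name));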
@@ -1,4 +0,0 @@
/**
 * Get the site root, {siteRoot} is to be replaced by doc-gen4.
 */
export const SITE_ROOT = "{siteRoot}";
static/style.css (815 lines deleted)
@ -1,815 +0,0 @@
|
|||
@import url('https://fonts.googleapis.com/css2?family=Merriweather&family=Open+Sans&family=Source+Code+Pro&family=Source+Code+Pro:wght@600&display=swap');
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: 'Open Sans', sans-serif;
|
||||
color: var(--text-color);
|
||||
background: var(--body-bg);
|
||||
}
|
||||
|
||||
a {
|
||||
color: var(--link-color);
|
||||
}
|
||||
|
||||
a.pdf {
|
||||
color: var(--link-pdf-color);
|
||||
}
|
||||
|
||||
h1, h2, h3, h4, h5, h6 {
|
||||
font-family: 'Merriweather', serif;
|
||||
}
|
||||
|
||||
body { line-height: 1.5; }
|
||||
nav { line-height: normal; }
|
||||
|
||||
:root {
|
||||
--header-height: 3em;
|
||||
--fragment-offset: calc(var(--header-height) + 1em);
|
||||
--content-width: 55vw;
|
||||
|
||||
--header-bg: #f8f8f8;
|
||||
--body-bg: white;
|
||||
--code-bg: #f3f3f3;
|
||||
--text-color: black;
|
||||
--link-color: hsl(210, 100%, 30%);
|
||||
--link-pdf-color: hsl(272, 61%, 34%);
|
||||
|
||||
--implicit-arg-text-color: var(--text-color);
|
||||
|
||||
--def-color: #92dce5;
|
||||
--def-color-hsl-angle: 187;
|
||||
--theorem-color: #8fe388;
|
||||
--theorem-color-hsl-angle: 115;
|
||||
--axiom-and-constant-color: #f44708;
|
||||
--axiom-and-constant-color-hsl-angle: 16;
|
||||
--structure-and-inductive-color: #f0a202;
|
||||
--structure-and-inductive-color-hsl-angle: 40;
|
||||
--starting-percentage: 100;
|
||||
|
||||
--hamburger-bg-color: #eee;
|
||||
--hamburger-active-color: white;
|
||||
--hamburger-border-color: #ccc;
|
||||
|
||||
--tags-border-color: #555;
|
||||
|
||||
--fragment-offset: calc(var(--header-height) + 1em);
|
||||
--content-width: 55vw;
|
||||
}
|
||||
/* automatic dark theme if no javascript */
|
||||
@media (prefers-color-scheme: dark) {
|
||||
:root {
|
||||
--header-bg: #111010;
|
||||
--body-bg: #171717;
|
||||
--code-bg: #363333;
|
||||
--text-color: #eee;
|
||||
--link-color: #58a6ff;
|
||||
--link-pdf-color: #9d58fd;
|
||||
|
||||
--implicit-arg-text-color: var(--text-color);
|
||||
|
||||
--def-color: #1F497F;
|
||||
--def-color-hsl-angle: 214;
|
||||
--theorem-color: #134E2D;
|
||||
--theorem-color-hsl-angle: 146;
|
||||
--axiom-and-constant-color: #6B1B1A;
|
||||
--axiom-and-constant-color-hsl-angle: 1;
|
||||
--structure-and-inductive-color: #73461C;
|
||||
--structure-and-inductive-color-hsl-angle: 29;
|
||||
--starting-percentage: 30;
|
||||
|
||||
--hamburger-bg-color: #2d2c2c;
|
||||
--hamburger-active-color: black;
|
||||
--hamburger-border-color: #717171;
|
||||
|
||||
--tags-border-color: #AAA;
|
||||
}
|
||||
}
|
||||
|
||||
[data-theme="light"] {
|
||||
color-scheme: light;
|
||||
|
||||
--header-height: 3em;
|
||||
--fragment-offset: calc(var(--header-height) + 1em);
|
||||
--content-width: 55vw;
|
||||
|
||||
--header-bg: #f8f8f8;
|
||||
--body-bg: white;
|
||||
--code-bg: #f3f3f3;
|
||||
--text-color: black;
|
||||
--link-color: hsl(210, 100%, 30%);
|
||||
--link-pdf-color: hsl(272, 61%, 34%);
|
||||
|
||||
--implicit-arg-text-color: var(--text-color);
|
||||
|
||||
--def-color: #92dce5;
|
||||
--def-color-hsl-angle: 187;
|
||||
--theorem-color: #8fe388;
|
||||
--theorem-color-hsl-angle: 115;
|
||||
--axiom-and-constant-color: #f44708;
|
||||
--axiom-and-constant-color-hsl-angle: 16;
|
||||
--structure-and-inductive-color: #f0a202;
|
||||
--structure-and-inductive-color-hsl-angle: 40;
|
||||
--starting-percentage: 100;
|
||||
|
||||
--hamburger-bg-color: #eee;
|
||||
--hamburger-active-color: white;
|
||||
--hamburger-border-color: #ccc;
|
||||
|
||||
--tags-border-color: #555;
|
||||
|
||||
--fragment-offset: calc(var(--header-height) + 1em);
|
||||
--content-width: 55vw;
|
||||
}
|
||||
|
||||
[data-theme="dark"] {
|
||||
color-scheme: dark;
|
||||
|
||||
--header-bg: #111010;
|
||||
--body-bg: #171717;
|
||||
--code-bg: #363333;
|
||||
--text-color: #eee;
|
||||
--link-color: #58a6ff;
|
||||
--link-pdf-color: #9d58fd;
|
||||
|
||||
--implicit-arg-text-color: var(--text-color);
|
||||
|
||||
--def-color: #1F497F;
|
||||
--def-color-hsl-angle: 214;
|
||||
--theorem-color: #134E2D;
|
||||
--theorem-color-hsl-angle: 146;
|
||||
--axiom-and-constant-color: #6B1B1A;
|
||||
--axiom-and-constant-color-hsl-angle: 1;
|
||||
--structure-and-inductive-color: #73461C;
|
||||
--structure-and-inductive-color-hsl-angle: 29;
|
||||
--starting-percentage: 30;
|
||||
|
||||
--hamburger-bg-color: #2d2c2c;
|
||||
--hamburger-active-color: black;
|
||||
--hamburger-border-color: #717171;
|
||||
|
||||
--tags-border-color: #AAA;
|
||||
}
|
||||
|
||||
@supports (width: min(10px, 5vw)) {
|
||||
:root {
|
||||
--content-width: clamp(20em, 55vw, 60em);
|
||||
}
|
||||
}
|
||||
|
||||
#nav_toggle {
|
||||
display: none;
|
||||
}
|
||||
label[for="nav_toggle"] {
|
||||
display: none;
|
||||
}
|
||||
|
||||
header {
|
||||
height: var(--header-height);
|
||||
float: left;
|
||||
position: fixed;
|
||||
width: 100vw;
|
||||
max-width: 100%;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
--header-side-padding: 2em;
|
||||
padding: 0 var(--header-side-padding);
|
||||
background: var(--header-bg);
|
||||
z-index: 1;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
}
|
||||
@supports (width: min(10px, 5vw)) {
|
||||
header {
|
||||
--header-side-padding: calc(max(2em, (100vw - var(--content-width) - 30em) / 2));
|
||||
}
|
||||
}
|
||||
@media screen and (max-width: 1000px) {
|
||||
:root {
|
||||
--content-width: 100vw;
|
||||
}
|
||||
|
||||
.internal_nav {
|
||||
display: none;
|
||||
}
|
||||
|
||||
body .nav {
|
||||
width: 100vw;
|
||||
max-width: 100vw;
|
||||
margin-left: 1em;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
body main {
|
||||
width: unset;
|
||||
max-width: unset;
|
||||
margin-left: unset;
|
||||
margin-right: unset;
|
||||
}
|
||||
body .decl > div {
|
||||
overflow-x: unset;
|
||||
}
|
||||
|
||||
#nav_toggle:not(:checked) ~ .nav {
|
||||
display: none;
|
||||
}
|
||||
#nav_toggle:checked ~ main {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
label[for="nav_toggle"]::before {
|
||||
content: '≡';
|
||||
}
|
||||
label[for="nav_toggle"] {
|
||||
display: inline-block;
|
||||
margin-right: 1em;
|
||||
border: 1px solid var(--hamburger-border-color);
|
||||
padding: 0.5ex 1ex;
|
||||
cursor: pointer;
|
||||
background: var(--hamburger-bg-color);
|
||||
}
|
||||
#nav_toggle:checked ~ * label[for="nav_toggle"] {
|
||||
background: var(--hamburger-active-color);
|
||||
}
|
||||
|
||||
body header h1 {
|
||||
font-size: 100%;
|
||||
}
|
||||
|
||||
header {
|
||||
--header-side-padding: 1ex;
|
||||
}
|
||||
}
|
||||
@media screen and (max-width: 700px) {
|
||||
header h1 span { display: none; }
|
||||
:root { --header-side-padding: 1ex; }
|
||||
#search_form button { display: none; }
|
||||
#search_form input { width: 100%; }
|
||||
header #autocomplete_results {
|
||||
left: 1ex;
|
||||
right: 1ex;
|
||||
width: inherit;
|
||||
}
|
||||
body header > * { margin: 0; }
|
||||
}
|
||||
|
||||
header > * {
|
||||
display: inline-block;
|
||||
padding: 0;
|
||||
margin: 0 1em;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
header h1 {
|
||||
font-weight: normal;
|
||||
font-size: 160%;
|
||||
}
|
||||
|
||||
header header_filename {
|
||||
font-size: 150%;
|
||||
}
|
||||
@media (max-width: 80em) {
|
||||
.header .header_filename {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
/* inserted by nav.js */
|
||||
#autocomplete_results {
|
||||
position: absolute;
|
||||
top: var(--header-height);
|
||||
right: calc(var(--header-side-padding));
|
||||
width: calc(20em + 4em);
|
||||
z-index: 1;
|
||||
background: var(--header-bg);
|
||||
border: 1px solid #aaa;
|
||||
border-top: none;
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
max-height: calc(100vh - var(--header-height));
|
||||
}
|
||||
|
||||
#autocomplete_results:empty {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#autocomplete_results[state="loading"]:empty {
|
||||
display: block;
|
||||
cursor: progress;
|
||||
}
|
||||
#autocomplete_results[state="loading"]:empty::before {
|
||||
display: block;
|
||||
content: ' 🐙 🐙 🐙 🐙 🐙 🐙 🐙 🐙 🐙 🐙 🐙 🐙 🐙 🐙 🐙 ';
|
||||
padding: 1ex;
|
||||
animation: marquee 10s linear infinite;
|
||||
}
|
||||
@keyframes marquee {
|
||||
0% { transform: translate(100%, 0); }
|
||||
100% { transform: translate(-100%, 0); }
|
||||
}
|
||||
|
||||
#autocomplete_results[state="done"]:empty {
|
||||
display: block;
|
||||
text-align: center;
|
||||
padding: 1ex;
|
||||
}
|
||||
#autocomplete_results[state="done"]:empty::before {
|
||||
content: '(no results)';
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
#autocomplete_results a {
|
||||
display: block;
|
||||
color: inherit;
|
||||
padding: 1ex;
|
||||
border-left: 0.5ex solid transparent;
|
||||
padding-left: 0.5ex;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
#autocomplete_results .selected .result_link a {
|
||||
background: var(--body-bg);
|
||||
border-color: var(--structure-and-inductive-color);
|
||||
}
|
||||
|
||||
|
||||
#search_results {
|
||||
display: table;
|
||||
width: 100%;
|
||||
}
|
||||
#search_results[state="done"]:empty::before {
|
||||
content: '(no results)';
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
#search_results .result_link, #search_results .result_doc {
|
||||
border-bottom: 1px solid rgba(0, 0, 0, 0.8);
|
||||
}
|
||||
|
||||
.search_result {
|
||||
display: table-row;
|
||||
}
|
||||
|
||||
.result_link, .result_doc {
|
||||
display: table-cell;
|
||||
overflow: hidden;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
main, nav {
|
||||
margin-top: calc(var(--header-height) + 1em);
|
||||
}
|
||||
|
||||
/* extra space for scrolling things to the top */
|
||||
main {
|
||||
margin-bottom: 90vh;
|
||||
}
|
||||
|
||||
main {
|
||||
max-width: var(--content-width);
|
||||
/* center it: */
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
nav {
|
||||
float: left;
|
||||
height: calc(100vh - var(--header-height) - 1em);
|
||||
position: fixed;
|
||||
top: 0;
|
||||
overflow: auto;
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: transparent transparent;
|
||||
}
|
||||
|
||||
nav:hover {
|
||||
scrollbar-color: gray transparent;
|
||||
}
|
||||
|
||||
nav {
|
||||
--column-available-space: calc((100vw - var(--content-width) - 5em)/2);
|
||||
--column-width: calc(var(--column-available-space) - 1ex);
|
||||
--dist-to-edge: 1ex;
|
||||
width: var(--content-width);
|
||||
max-width: var(--column-width);
|
||||
}
|
||||
@supports (width: min(10px, 5vw)) {
|
||||
.nav { --desired-column-width: 20em; }
|
||||
.internal_nav { --desired-column-width: 30em; }
|
||||
nav {
|
||||
--column-available-space: calc(max(0px, (100vw - var(--content-width) - 5em)/2));
|
||||
--column-width: calc(clamp(0px, var(--column-available-space) - 1ex, var(--desired-column-width)));
|
||||
--dist-to-edge: calc(max(1ex, var(--column-available-space) - var(--column-width)));
|
||||
}
|
||||
}
|
||||
|
||||
.nav { left: var(--dist-to-edge); }
|
||||
.internal_nav { right: var(--dist-to-edge); }
|
||||
|
||||
.internal_nav .nav_link, .taclink {
|
||||
/* indent everything but first line by 2ex */
|
||||
text-indent: -2ex; padding-left: 2ex;
|
||||
}
|
||||
|
||||
.navframe {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.navframe .nav {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
.internal_nav .imports {
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.tagfilter-div {
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
.tagfilter-div > summary {
|
||||
margin-bottom: 1ex;
|
||||
}
|
||||
|
||||
/* top-level modules in left navbar */
|
||||
.nav .module_list > details {
|
||||
margin-top: 1ex;
|
||||
}
|
||||
|
||||
.nav details > * {
|
||||
padding-left: 2ex;
|
||||
}
|
||||
|
||||
.nav summary {
|
||||
cursor: pointer;
|
||||
padding-left: 0;
|
||||
}
|
||||
|
||||
.nav summary::marker {
|
||||
font-size: 85%;
|
||||
}
|
||||
|
||||
.nav .nav_file {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.nav h3 {
|
||||
margin-block-end: 4px;
|
||||
}
|
||||
|
||||
/* People use way too long declaration names. */
|
||||
.internal_nav, .decl_name {
|
||||
overflow-wrap: break-word;
|
||||
}
|
||||
|
||||
/* Add a linebreak after a declaration name. */
|
||||
.decl_name::after {
|
||||
content: "\A";
|
||||
white-space: pre;
|
||||
}
|
||||
|
||||
.nav_link {
|
||||
overflow-wrap: break-word;
|
||||
}
|
||||
|
||||
.navframe {
|
||||
--header-height: 0;
|
||||
}
|
||||
|
||||
#settings {
|
||||
margin-top: 5em;
|
||||
}
|
||||
#settings h3 {
|
||||
font-size: inherit;
|
||||
}
|
||||
|
||||
#color-theme-switcher {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
padding: 0 2ex;
|
||||
flex-flow: row wrap;
|
||||
}
|
||||
|
||||
/* custom radio buttons for dark/light switch */
|
||||
#color-theme-switcher input {
|
||||
-webkit-appearance: none;
|
||||
-moz-appearance: none;
|
||||
appearance: none;
|
||||
display: inline-block;
|
||||
box-sizing: content-box;
|
||||
height: 1em;
|
||||
width: 1em;
|
||||
background-clip: content-box;
|
||||
padding: 2px;
|
||||
border: 2px solid transparent;
|
||||
margin-bottom: -4px;
|
||||
border-radius: 50%;
|
||||
}
|
||||
#color-theme-dark { background-color: #444; }
|
||||
#color-theme-light { background-color: #ccc; }
|
||||
#color-theme-system {
|
||||
background-image: linear-gradient(60deg, #444, #444 50%, #CCC 50%, #CCC);
|
||||
}
|
||||
#color-theme-switcher input:checked {
|
||||
border-color: var(--text-color);
|
||||
}
|
||||
|
||||
.decl > div, .mod_doc {
|
||||
padding-left: 8px;
|
||||
padding-right: 8px;
|
||||
}
|
||||
|
||||
.decl {
|
||||
margin-top: 20px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.decl > div {
|
||||
/* sometimes declarations arguments are way too long
|
||||
and would continue into the right column,
|
||||
so put a scroll bar there: */
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
/* Make `#id` links appear below header. */
|
||||
.decl, h1[id], h2[id], h3[id], h4[id], h5[id], h6[id] {
|
||||
scroll-margin-top: var(--fragment-offset);
|
||||
}
|
||||
/* don't need as much vertical space for these
|
||||
inline elements */
|
||||
a[id], li[id] {
|
||||
scroll-margin-top: var(--header-height);
|
||||
}
|
||||
|
||||
/* HACK: Safari doesn't support scroll-margin-top for
|
||||
fragment links (yet?)
|
||||
https://caniuse.com/mdn-css_properties_scroll-margin-top
|
||||
https://bugs.webkit.org/show_bug.cgi?id=189265
|
||||
*/
|
||||
@supports not (scroll-margin-top: var(--fragment-offset)) {
|
||||
.decl::before, h1[id]::before, h2[id]::before, h3[id]::before,
|
||||
h4[id]::before, h5[id]::before, h6[id]::before,
|
||||
a[id]::before, li[id]::before {
|
||||
content: "";
|
||||
display: block;
|
||||
height: var(--fragment-offset);
|
||||
margin-top: calc(-1 * var(--fragment-offset));
|
||||
box-sizing: inherit;
|
||||
visibility: hidden;
|
||||
width: 1px;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* hide # after markdown headings except on hover */
|
||||
.markdown-heading:not(:hover) > .hover-link {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
main h2, main h3, main h4, main h5, main h6 {
|
||||
margin-top: 2rem;
|
||||
}
|
||||
.decl + .mod_doc > h2,
|
||||
.decl + .mod_doc > h3,
|
||||
.decl + .mod_doc > h4,
|
||||
.decl + .mod_doc > h5,
|
||||
.decl + .mod_doc > h6 {
|
||||
margin-top: 4rem;
|
||||
}
|
||||
|
||||
.def, .instance {
|
||||
border-left: 10px solid var(--text-color);
|
||||
border-top: 2px solid var(--text-color);
|
||||
}
|
||||
|
||||
.theorem {
|
||||
border-left: 10px solid var(--theorem-color);
|
||||
border-top: 2px solid var(--theorem-color);
|
||||
}
|
||||
|
||||
.axiom, .opaque {
|
||||
border-left: 10px solid var(--axiom-and-constant-color);
|
||||
border-top: 2px solid var(--axiom-and-constant-color);
|
||||
}
|
||||
|
||||
.structure, .inductive, .class {
|
||||
border-left: 10px solid var(--structure-and-inductive-color);
|
||||
border-top: 2px solid var(--structure-and-inductive-color);
|
||||
}
|
||||
|
||||
.fn {
|
||||
display: inline-block;
|
||||
/* border: 1px dashed red; */
|
||||
text-indent: -1ex;
|
||||
padding-left: 1ex;
|
||||
white-space: pre-wrap;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
.fn { --fn: 1; }
|
||||
.fn .fn { --fn: 2; }
|
||||
.fn .fn .fn { --fn: 3; }
|
||||
.fn .fn .fn .fn { --fn: 4; }
|
||||
.fn .fn .fn .fn .fn { --fn: 5; }
|
||||
.fn .fn .fn .fn .fn .fn { --fn: 6; }
|
||||
.fn .fn .fn .fn .fn .fn .fn { --fn: 7; }
|
||||
.fn .fn .fn .fn .fn .fn .fn .fn { --fn: 8; }
|
||||
.fn {
|
||||
transition: background-color 100ms ease-in-out;
|
||||
}
|
||||
|
||||
.def .fn:hover, .instance .fn:hover {
|
||||
background-color: hsla(var(--def-color-hsl-angle), 61%, calc(var(--starting-percentage) - 5%*var(--fn)));
|
||||
box-shadow: 0 0 0 1px hsla(var(--def-color-hsl-angle), 61%, calc(var(--starting-percentage) - 5%*(var(--fn) + 1)));
|
||||
border-radius: 5px;
|
||||
}
|
||||
.theorem .fn:hover {
|
||||
background-color: hsla(var(--theorem-color-hsl-angle), 62%, calc(var(--starting-percentage) - 5%*var(--fn)));
|
||||
box-shadow: 0 0 0 1px hsla(var(--theorem-color-hsl-angle), 62%, calc(var(--starting-percentage) - 5%*(var(--fn) + 1)));
|
||||
border-radius: 5px;
|
||||
}
|
||||
.axiom .fn:hover, .opaque .fn:hover {
|
||||
background-color: hsla(var(--axiom-and-constant-color-hsl-angle), 94%, calc(var(--starting-percentage) - 5%*var(--fn)));
|
||||
box-shadow: 0 0 0 1px hsla(var(--axiom-and-constant-color-hsl-angle), 94%, calc(var(--starting-percentage) - 5%*(var(--fn) + 1)));
|
||||
border-radius: 5px;
|
||||
}
|
||||
.structure .fn:hover, .inductive .fn:hover, .class .fn:hover {
|
||||
background-color: hsla(var(--structure-and-inductive-color-hsl-angle), 98%, calc(var(--starting-percentage) - 5%*var(--fn)));
|
||||
box-shadow: 0 0 0 1px hsla(var(--structure-and-inductive-color-hsl-angle), 98%, calc(var(--starting-percentage) - 5%*(var(--fn) + 1)));
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
.decl_args, .decl_type .decl_parent {
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
.implicits, .impl_arg {
|
||||
color: var(--text-color);
|
||||
white-space: normal;
|
||||
}
|
||||
|
||||
.decl_kind, .decl_name, .decl_extends {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
/* break long declaration names at periods where possible */
|
||||
.break_within {
|
||||
word-break: break-all;
|
||||
}
|
||||
|
||||
.break_within .name {
|
||||
word-break: normal;
|
||||
}
|
||||
|
||||
.decl_header {
|
||||
/* indent everything but first line twice as much as decl_type */
|
||||
text-indent: -8ex; padding-left: 8ex;
|
||||
}
|
||||
|
||||
.decl_type {
|
||||
margin-top: 2px;
|
||||
margin-left: 4ex; /* extra indentation */
|
||||
}
|
||||
|
||||
.imports li, code, .decl_header, .attributes, .structure_field_info,
|
||||
.constructor, .instances li, .equation, .result_link, .structure_ext_ctor {
|
||||
font-family: 'Source Code Pro', monospace;
|
||||
}
|
||||
|
||||
pre {
|
||||
white-space: break-spaces;
|
||||
}
|
||||
|
||||
code, pre { background: var(--code-bg); }
|
||||
code, pre { border-radius: 5px; }
|
||||
code { padding: 1px 3px; }
|
||||
pre { padding: 1ex; }
|
||||
pre code { padding: 0 0; }
|
||||
|
||||
#howabout code { background: inherit; }
|
||||
#howabout li { margin-bottom: 0.5ex; }
|
||||
|
||||
.structure_fields, .constructors {
|
||||
display: block;
|
||||
padding-inline-start: 0;
|
||||
margin-top: 1ex;
|
||||
text-indent: -2ex; padding-left: 2ex;
|
||||
}
|
||||
|
||||
.structure_field {
|
||||
display: block;
|
||||
margin-left: 2ex;
|
||||
}
|
||||
|
||||
.inductive_ctor_doc {
|
||||
text-indent: 2ex;
|
||||
font-family: 'Open Sans', sans-serif;
|
||||
}
|
||||
|
||||
.structure_field_doc {
|
||||
text-indent: 0;
|
||||
font-family: 'Open Sans', sans-serif;
|
||||
}
|
||||
|
||||
.structure_ext_fields {
|
||||
display: block;
|
||||
padding-inline-start: 0;
|
||||
margin-top: 1ex;
|
||||
text-indent: -2ex; padding-left: 2ex;
|
||||
}
|
||||
|
||||
.structure_ext_fields .structure_field {
|
||||
margin-left: -1ex !important;
|
||||
}
|
||||
|
||||
.structure_ext_ctor {
|
||||
display: block;
|
||||
text-indent: -3ex;
|
||||
}
|
||||
|
||||
.constructor {
|
||||
display: block;
|
||||
}
|
||||
.constructor:before {
|
||||
content: '| ';
|
||||
color: gray;
|
||||
}
|
||||
|
||||
/** Don't show underline on types, to prevent the ≤ vs < confusion. **/
|
||||
a:link, a:visited, a:active {
|
||||
color:var(--link-color);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a.pdf:link, a.pdf:visited, a.pdf:active {
|
||||
color:var(--link-pdf-color);
|
||||
}
|
||||
|
||||
/** Show it on hover though. **/
|
||||
a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.impl_arg {
|
||||
font-style: italic;
|
||||
transition: opacity 300ms ease-in;
|
||||
}
|
||||
|
||||
.decl_header:not(:hover) .impl_arg {
|
||||
opacity: 30%;
|
||||
transition: opacity 1000ms ease-out;
|
||||
}
|
||||
|
||||
.gh_link {
|
||||
float: right;
|
||||
margin-left: 10px;
|
||||
}
|
||||
|
||||
.ink_link {
|
||||
float: right;
|
||||
margin-left: 20px;
|
||||
}
|
||||
|
||||
|
||||
.docfile h2, .note h2 {
|
||||
margin-block-start: 3px;
|
||||
margin-block-end: 0px;
|
||||
}
|
||||
|
||||
.docfile h2 a {
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
.tags {
|
||||
margin-bottom: 1ex;
|
||||
}
|
||||
|
||||
.tags ul {
|
||||
display: inline;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.tags li {
|
||||
border: 1px solid var(--tags-border-color);
|
||||
border-radius: 4px;
|
||||
list-style-type: none;
|
||||
padding: 1px 3px;
|
||||
margin-left: 1ex;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
/* used by nav.js */
|
||||
.hide { display: none; }
|
||||
|
||||
.tactic, .note {
|
||||
border-top: 3px solid #0479c7;
|
||||
padding-top: 2em;
|
||||
margin-top: 2em;
|
||||
margin-bottom: 2em;
|
||||
}
|