feat: implement facet

main
Henrik Böving 2022-08-11 21:37:10 +02:00
parent 6534a71cca
commit cdfd8ff49c
4 changed files with 129 additions and 78 deletions


@@ -19,20 +19,13 @@ Sets up a lake workspace for the current project. Furthermore initialize
the Lean search path with the path to the proper compiler from lean-toolchain
as well as all the dependencies.
-/
def lakeSetup (imports : List String) : IO (Except UInt32 Lake.Workspace) := do
def lakeSetup : IO (Except UInt32 Lake.Workspace) := do
let (leanInstall?, lakeInstall?) ← Lake.findInstall?
match ←(EIO.toIO' <| Lake.mkLoadConfig {leanInstall?, lakeInstall?}) with
| .ok config =>
let ws : Lake.Workspace ← Lake.loadWorkspace config
|>.run Lake.MonadLog.eio
|>.toIO (λ _ => IO.userError "Failed to load Lake workspace")
let libraryLeanGitHash := ws.lakeEnv.lean.githash
if libraryLeanGitHash ≠ Lean.githash then
IO.println s!"WARNING: This doc-gen was built with Lean: {Lean.githash} but the project is running on: {libraryLeanGitHash}"
let _libs ← ws.runBuild (Lake.buildImportsAndDeps imports) false
|>.run (Lake.MonadLog.eio config.verbosity)
|>.toIO (λ _ => IO.userError "Failed to compile imports via Lake")
initSearchPath (←findSysroot) (ws.packageList.map (·.oleanDir))
pure <| Except.ok ws
| .error err =>
throw <| IO.userError err.toString
@@ -62,4 +55,7 @@ def load (task : Process.AnalyzeTask) : IO (Process.AnalyzerResult × Hierarchy)
Prod.fst <$> Meta.MetaM.toIO (Process.process task) config { env := env } {} {}
def loadCore : IO (Process.AnalyzerResult × Hierarchy) := do
load <| .loadAll [`Init, `Std, `Lean]
end DocGen4


@@ -11,12 +11,11 @@ def getTopLevelModules (p : Parsed) : IO (List String) := do
pure topLevelModules
def runSingleCmd (p : Parsed) : IO UInt32 := do
let relevantModules := [p.positionalArg! "module" |>.as! String]
let res ← lakeSetup (relevantModules)
let relevantModules := [p.positionalArg! "module" |>.as! String |> String.toName]
let res ← lakeSetup
match res with
| Except.ok ws =>
let relevantModules := relevantModules.map String.toName
let (doc, hierarchy) ← load (.loadAllLimitAnalysis relevantModules)
let (doc, hierarchy) ← load <| .loadAllLimitAnalysis relevantModules
IO.println "Outputting HTML"
let baseConfig := getSimpleBaseContext hierarchy
htmlOutputResults baseConfig doc ws (p.hasFlag "ink")
@@ -29,22 +28,22 @@ def runIndexCmd (_p : Parsed) : IO UInt32 := do
htmlOutputIndex baseConfig
pure 0
def runDocGenCmd (p : Parsed) : IO UInt32 := do
let modules : List String := p.variableArgsAs! String |>.toList
if modules.length == 0 then
throw <| IO.userError "No modules provided."
let res ← lakeSetup modules
def runGenCoreCmd (_p : Parsed) : IO UInt32 := do
let res ← lakeSetup
match res with
| Except.ok ws =>
IO.println s!"Loading modules from: {←searchPathRef.get}"
let modules := modules.map String.toName
let (doc, hierarchy) ← load (.loadAll modules)
let (doc, hierarchy) ← loadCore
IO.println "Outputting HTML"
htmlOutput doc hierarchy ws (p.hasFlag "ink")
let baseConfig := getSimpleBaseContext hierarchy
htmlOutputResults baseConfig doc ws (ink := false)
pure 0
| Except.error rc => pure rc
def runDocGenCmd (_p : Parsed) : IO UInt32 := do
IO.println "You most likely want to use me via Lake now, check my README on Github on how to:"
IO.println "https://github.com/leanprover/doc-gen4"
return 0
def singleCmd := `[Cli|
single VIA runSingleCmd;
"Only generate the documentation for the module it was given, might contain broken links unless all documentation is generated."
@@ -63,19 +62,19 @@ def indexCmd := `[Cli|
...topLevelModule : String; "The top level modules this documentation will be for."
]
def genCoreCmd := `[Cli|
genCore VIA runGenCoreCmd;
"Generate documentation for the core Lean modules: Init, Std and Lean since they are not Lake projects"
]
def docGenCmd : Cmd := `[Cli|
"doc-gen4" VIA runDocGenCmd; ["0.0.1"]
"doc-gen4" VIA runDocGenCmd; ["0.1.0"]
"A documentation generator for Lean 4."
FLAGS:
ink; "Render the files with LeanInk in addition"
ARGS:
...modules : String; "The modules to generate the HTML for."
SUBCOMMANDS:
singleCmd;
indexCmd
indexCmd;
genCoreCmd
]
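-- Example direct invocations of the CLI (the Lake facets in the lakefile wrap these;
-- module names follow the README examples):
--   doc-gen4 single Init --ink
--   doc-gen4 genCore
--   doc-gen4 index Mathlib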
def main (args : List String) : IO UInt32 :=


@@ -2,48 +2,15 @@
Document Generator for Lean 4
## Usage
You can call `doc-gen4` from the top of a Lake project like this:
```sh
$ /path/to/doc-gen4 Module
`doc-gen4` is easiest to use via its custom Lake facet. In order
to do this you have to add it to your `lakefile.lean` like this:
```
where `Module` is one or more of the top level modules you want to document.
The tool will then proceed to compile the project using lake (if that hasn't happened yet),
analyze it and put the result in `./build/doc`.
You can optionally provide the path to a `LeanInk` binary using the `--ink` flag which will make
the tool produce `Alectryon` style rendered output alongside the usual documentation.
You could e.g. host the files locally with the built-in Python webserver:
```sh
$ cd build/doc && python -m http.server
meta if get_config? env = some "dev" then -- dev configuration so that not everyone has to build doc-gen4 itself
require «doc-gen4» from git "https://github.com/leanprover/doc-gen4" @ "main"
```
### Multi stage
You can also use `doc-gen4` in multiple separate stages to generate the whole documentation.
For example `mathlib4` consists of 4 modules, the 3 Lean compiler ones and itself:
- `Init`
- `Std`
- `Lean`
- `Mathlib`
The first build stage is to run doc-gen for all modules separately:
1. `doc-gen4 single Init`
2. `doc-gen4 single Std`
3. `doc-gen4 single Lean`
4. `doc-gen4 single Mathlib`
Note that you can also call it on submodules, so `Mathlib.Algebra`
will work standalone as well. Furthermore, you can use the `--ink` flag
here to generate LeanInk documentation in addition.
The second and last stage is the index one which zips up some
information relevant for the search:
```sh
$ doc-gen4 index Mathlib
Then you can generate documentation for an entire library using:
```
Now `build/doc` should contain the same files with the same content as if one had run
```
$ doc-gen4 Mathlib
lake -Kenv=dev build Test:docs
```
If you have multiple libraries you want to generate documentation for,
the recommended way right now is to run the facet once for each library.
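For instance, with two hypothetical libraries `Foo` and `Bar` declared in the lakefile, a sketch of such a run could look like this:
```sh
# Foo and Bar are placeholder library names; both runs write into the shared build/doc directory.
$ lake -Kenv=dev build Foo:docs
$ lake -Kenv=dev build Bar:docs
```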


@@ -26,3 +26,92 @@ require lake from git
require leanInk from git
"https://github.com/hargonix/LeanInk" @ "doc-gen"
module_facet docs (mod) : FilePath := do
let some docGen4 ← findLeanExe? `«doc-gen4»
| error "no doc-gen4 executable configuration found in workspace"
let exeJob ← docGen4.exe.fetch
let modJob ← mod.leanBin.fetch
let buildDir := (← getWorkspace).root.buildDir
let docFile := mod.filePath (buildDir / "doc") "html"
exeJob.bindAsync fun exeFile exeTrace => do
modJob.bindSync fun _ modTrace => do
let depTrace := exeTrace.mix modTrace
let trace ← buildFileUnlessUpToDate docFile depTrace do
logInfo s!"Documenting module: {mod.name}"
proc {
cmd := exeFile.toString
args := #["single", mod.name.toString, "--ink"]
env := #[("LEAN_PATH", (← getAugmentedLeanPath).toString)]
}
return (docFile, trace)
-- TODO: technically speaking this facet does not show all file dependencies
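-- The `coreDocs` target documents the Lean core libraries (Init, Std, Lean) via the
-- `genCore` subcommand and tracks the resulting declaration data file as its output.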
target coreDocs : FilePath := do
let some docGen4 ← findLeanExe? `«doc-gen4»
| error "no doc-gen4 executable configuration found in workspace"
let exeJob ← docGen4.exe.fetch
let basePath := (←getWorkspace).root.buildDir / "doc"
let dataFile := basePath / "declarations" / "declaration-data-Lean.bmp"
exeJob.bindSync fun exeFile exeTrace => do
let trace ← buildFileUnlessUpToDate dataFile exeTrace do
logInfo "Documenting Lean core: Init, Std, Lean"
proc {
cmd := exeFile.toString
args := #["genCore"]
env := #[("LEAN_PATH", (← getAugmentedLeanPath).toString)]
}
return (dataFile, trace)
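-- The `docs` library facet builds the `docs` facet of every local module of the library,
-- pulls in the core documentation and finally runs the `index` subcommand to produce
-- the search data, tracking the static assets emitted by doc-gen4 as well.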
library_facet docs (lib) : FilePath := do
let some docGen4Pkg ← findPackage? `«doc-gen4»
| error "no doc-gen4 package found in workspace"
let some docGen4 := docGen4Pkg.findLeanExe? `«doc-gen4»
| error "no doc-gen4 executable configuration found in workspace"
let exeJob ← docGen4.exe.fetch
-- XXX: Workaround remove later
let coreJob ← if h : docGen4Pkg.name = _package.name then
have : Fact (docGen4Pkg.name = _package.name) := Fact.mk h
let job := fetch <| docGen4Pkg.target `coreDocs
job
else
error "wrong package"
let moduleJobs ← BuildJob.mixArray (α := FilePath) <| ← lib.recBuildLocalModules #[`docs]
-- Shared with DocGen4.Output
let basePath := (←getWorkspace).root.buildDir / "doc"
let dataFile := basePath / "declarations" / "declaration-data.bmp"
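-- Static assets emitted by doc-gen4; their traces are mixed into the facet's output trace below.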
let staticFiles := #[
basePath / "style.css",
basePath / "declaration-data.js",
basePath / "nav.js",
basePath / "how-about.js",
basePath / "search.js",
basePath / "mathjax-config.js",
basePath / "instances.js",
basePath / "importedBy.js",
basePath / "index.html",
basePath / "404.html",
basePath / "navbar.html",
basePath / "find" / "index.html",
basePath / "find" / "find.js",
basePath / "src" / "alectryon.css",
basePath / "src" / "alectryon.js",
basePath / "src" / "docutils_basic.css",
basePath / "src" / "pygments.css"
]
coreJob.bindAsync fun _ coreInputTrace => do
exeJob.bindAsync fun exeFile exeTrace => do
moduleJobs.bindSync fun _ inputTrace => do
let depTrace := mixTraceArray #[inputTrace, exeTrace, coreInputTrace]
let trace ← buildFileUnlessUpToDate dataFile depTrace do
logInfo "Documentation indexing"
proc {
cmd := exeFile.toString
args := #["index"]
}
let traces ← staticFiles.mapM computeTrace
let indexTrace := mixTraceArray traces
return (dataFile, trace.mix indexTrace)