feat: Preparations to split doc-gen build process
parent 0f9b80abb2
commit 25b1ddb66b
@@ -208,14 +208,15 @@ open Lean
open LeanInk.Annotation.Alectryon
open scoped DocGen4.Jsx

def moduleToHtml (module : Process.Module) (inkPath : System.FilePath) (sourceFilePath : System.FilePath) : HtmlT IO Html := withReader (setCurrentName module.name) do
def moduleToHtml (module : Process.Module) (inkPath : System.FilePath) (sourceFilePath : System.FilePath) : HtmlT IO Html := withTheReader SiteBaseContext (setCurrentName module.name) do
  let json ← runInk inkPath sourceFilePath
  let fragments := fromJson? json
  match fragments with
  | .ok fragments =>
    let render := StateT.run (LeanInk.Annotation.Alectryon.renderFragments fragments) { counter := 0 }
    let ctx ← read
    let (html, _) := ReaderT.run render ctx
    let baseCtx ← readThe SiteBaseContext
    let (html, _) := render |>.run ctx baseCtx
    pure html
  | .error err => throw $ IO.userError s!"Error while parsing LeanInk Output: {err}"
@@ -38,7 +38,7 @@ def lakeSetup (imports : List String) : IO (Except UInt32 (Lake.Workspace × Str
Load a list of modules from the current Lean search path into an `Environment`
to process for documentation.
-/
def load (imports : List Name) : IO Process.AnalyzerResult := do
def load (imports : List Name) (transitiveModules : Bool) : IO (Process.AnalyzerResult × Hierarchy) := do
  let env ← importModules (List.map (Import.mk · false) imports) Options.empty
  IO.println "Processing modules"
  let config := {
@@ -49,6 +49,6 @@ def load (imports : List Name) : IO Process.AnalyzerResult := do
    fileName := default,
    fileMap := default,
  }
  Prod.fst <$> Meta.MetaM.toIO Process.process config { env := env} {} {}
  Prod.fst <$> Meta.MetaM.toIO (Process.process imports transitiveModules) config { env := env } {} {}

end DocGen4
@@ -11,86 +11,91 @@ import DocGen4.Output.Index
import DocGen4.Output.Module
import DocGen4.Output.NotFound
import DocGen4.Output.Find
import DocGen4.Output.Semantic
import DocGen4.Output.SourceLinker
import DocGen4.LeanInk.Output
import Std.Data.HashMap

namespace DocGen4

open Lean IO System Output Process
open Lean IO System Output Process Std

/--
The main entrypoint for outputting the documentation HTML based on an
`AnalyzerResult`.
-/
def htmlOutput (result : AnalyzerResult) (ws : Lake.Workspace) (leanHash: String) (inkPath : Option System.FilePath) : IO Unit := do
  let config := {
    depthToRoot := 0,
    result := result,
    currentName := none,
    sourceLinker := ←sourceLinker ws leanHash
    leanInkEnabled := inkPath.isSome
  }
def htmlOutputSetup (config : SiteBaseContext) : IO Unit := do
  let basePath := FilePath.mk "." / "build" / "doc"
  let srcBasePath := basePath / "src"
  let indexHtml := ReaderT.run index config
  let findHtml := ReaderT.run find { config with depthToRoot := 1 }
  let notFoundHtml := ReaderT.run notFound config
  -- Rendering the entire lean compiler takes time....
  --let sourceSearchPath := ((←Lean.findSysroot) / "src" / "lean") :: ws.root.srcDir :: ws.leanSrcPath
  let sourceSearchPath := ws.root.srcDir :: ws.leanSrcPath
  let declarationDataPath := basePath / "declaration-data.bmp"

  -- Base structure
  FS.createDirAll basePath
  FS.createDirAll (basePath / "find")
  FS.createDirAll (basePath / "semantic")
  FS.createDirAll srcBasePath

  let mut declList := #[]
  for (_, mod) in result.moduleInfo.toArray do
    for decl in filterMapDocInfo mod.members do
      let name := decl.getName.toString
      let config := { config with depthToRoot := 0 }
      let doc := decl.getDocString.getD ""
      let root := Id.run <| ReaderT.run (getRoot) config
      let link := root ++ s!"../semantic/{decl.getName.hash}.xml#"
      let docLink := Id.run <| ReaderT.run (declNameToLink decl.getName) config
      let sourceLink := Id.run <| ReaderT.run (getSourceUrl mod.name decl.getDeclarationRange) config
      let obj := Json.mkObj [("name", name), ("doc", doc), ("link", link), ("docLink", docLink), ("sourceLink", sourceLink)]
      declList := declList.push obj
      let xml := toString <| Id.run <| ReaderT.run (semanticXml decl) config
      FS.writeFile (basePath / "semantic" / s!"{decl.getName.hash}.xml") xml
  let json := Json.arr declList

  FS.writeFile (basePath / "semantic" / "docgen4.xml") <| toString <| Id.run <| ReaderT.run schemaXml config

  -- The three HTML files we always need
  let indexHtml := ReaderT.run index config
  FS.writeFile (basePath / "index.html") indexHtml.toString

  let notFoundHtml := ReaderT.run notFound config
  FS.writeFile (basePath / "404.html") notFoundHtml.toString

  let findHtml := ReaderT.run find { config with depthToRoot := 1 }
  FS.writeFile (basePath / "find" / "index.html") findHtml.toString

  -- The root JSON for find
  let topLevelModules := config.hierarchy.getChildren.toArray.map (Json.str ∘ toString ∘ Prod.fst)
  FS.writeFile declarationDataPath (Json.arr topLevelModules).compress

  -- All the static stuff
  FS.writeFile (basePath / "style.css") styleCss

  let declarationDataPath := basePath / "declaration-data.bmp"
  FS.writeFile declarationDataPath json.compress
  FS.writeFile (basePath / "declaration-data.timestamp") <| toString (←declarationDataPath.metadata).modified.sec

  FS.writeFile (basePath / "declaration-data.js") declarationDataCenterJs
  FS.writeFile (basePath / "nav.js") navJs
  FS.writeFile (basePath / "find" / "find.js") findJs
  FS.writeFile (basePath / "how-about.js") howAboutJs
  FS.writeFile (basePath / "search.js") searchJs
  FS.writeFile (basePath / "mathjax-config.js") mathjaxConfigJs

  -- All the static stuff for LeanInk
  FS.writeFile (srcBasePath / "alectryon.css") alectryonCss
  FS.writeFile (srcBasePath / "alectryon.js") alectryonJs
  FS.writeFile (srcBasePath / "docutils_basic.css") docUtilsCss
  FS.writeFile (srcBasePath / "pygments.css") pygmentsCss

def htmlOutputResults (baseConfig : SiteBaseContext) (result : AnalyzerResult) (ws : Lake.Workspace) (leanHash: String) (inkPath : Option System.FilePath) : IO Unit := do
  let config : SiteContext := {
    result := result,
    sourceLinker := ←sourceLinker ws leanHash
    leanInkEnabled := inkPath.isSome
  }
  let basePath := FilePath.mk "." / "build" / "doc"
  let srcBasePath := basePath / "src"
  -- Rendering the entire lean compiler takes time....
  --let sourceSearchPath := ((←Lean.findSysroot) / "src" / "lean") :: ws.root.srcDir :: ws.leanSrcPath
  let sourceSearchPath := ws.root.srcDir :: ws.leanSrcPath

  let mut declMap := HashMap.empty
  for (_, mod) in result.moduleInfo.toArray do
    let topLevelMod := mod.name.getRoot
    let mut jsonDecls := #[]
    for decl in filterMapDocInfo mod.members do
      let name := decl.getName.toString
      let doc := decl.getDocString.getD ""
      let docLink := declNameToLink decl.getName |>.run config baseConfig
      IO.println s!"DOC: {docLink}"
      let sourceLink := getSourceUrl mod.name decl.getDeclarationRange |>.run config baseConfig
      let json := Json.mkObj [("name", name), ("doc", doc), ("docLink", docLink), ("sourceLink", sourceLink)]
      jsonDecls := jsonDecls.push json
    let currentModDecls := declMap.findD topLevelMod #[]
    declMap := declMap.insert topLevelMod (currentModDecls ++ jsonDecls)

  for (topLevelMod, decls) in declMap.toList do
    FS.writeFile (basePath / s!"declaration-data-{topLevelMod}.bmp") (Json.arr decls).compress

  for (modName, module) in result.moduleInfo.toArray do
    let fileDir := moduleNameToDirectory basePath modName
    let filePath := moduleNameToFile basePath modName
    -- path: 'basePath/module/components/till/last.html'
    -- The last component is the file name, so we drop it from the depth to root.
    let config := { config with depthToRoot := modName.components.dropLast.length }
    let moduleHtml := ReaderT.run (moduleToHtml module) config
    let baseConfig := { baseConfig with depthToRoot := modName.components.dropLast.length }
    let moduleHtml := moduleToHtml module |>.run config baseConfig
    FS.createDirAll $ fileDir
    FS.writeFile filePath moduleHtml.toString
    if let some inkPath := inkPath then
@@ -98,12 +103,28 @@ def htmlOutput (result : AnalyzerResult) (ws : Lake.Workspace) (leanHash: String
      IO.println s!"Inking: {modName.toString}"
      -- path: 'basePath/src/module/components/till/last.html'
      -- The last component is the file name, however we are in src/ here so dont drop it this time
      let config := { config with depthToRoot := modName.components.length }
      let srcHtml ← ReaderT.run (LeanInk.moduleToHtml module inkPath inputPath) config
      let baseConfig := { baseConfig with depthToRoot := modName.components.length }
      let srcHtml ← LeanInk.moduleToHtml module inkPath inputPath |>.run config baseConfig
      let srcDir := moduleNameToDirectory srcBasePath modName
      let srcPath := moduleNameToFile srcBasePath modName
      FS.createDirAll srcDir
      FS.writeFile srcPath srcHtml.toString

def getSimpleBaseContext (hierarchy : Hierarchy) : SiteBaseContext :=
  {
    depthToRoot := 0,
    currentName := none,
    hierarchy
  }

/--
The main entrypoint for outputting the documentation HTML based on an
`AnalyzerResult`.
-/
def htmlOutput (result : AnalyzerResult) (hierarchy : Hierarchy) (ws : Lake.Workspace) (leanHash: String) (inkPath : Option System.FilePath) : IO Unit := do
  let baseConfig := getSimpleBaseContext hierarchy
  htmlOutputSetup baseConfig
  htmlOutputResults baseConfig result ws leanHash inkPath

end DocGen4
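The per-module `declaration-data-{topLevelMod}.bmp` files emitted by `htmlOutputResults` above are what the updated `declaration-data.js` later fetches and flattens. Below is a minimal, self-contained sketch of that grouping step; `DeclSummary`, `writeDeclarationData`, and the sample declaration names are hypothetical stand-ins for illustration, not doc-gen4 API.

```lean
import Lean
open Lean System

/-- One documented declaration, in roughly the shape the search index expects (hypothetical stand-in). -/
structure DeclSummary where
  name : Name
  doc : String
  docLink : String
  sourceLink : String

def DeclSummary.toJson (d : DeclSummary) : Json :=
  Json.mkObj [
    ("name", Json.str d.name.toString),
    ("doc", Json.str d.doc),
    ("docLink", Json.str d.docLink),
    ("sourceLink", Json.str d.sourceLink)
  ]

/-- Group declarations by the top-level root (`Name.getRoot`) of the module they live in. -/
def groupByRootModule (decls : Array (Name × DeclSummary)) : Array (Name × Array Json) := Id.run do
  let mut groups : Array (Name × Array Json) := #[]
  for (module, decl) in decls do
    let root := module.getRoot
    match groups.findIdx? (·.fst == root) with
    | some i => groups := groups.modify i (fun (k, v) => (k, v.push decl.toJson))
    | none => groups := groups.push (root, #[decl.toJson])
  return groups

/-- Write one `declaration-data-<Module>.bmp` file per top-level module. -/
def writeDeclarationData (basePath : FilePath) (decls : Array (Name × DeclSummary)) : IO Unit := do
  IO.FS.createDirAll basePath
  for (root, jsonDecls) in groupByRootModule decls do
    IO.FS.writeFile (basePath / s!"declaration-data-{root}.bmp") (Json.arr jsonDecls).compress

#eval writeDeclarationData (FilePath.mk "." / "build" / "doc-sketch") #[
  (`Mathlib.Data.Nat.Basic, ⟨`Nat.succ_le_iff, "a sample lemma", "Mathlib/Data/Nat/Basic.html#Nat.succ_le_iff", "https://example.com/src#1"⟩),
  (`Mathlib.Order.Lattice, ⟨`inf_le_left, "another sample lemma", "Mathlib/Order/Lattice.html#inf_le_left", "https://example.com/src#2"⟩)
]
```

Grouping by `Name.getRoot` mirrors the `declMap` loop above: every declaration coming from a `Mathlib.*` module ends up in a single `declaration-data-Mathlib.bmp`.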
@@ -13,13 +13,14 @@ open scoped DocGen4.Jsx
open Lean System Widget Elab Process

/--
The context used in the `HtmlM` monad for HTML templating.
The context used in the `BaseHtmlM` monad for HTML templating.
-/
structure SiteContext where
structure SiteBaseContext where

  /--
  The full analysis result from the Process module.
  The module hierarchy as a tree structure.
  -/
  result : AnalyzerResult
  hierarchy : Hierarchy
  /--
  How far away we are from the page root, used for relative links to the root.
  -/
@@ -29,6 +30,15 @@ structure SiteContext where
  pages that don't have a module name.
  -/
  currentName : Option Name

/--
The context used in the `HtmlM` monad for HTML templating.
-/
structure SiteContext where
  /--
  The full analysis result from the Process module.
  -/
  result : AnalyzerResult
  /--
  A function to link declaration names to their source URLs, usually Github ones.
  -/
@@ -38,50 +48,63 @@ structure SiteContext where
  -/
  leanInkEnabled : Bool

def setCurrentName (name : Name) (ctx : SiteContext) := {ctx with currentName := some name}
def setCurrentName (name : Name) (ctx : SiteBaseContext) := {ctx with currentName := some name}

abbrev HtmlT := ReaderT SiteContext
abbrev BaseHtmlT := ReaderT SiteBaseContext
abbrev BaseHtmlM := BaseHtmlT Id

abbrev HtmlT (m) := ReaderT SiteContext (BaseHtmlT m)
abbrev HtmlM := HtmlT Id

def HtmlT.run (x : HtmlT m α) (ctx : SiteContext) (baseCtx : SiteBaseContext) : m α :=
  ReaderT.run x ctx |>.run baseCtx

def HtmlM.run (x : HtmlM α) (ctx : SiteContext) (baseCtx : SiteBaseContext) : α :=
  ReaderT.run x ctx |>.run baseCtx |>.run

/--
Obtains the root URL as a relative one to the current depth.
-/
def getRoot : HtmlM String := do
def getRoot : BaseHtmlM String := do
  let rec go: Nat -> String
    | 0 => "./"
    | Nat.succ n' => "../" ++ go n'
  let d <- SiteContext.depthToRoot <$> read
  let d <- SiteBaseContext.depthToRoot <$> read
  return (go d)

def getHierarchy : BaseHtmlM Hierarchy := do pure (←read).hierarchy
def getCurrentName : BaseHtmlM (Option Name) := do pure (←read).currentName
def getResult : HtmlM AnalyzerResult := do pure (←read).result
def getCurrentName : HtmlM (Option Name) := do pure (←read).currentName
def getSourceUrl (module : Name) (range : Option DeclarationRange): HtmlM String := do pure $ (←read).sourceLinker module range
def leanInkEnabled? : HtmlM Bool := do pure (←read).leanInkEnabled

/--
If a template is meant to be extended because it for example only provides the
header but no real content this is the way to fill the template with content.
This is untyped so HtmlM and BaseHtmlM can be mixed.
-/
def templateExtends {α β : Type} (base : α → HtmlM β) (new : HtmlM α) : HtmlM β :=
def templateExtends {α β} {m} [Bind m] (base : α → m β) (new : m α) : m β :=
  new >>= base

def templateLiftExtends {α β} {m n} [Bind m] [MonadLift n m] (base : α → n β) (new : m α) : m β :=
  new >>= (monadLift ∘ base)

/--
Returns the doc-gen4 link to a module name.
-/
def moduleNameToLink (n : Name) : HtmlM String := do
def moduleNameToLink (n : Name) : BaseHtmlM String := do
  let parts := n.components.map Name.toString
  pure $ (← getRoot) ++ (parts.intersperse "/").foldl (· ++ ·) "" ++ ".html"

/--
Returns the HTML doc-gen4 link to a module name.
-/
def moduleToHtmlLink (module : Name) : HtmlM Html := do
def moduleToHtmlLink (module : Name) : BaseHtmlM Html := do
  pure <a href={←moduleNameToLink module}>{module.toString}</a>

/--
Returns the LeanInk link to a module name.
-/
def moduleNameToInkLink (n : Name) : HtmlM String := do
def moduleNameToInkLink (n : Name) : BaseHtmlM String := do
  let parts := "src" :: n.components.map Name.toString
  pure $ (← getRoot) ++ (parts.intersperse "/").foldl (· ++ ·) "" ++ ".html"
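The layered reader stack above (`BaseHtmlT` underneath `HtmlT`) is the core of this change: `BaseHtmlM` templates only need the `SiteBaseContext`, while `HtmlM` code can still reach it through `readThe` and `withTheReader`. Here is a self-contained sketch of the same shape using toy contexts; `BaseCtx`, `SiteCtx`, `SiteM`, and friends are illustrative stand-ins, not the real doc-gen4 types.

```lean
/-- Toy stand-ins for `SiteBaseContext` and `SiteContext`. -/
structure BaseCtx where
  depthToRoot : Nat

structure SiteCtx where
  leanInkEnabled : Bool

/-- Same layering as `BaseHtmlT`/`HtmlT`: an inner reader for the base context,
wrapped by an outer reader for the full site context. -/
abbrev BaseM := ReaderT BaseCtx Id
abbrev SiteM := ReaderT SiteCtx BaseM

/-- `readThe` selects the reader layer by type, which is how `readThe SiteBaseContext`
works inside `HtmlT IO` above. -/
def describe : SiteM String := do
  let base ← readThe BaseCtx
  let site ← readThe SiteCtx
  return s!"depth={base.depthToRoot}, ink={site.leanInkEnabled}"

/-- `withTheReader` updates only the layer named by the type, mirroring
`withTheReader SiteBaseContext (setCurrentName module.name)`. -/
def atDepth (n : Nat) (x : SiteM α) : SiteM α :=
  withTheReader BaseCtx (fun ctx => { ctx with depthToRoot := n }) x

/-- Threads both contexts through the stack, like `HtmlM.run` above. -/
def SiteM.run (x : SiteM α) (ctx : SiteCtx) (baseCtx : BaseCtx) : α :=
  ReaderT.run x ctx |>.run baseCtx |>.run

#eval SiteM.run (atDepth 2 describe) { leanInkEnabled := true } { depthToRoot := 0 }
-- "depth=2, ink=true"
```

The split means a page that only needs navigation data can be rendered from the base layer alone, without an `AnalyzerResult` in scope.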
@@ -6,13 +6,13 @@ namespace Output
open scoped DocGen4.Jsx
open Lean

def find : HtmlM Html := do
def find : BaseHtmlM Html := do
  pure
    <html lang="en">
      <head>
        <link rel="preload" href={s!"{←getRoot}declaration-data.bmp"}/>
        <script>{s!"const SITE_ROOT={String.quote (←getRoot)};"}</script>
        <script type="module" async="true" src={s!"./find.js"}></script>
        <script type="module" async="true" src="./find.js"></script>
      </head>
      <body></body>
    </html>
@@ -11,7 +11,7 @@ namespace Output

open scoped DocGen4.Jsx

def index : HtmlM Html := do templateExtends (baseHtml "Index") $ pure $
def index : BaseHtmlM Html := do templateExtends (baseHtml "Index") $ pure $
  <main>
    <a id="top"></a>
    <h1> Welcome to the documentation page </h1>
@@ -233,10 +233,10 @@ def internalNav (members : Array Name) (moduleName : Name) : HtmlM Html := do
/--
The main entry point to rendering the HTML for an entire module.
-/
def moduleToHtml (module : Process.Module) : HtmlM Html := withReader (setCurrentName module.name) do
def moduleToHtml (module : Process.Module) : HtmlM Html := withTheReader SiteBaseContext (setCurrentName module.name) do
  let memberDocs ← module.members.mapM (λ i => moduleMemberToHtml module.name i)
  let memberNames := filterMapDocInfo module.members |>.map DocInfo.getName
  templateExtends (baseHtmlGenerator module.name.toString) $ pure #[
  templateLiftExtends (baseHtmlGenerator module.name.toString) $ pure #[
    ←internalNav memberNames module.name,
    Html.element "main" false #[] memberDocs
  ]
@@ -13,7 +13,7 @@ namespace Output
open Lean
open scoped DocGen4.Jsx

def moduleListFile (file : Name) : HtmlM Html := do
def moduleListFile (file : Name) : BaseHtmlM Html := do
  pure <div class={if (← getCurrentName) == file then "nav_link visible" else "nav_link"}>
    {←moduleToHtmlLink file}
  </div>
@@ -21,7 +21,7 @@ def moduleListFile (file : Name) : HtmlM Html := do
/--
Build the HTML tree representing the module hierarchy.
-/
partial def moduleListDir (h : Hierarchy) : HtmlM Html := do
partial def moduleListDir (h : Hierarchy) : BaseHtmlM Html := do
  let children := Array.mk (h.getChildren.toList.map Prod.snd)
  let dirs := children.filter (λ c => c.getChildren.toList.length != 0)
  let files := children.filter (λ c => Hierarchy.isFile c ∧ c.getChildren.toList.length = 0)
@@ -30,7 +30,7 @@ partial def moduleListDir (h : Hierarchy) : HtmlM Html := do
  let fileNodes ← (files.mapM moduleListFile)
  let moduleLink ← moduleNameToLink h.getName
  let summary :=
    if (←getResult).moduleInfo.contains h.getName then
    if h.isFile then
      <summary>{←moduleToHtmlLink h.getName}</summary>
    else
      <summary>{h.getName.toString}</summary>
@@ -45,17 +45,17 @@ partial def moduleListDir (h : Hierarchy) : HtmlM Html := do
/--
Return a list of top level modules, linkified and rendered as HTML
-/
def moduleList : HtmlM Html := do
  let hierarchy := (←getResult).hierarchy
def moduleList : BaseHtmlM Html := do
  let hierarchy ← getHierarchy
  let mut list := Array.empty
  for (n, cs) in hierarchy.getChildren do
  for (_, cs) in hierarchy.getChildren do
    list := list.push $ ←moduleListDir cs
  pure <div class="module_list">[list]</div>

/--
The main entry point to rendering the navbar on the left hand side.
-/
def navbar : HtmlM Html := do
def navbar : BaseHtmlM Html := do
  pure
    <nav class="nav">
      <h3>General documentation</h3>
@@ -14,7 +14,7 @@ open scoped DocGen4.Jsx
/--
Render the 404 page.
-/
def notFound : HtmlM Html := do templateExtends (baseHtml "404") $ pure $
def notFound : BaseHtmlM Html := do templateExtends (baseHtml "404") $ pure $
  <main>
    <h1>404 Not Found</h1>
    <p> Unfortunately, the page you were looking for is no longer here. </p>
@@ -1,59 +0,0 @@
import DocGen4.Output.Template
import DocGen4.Output.DocString
import DocGen4.Process
import Lean.Data.Xml

open Lean Xml DocGen4.Process

namespace DocGen4
namespace Output

instance : ToString $ Array Element where
  toString xs := xs.map toString |>.foldl String.append ""

instance : Coe Element Content where
  coe e := Content.Element e

-- TODO: syntax metaprogramming and basic semantic data

def semanticXml (i : DocInfo) : HtmlM $ Array Element := do
  pure #[
    Element.Element
      "rdf:RDF"
      (Std.RBMap.fromList [
        ("xmlns:rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
        ("xmlns:docgen4", s!"{←getRoot}semactic/docgen4.xml#")
      ] _)
      #[
        Element.Element
          "rdf:Description"
          (Std.RBMap.fromList [
            ("rdf:about", s!"{←getRoot}semactic/{i.getName.hash}.xml#")
          ] _)
          #[]
      ]
  ]

def schemaXml : HtmlM $ Array Element := do
  pure #[
    Element.Element
      "rdf:RDF"
      (Std.RBMap.fromList [
        ("xmlns:rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
        ("xmlns:docgen4", s!"{←getRoot}semactic/docgen4.xml#")
      ] _)
      #[
        Element.Element
          "docgen4:hasInstance"
          Std.RBMap.empty
          #[
            Element.Element
              "rdfs:type"
              Std.RBMap.empty
              #[Content.Character "rdf:Property"]
          ]
      ]
  ]

end Output
end DocGen4
@@ -14,7 +14,7 @@ open scoped DocGen4.Jsx
/--
The HTML template used for all pages.
-/
def baseHtmlGenerator (title : String) (site : Array Html) : HtmlM Html := do
def baseHtmlGenerator (title : String) (site : Array Html) : BaseHtmlM Html := do
  pure
    <html lang="en">
      <head>
@@ -66,7 +66,7 @@ def baseHtmlGenerator (title : String) (site : Array Html) : HtmlM Html := do
/--
A comfortability wrapper around `baseHtmlGenerator`.
-/
def baseHtml (title : String) (site : Html) : HtmlM Html := baseHtmlGenerator title #[site]
def baseHtml (title : String) (site : Html) : BaseHtmlM Html := baseHtmlGenerator title #[site]

end Output
end DocGen4
@@ -6,14 +6,20 @@ Authors: Henrik Böving

import Lean
import Std.Data.HashMap
import Std.Data.HashSet

import DocGen4.Process.Base
import DocGen4.Process.Hierarchy
import DocGen4.Process.DocInfo

open Std

def HashSet.fromArray [BEq α] [Hashable α] (xs : Array α) : HashSet α :=
  xs.foldr (flip .insert) .empty

namespace DocGen4.Process

open Lean Meta Std
open Lean Meta

/--
Member of a module, either a declaration or some module doc string.
@@ -54,10 +60,6 @@ structure AnalyzerResult where
  -/
  moduleInfo : HashMap Name Module
  /--
  The module hierarchy as a tree structure.
  -/
  hierarchy : Hierarchy
  /--
  An adjacency matrix for the import relation between modules, indexed
  my the values in `name2ModIdx`.
  -/
@@ -86,21 +88,33 @@ def getDocString : ModuleMember → Option String

end ModuleMember

def getRelevantModules (imports : List Name) : MetaM (HashSet Name) := do
  let env ← getEnv
  let mut relevant := .empty
  for module in env.header.moduleNames do
    if module.getRoot ∈ imports then
      relevant := relevant.insert module
  pure relevant

/--
Run the doc-gen analysis on all modules that are loaded into the `Environment`
of this `MetaM` run.
-/
def process : MetaM AnalyzerResult := do
def process (imports : List Name) (transitiveModules : Bool) : MetaM (AnalyzerResult × Hierarchy) := do
  let env ← getEnv
  let mut res := mkHashMap env.header.moduleNames.size
  for module in env.header.moduleNames do
    let modDocs := match getModuleDoc? env module with
      | none => #[]
      | some ds => ds
      |>.map (λ doc => ModuleMember.modDoc doc)
  let relevantNames := if transitiveModules then (HashSet.fromArray env.header.moduleNames) else (←getRelevantModules imports)
  let mut res := mkHashMap relevantNames.size
  for module in relevantNames.toArray do
    let modDocs := getModuleDoc? env module |>.getD #[] |>.map .modDoc
    res := res.insert module (Module.mk module modDocs)

  for cinfo in env.constants.toList do
  for (name, cinfo) in env.constants.toList do
    let some modidx := env.getModuleIdxFor? name | unreachable!
    let moduleName := env.allImportedModuleNames.get! modidx
    -- skip irrelevant names
    if !relevantNames.contains moduleName.getRoot then
      continue

    try
      let config := {
        maxHeartbeats := 5000000,
@@ -108,14 +122,13 @@ def process : MetaM AnalyzerResult := do
        fileName := ←getFileName,
        fileMap := ←getFileMap
      }
      let analysis := Prod.fst <$> Meta.MetaM.toIO (DocInfo.ofConstant cinfo) config { env := env} {} {}
      let analysis := Prod.fst <$> Meta.MetaM.toIO (DocInfo.ofConstant (name, cinfo)) config { env := env} {} {}
      if let some dinfo ← analysis then
        let some modidx := env.getModuleIdxFor? dinfo.getName | unreachable!
        let moduleName := env.allImportedModuleNames.get! modidx
        let module := res.find! moduleName
        res := res.insert moduleName {module with members := module.members.push (ModuleMember.docInfo dinfo)}
    catch e =>
      IO.println s!"WARNING: Failed to obtain information for: {cinfo.fst}: {←e.toMessageData.toString}"
      IO.println s!"WARNING: Failed to obtain information for: {name}: {←e.toMessageData.toString}"

  -- TODO: This is definitely not the most efficient way to store this data
  let mut adj := Array.mkArray res.size (Array.mkArray res.size false)
@@ -128,13 +141,14 @@ def process : MetaM AnalyzerResult := do
      let some importIdx := env.getModuleIdx? imp.module | unreachable!
      adj := adj.set! modIdx (adj.get! modIdx |>.set! importIdx true)

  pure {
  let hierarchy := Hierarchy.fromArray relevantNames.toArray
  let analysis := {
    name2ModIdx := env.const2ModIdx,
    moduleNames := env.header.moduleNames,
    moduleInfo := res,
    hierarchy := Hierarchy.fromArray env.header.moduleNames,
    importAdj := adj
  }
  pure (analysis, hierarchy)

def filterMapDocInfo (ms : Array ModuleMember) : Array DocInfo :=
  ms.filterMap filter
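`getRelevantModules` above is what makes the non-transitive mode possible: only modules whose root matches one of the requested imports are analyzed, and the `Hierarchy` is now built from that same set. A small self-contained sketch of the filter follows; the function name and the sample module names are illustrative, not the doc-gen4 API.

```lean
import Lean
open Lean

/-- Keep exactly the loaded modules whose top-level root is one of the requested imports,
the same predicate `getRelevantModules` applies to `env.header.moduleNames`. -/
def relevantModules (imports : List Name) (loaded : Array Name) : Array Name :=
  loaded.filter (fun module => imports.contains module.getRoot)

#eval (relevantModules [`Mathlib]
  #[`Mathlib.Data.Nat.Basic, `Std.Data.HashMap, `Mathlib.Order.Lattice, `Lean.Elab.Command]).map (·.toString)
-- #["Mathlib.Data.Nat.Basic", "Mathlib.Order.Lattice"]
```

When `transitiveModules` is set, `process` skips this filter and takes every module in the environment header, which reproduces the old behaviour.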
@@ -27,6 +27,9 @@ namespace HierarchyMap
def toList : HierarchyMap → List (Name × Hierarchy)
  | t => t.revFold (fun ps k v => (k, v)::ps) []

def toArray : HierarchyMap → Array (Name × Hierarchy)
  | t => t.fold (fun ps k v => ps ++ #[(k, v)] ) #[]

def hForIn [Monad m] (t : HierarchyMap) (init : σ) (f : (Name × Hierarchy) → σ → m (ForInStep σ)) : m σ :=
  t.forIn init (fun a b acc => f (a, b) acc)
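The new `toArray` and `hForIn` helpers exist so a `HierarchyMap` can be iterated directly, as in the `for (_, cs) in hierarchy.getChildren do` loop in the navbar code above. The usual way to wire such a helper up is a `ForIn` instance; here is a toy, self-contained version of that pattern (`ToyMap` and its contents are made up for illustration):

```lean
/-- A toy map, standing in for `HierarchyMap`. -/
structure ToyMap where
  entries : List (String × Nat)

/-- Delegate iteration to the underlying list, much like `hForIn` delegates to the
underlying tree's `forIn`. -/
instance : ForIn m ToyMap (String × Nat) where
  forIn t init f := ForIn.forIn t.entries init f

#eval Id.run do
  let mut total := 0
  for (_, v) in ToyMap.mk [("a", 1), ("b", 2), ("c", 3)] do
    total := total + v
  return total
-- 6
```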
@@ -58,7 +61,7 @@ partial def insert! (h : Hierarchy) (n : Name) : Hierarchy := Id.run $ do
    | none =>
      node hn h.isFile (cs.insert Name.cmp n $ empty n true)
    | some (node _ true _) => h
    | some hierarchy@(node _ false ccs) =>
    | some (node _ false ccs) =>
      cs := cs.erase Name.cmp n
      node hn h.isFile (cs.insert Name.cmp n $ node n true ccs)
  else
Main.lean
@@ -4,24 +4,52 @@ import Cli

open DocGen4 Lean Cli

def findLeanInk? (p : Parsed) : IO (Option System.FilePath) := do
  match p.flag? "ink" with
  | some ink =>
    let inkPath := System.FilePath.mk ink.value
    if ←inkPath.pathExists then
      pure $ some inkPath
    else
      throw $ IO.userError "Invalid path to LeanInk binary provided"
  | none => pure none

def runDocGenCmd (p : Parsed) : IO UInt32 := do
  let modules : List String := p.variableArgsAs! String |>.toList
  let res ← lakeSetup modules
  match res with
  | Except.ok (ws, leanHash) =>
    IO.println s!"Loading modules from: {←searchPathRef.get}"
    let doc ← load $ modules.map Name.mkSimple
    IO.println "Outputting HTML"
    match p.flag? "ink" with
    | some ink =>
      let inkPath := System.FilePath.mk ink.value
      if ←inkPath.pathExists then
        htmlOutput doc ws leanHash inkPath
      else
        throw $ IO.userError "Invalid path to LeanInk binary provided"
    | none => htmlOutput doc ws leanHash none
    pure 0
  | Except.error rc => pure rc
  if p.hasFlag "single" && p.hasFlag "setup" then
    throw $ IO.userError "Can't have single and setup at the same time"
  else
    let res ← lakeSetup modules
    let modules := modules.map Name.mkSimple
    match res with
    | Except.ok (ws, leanHash) =>
      IO.println s!"Loading modules from: {←searchPathRef.get}"
      --if p.hasFlag "single" then
      --  if modules.length ≠ 1 then
      --    throw $ IO.userError "Called single with more than a single module"
      --  else
      --    let (doc, hierarchy) ← load modules false
      --    IO.println "Outputting HTML"
      --    let baseConfig := getSimpleBaseContext hierarchy
      --    htmlOutputResults baseConfig doc ws leanHash (←findLeanInk? p)
      --    pure 0
      --else if p.hasFlag "setup" then
      --  let config := {
      --    fileName := default,
      --    fileMap := default,
      --  }
      --  let env ← importModules (List.map (Import.mk · false) modules) Options.empty
      --  let relevantModules ← Prod.fst <$> Meta.MetaM.toIO (Process.getRelevantModules modules) config { env := env } {}
      --  let hierarchy := Hierarchy.fromArray relevantModules.toArray
      --  let baseConfig := getSimpleBaseContext hierarchy
      --  htmlOutputSetup baseConfig
      --  pure 0
      --else
      let (doc, hierarchy) ← load modules true
      IO.println "Outputting HTML"
      htmlOutput doc hierarchy ws leanHash (←findLeanInk? p)
      pure 0
    | Except.error rc => pure rc

def docGenCmd : Cmd := `[Cli|
  "doc-gen4" VIA runDocGenCmd; ["0.0.1"]
@@ -29,6 +57,8 @@ def docGenCmd : Cmd := `[Cli|

  FLAGS:
    ink : String; "Path to a LeanInk binary to use for rendering the Lean sources."
    --single; "Generate documentation only for a single module, will cause broken links if there are others"
    --setup; "Only output the files that are always required"

  ARGS:
    ...modules : String; "The modules to generate the HTML for"
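The commented-out `--single` and `--setup` branches above sketch the end goal of this commit: the always-required base files and the per-module results become independent build steps that share only the `SiteBaseContext`. Below is a minimal stand-alone sketch of that composition; every name and path here is an illustrative stub, not a real doc-gen4 entrypoint.

```lean
/-- Stand-in for the shared `SiteBaseContext`. -/
structure BuildCtx where
  outputDir : System.FilePath

/-- Stand-in for `htmlOutputSetup`: only the files that are always required. -/
def setupPhase (ctx : BuildCtx) : IO Unit :=
  IO.println s!"[setup] writing index/404/static assets to {ctx.outputDir}"

/-- Stand-in for `htmlOutputResults`: the per-module HTML and declaration data. -/
def resultsPhase (ctx : BuildCtx) (modules : List String) : IO Unit := do
  for m in modules do
    IO.println s!"[results] rendering {m} into {ctx.outputDir}"

/-- Stand-in for the new `htmlOutput`: it still runs both phases in sequence today,
but each phase needs only the shared context, so they can later be driven by
separate `--setup` / `--single` invocations. -/
def fullBuild (modules : List String) : IO Unit := do
  let ctx : BuildCtx := { outputDir := System.FilePath.mk "." / "build" / "doc" }
  setupPhase ctx
  resultsPhase ctx modules

#eval fullBuild ["Mathlib", "Std"]
```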
@@ -6,6 +6,17 @@

const CACHE_DB_NAME = "declaration-data";
const CACHE_DB_VERSION = 1;
const CACHE_DB_KEY = "DECLARATIONS_KEY";

async function fetchModuleData(module) {
  const moduleDataUrl = new URL(
    `${SITE_ROOT}declaration-data-${module}.bmp`,
    window.location
  );
  const moduleData = await fetch(moduleDataUrl);
  const moduleDataJson = await moduleData.json();
  return moduleDataJson;
}

/**
 * The DeclarationDataCenter is used for declaration searching.
@@ -41,42 +52,39 @@ export class DeclarationDataCenter {
   */
  static async init() {
    if (!DeclarationDataCenter.singleton) {
      const timestampUrl = new URL(
        `${SITE_ROOT}declaration-data.timestamp`,
        window.location
      );
      const dataUrl = new URL(
      const dataListUrl = new URL(
        `${SITE_ROOT}declaration-data.bmp`,
        window.location
      );

      const timestampRes = await fetch(timestampUrl);
      const timestamp = await timestampRes.text();

      // try to use cache first
      const data = await fetchCachedDeclarationData(timestamp).catch(_e => null);
      const data = await fetchCachedDeclarationData().catch(_e => null);
      if (data) {
        // if data is defined, use the cached one.
        DeclarationDataCenter.singleton = new DeclarationDataCenter(data);
      } else {
        // undefined. then fetch the data from the server.
        const dataRes = await fetch(dataUrl);
        const dataJson = await dataRes.json();
        const dataListRes = await fetch(dataListUrl);
        const dataListJson = await dataListRes.json();

        // TODO: this is probably kind of inefficient
        const dataJsonUnflattened = await Promise.all(dataListJson.map(fetchModuleData));

        const dataJson = dataJsonUnflattened.flat();
        // the data is a map of name (original case) to declaration data.
        const data = new Map(
          dataJson.map(({ name, doc, link, docLink, sourceLink }) => [
          dataJson.map(({ name, doc, docLink, sourceLink }) => [
            name,
            {
              name,
              lowerName: name.toLowerCase(),
              lowerDoc: doc.toLowerCase(),
              link,
              docLink,
              sourceLink,
            },
          ])
        );
        await cacheDeclarationData(timestamp, data);
        await cacheDeclarationData(data);
        DeclarationDataCenter.singleton = new DeclarationDataCenter(data);
      }
    }
@@ -137,7 +145,6 @@ function getMatches(declarations, pattern, maxResults = 30) {
    name,
    lowerName,
    lowerDoc,
    link,
    docLink,
    sourceLink,
  } of declarations.values()) {
@@ -156,7 +163,6 @@ function getMatches(declarations, pattern, maxResults = 30) {
        err,
        lowerName,
        lowerDoc,
        link,
        docLink,
        sourceLink,
      });
@@ -195,17 +201,16 @@ async function getDeclarationDatabase() {

/**
 * Store data in indexedDB object store.
 * @param {string} timestamp
 * @param {Map<string, any>} data
 */
async function cacheDeclarationData(timestamp, data) {
async function cacheDeclarationData(data) {
  let db = await getDeclarationDatabase();
  let store = db
    .transaction("declaration", "readwrite")
    .objectStore("declaration");
  return new Promise((resolve, reject) => {
    let clearRequest = store.clear();
    let addRequest = store.add(data, timestamp);
    let addRequest = store.add(data, CACHE_DB_KEY);

    addRequest.onsuccess = function (event) {
      resolve();
@@ -221,16 +226,15 @@ async function cacheDeclarationData(timestamp, data) {

/**
 * Retrieve data from indexedDB database.
 * @param {string} timestamp
 * @returns {Promise<Map<string, any>|undefined>}
 */
async function fetchCachedDeclarationData(timestamp) {
async function fetchCachedDeclarationData() {
  let db = await getDeclarationDatabase();
  let store = db
    .transaction("declaration", "readonly")
    .objectStore("declaration");
  return new Promise((resolve, reject) => {
    let transactionRequest = store.get(timestamp);
    let transactionRequest = store.get(CACHE_DB_KEY);
    transactionRequest.onsuccess = function (event) {
      resolve(event.result);
    };