feat: Preparations to split doc-gen build process
parent 0f9b80abb2
commit 25b1ddb66b
@@ -208,14 +208,15 @@ open Lean
 open LeanInk.Annotation.Alectryon
 open scoped DocGen4.Jsx
 
-def moduleToHtml (module : Process.Module) (inkPath : System.FilePath) (sourceFilePath : System.FilePath) : HtmlT IO Html := withReader (setCurrentName module.name) do
+def moduleToHtml (module : Process.Module) (inkPath : System.FilePath) (sourceFilePath : System.FilePath) : HtmlT IO Html := withTheReader SiteBaseContext (setCurrentName module.name) do
   let json ← runInk inkPath sourceFilePath
   let fragments := fromJson? json
   match fragments with
   | .ok fragments =>
     let render := StateT.run (LeanInk.Annotation.Alectryon.renderFragments fragments) { counter := 0 }
     let ctx ← read
-    let (html, _) := ReaderT.run render ctx
+    let baseCtx ← readThe SiteBaseContext
+    let (html, _) := render |>.run ctx baseCtx
     pure html
   | .error err => throw $ IO.userError s!"Error while parsing LeanInk Output: {err}"
 
@@ -38,7 +38,7 @@ def lakeSetup (imports : List String) : IO (Except UInt32 (Lake.Workspace × Str
 Load a list of modules from the current Lean search path into an `Environment`
 to process for documentation.
 -/
-def load (imports : List Name) : IO Process.AnalyzerResult := do
+def load (imports : List Name) (transitiveModules : Bool) : IO (Process.AnalyzerResult × Hierarchy) := do
   let env ← importModules (List.map (Import.mk · false) imports) Options.empty
   IO.println "Processing modules"
   let config := {
@@ -49,6 +49,6 @@ def load (imports : List Name) : IO Process.AnalyzerResult := do
     fileName := default,
     fileMap := default,
   }
-  Prod.fst <$> Meta.MetaM.toIO Process.process config { env := env} {} {}
+  Prod.fst <$> Meta.MetaM.toIO (Process.process imports transitiveModules) config { env := env } {} {}
 
 end DocGen4
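Note: with this change `load` returns the module `Hierarchy` next to the `AnalyzerResult`, and the new `transitiveModules` flag decides whether transitively imported modules are analyzed as well. A minimal caller sketch under that assumption; `buildDocs` is hypothetical and the Lean search path is assumed to be initialized already (e.g. via `lakeSetup`):

    import DocGen4
    open DocGen4 Lean

    -- hypothetical driver, not part of this commit
    def buildDocs (modules : List Name) : IO Unit := do
      -- `false`: analyze only the requested modules, not everything they import
      let (result, hierarchy) ← load modules false
      IO.println s!"analyzed {result.moduleInfo.size} modules, {hierarchy.getChildren.toList.length} top-level entries"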
@@ -11,86 +11,91 @@ import DocGen4.Output.Index
 import DocGen4.Output.Module
 import DocGen4.Output.NotFound
 import DocGen4.Output.Find
-import DocGen4.Output.Semantic
 import DocGen4.Output.SourceLinker
 import DocGen4.LeanInk.Output
+import Std.Data.HashMap
 
 namespace DocGen4
 
-open Lean IO System Output Process
+open Lean IO System Output Process Std
 
-/--
-The main entrypoint for outputting the documentation HTML based on an
-`AnalyzerResult`.
--/
-def htmlOutput (result : AnalyzerResult) (ws : Lake.Workspace) (leanHash: String) (inkPath : Option System.FilePath) : IO Unit := do
-  let config := {
-    depthToRoot := 0,
-    result := result,
-    currentName := none,
-    sourceLinker := ←sourceLinker ws leanHash
-    leanInkEnabled := inkPath.isSome
-  }
+def htmlOutputSetup (config : SiteBaseContext) : IO Unit := do
   let basePath := FilePath.mk "." / "build" / "doc"
   let srcBasePath := basePath / "src"
-  let indexHtml := ReaderT.run index config
-  let findHtml := ReaderT.run find { config with depthToRoot := 1 }
-  let notFoundHtml := ReaderT.run notFound config
-  -- Rendering the entire lean compiler takes time....
-  --let sourceSearchPath := ((←Lean.findSysroot) / "src" / "lean") :: ws.root.srcDir :: ws.leanSrcPath
-  let sourceSearchPath := ws.root.srcDir :: ws.leanSrcPath
+  let declarationDataPath := basePath / "declaration-data.bmp"
 
+  -- Base structure
   FS.createDirAll basePath
   FS.createDirAll (basePath / "find")
-  FS.createDirAll (basePath / "semantic")
   FS.createDirAll srcBasePath
 
-  let mut declList := #[]
-  for (_, mod) in result.moduleInfo.toArray do
-    for decl in filterMapDocInfo mod.members do
-      let name := decl.getName.toString
-      let config := { config with depthToRoot := 0 }
-      let doc := decl.getDocString.getD ""
-      let root := Id.run <| ReaderT.run (getRoot) config
-      let link := root ++ s!"../semantic/{decl.getName.hash}.xml#"
-      let docLink := Id.run <| ReaderT.run (declNameToLink decl.getName) config
-      let sourceLink := Id.run <| ReaderT.run (getSourceUrl mod.name decl.getDeclarationRange) config
-      let obj := Json.mkObj [("name", name), ("doc", doc), ("link", link), ("docLink", docLink), ("sourceLink", sourceLink)]
-      declList := declList.push obj
-      let xml := toString <| Id.run <| ReaderT.run (semanticXml decl) config
-      FS.writeFile (basePath / "semantic" / s!"{decl.getName.hash}.xml") xml
-  let json := Json.arr declList
-
-  FS.writeFile (basePath / "semantic" / "docgen4.xml") <| toString <| Id.run <| ReaderT.run schemaXml config
+  -- The three HTML files we always need
+  let indexHtml := ReaderT.run index config
 
   FS.writeFile (basePath / "index.html") indexHtml.toString
 
+  let notFoundHtml := ReaderT.run notFound config
   FS.writeFile (basePath / "404.html") notFoundHtml.toString
 
+  let findHtml := ReaderT.run find { config with depthToRoot := 1 }
   FS.writeFile (basePath / "find" / "index.html") findHtml.toString
 
+  -- The root JSON for find
+  let topLevelModules := config.hierarchy.getChildren.toArray.map (Json.str ∘ toString ∘ Prod.fst)
+  FS.writeFile declarationDataPath (Json.arr topLevelModules).compress
 
+  -- All the static stuff
   FS.writeFile (basePath / "style.css") styleCss
 
-  let declarationDataPath := basePath / "declaration-data.bmp"
-  FS.writeFile declarationDataPath json.compress
-  FS.writeFile (basePath / "declaration-data.timestamp") <| toString (←declarationDataPath.metadata).modified.sec
-
   FS.writeFile (basePath / "declaration-data.js") declarationDataCenterJs
   FS.writeFile (basePath / "nav.js") navJs
   FS.writeFile (basePath / "find" / "find.js") findJs
   FS.writeFile (basePath / "how-about.js") howAboutJs
   FS.writeFile (basePath / "search.js") searchJs
   FS.writeFile (basePath / "mathjax-config.js") mathjaxConfigJs
 
+  -- All the static stuff for LeanInk
   FS.writeFile (srcBasePath / "alectryon.css") alectryonCss
   FS.writeFile (srcBasePath / "alectryon.js") alectryonJs
   FS.writeFile (srcBasePath / "docutils_basic.css") docUtilsCss
   FS.writeFile (srcBasePath / "pygments.css") pygmentsCss
 
+def htmlOutputResults (baseConfig : SiteBaseContext) (result : AnalyzerResult) (ws : Lake.Workspace) (leanHash: String) (inkPath : Option System.FilePath) : IO Unit := do
+  let config : SiteContext := {
+    result := result,
+    sourceLinker := ←sourceLinker ws leanHash
+    leanInkEnabled := inkPath.isSome
+  }
+  let basePath := FilePath.mk "." / "build" / "doc"
+  let srcBasePath := basePath / "src"
+  -- Rendering the entire lean compiler takes time....
+  --let sourceSearchPath := ((←Lean.findSysroot) / "src" / "lean") :: ws.root.srcDir :: ws.leanSrcPath
+  let sourceSearchPath := ws.root.srcDir :: ws.leanSrcPath
+
+  let mut declMap := HashMap.empty
+  for (_, mod) in result.moduleInfo.toArray do
+    let topLevelMod := mod.name.getRoot
+    let mut jsonDecls := #[]
+    for decl in filterMapDocInfo mod.members do
+      let name := decl.getName.toString
+      let doc := decl.getDocString.getD ""
+      let docLink := declNameToLink decl.getName |>.run config baseConfig
+      IO.println s!"DOC: {docLink}"
+      let sourceLink := getSourceUrl mod.name decl.getDeclarationRange |>.run config baseConfig
+      let json := Json.mkObj [("name", name), ("doc", doc), ("docLink", docLink), ("sourceLink", sourceLink)]
+      jsonDecls := jsonDecls.push json
+    let currentModDecls := declMap.findD topLevelMod #[]
+    declMap := declMap.insert topLevelMod (currentModDecls ++ jsonDecls)
+
+  for (topLevelMod, decls) in declMap.toList do
+    FS.writeFile (basePath / s!"declaration-data-{topLevelMod}.bmp") (Json.arr decls).compress
 
   for (modName, module) in result.moduleInfo.toArray do
     let fileDir := moduleNameToDirectory basePath modName
     let filePath := moduleNameToFile basePath modName
     -- path: 'basePath/module/components/till/last.html'
     -- The last component is the file name, so we drop it from the depth to root.
-    let config := { config with depthToRoot := modName.components.dropLast.length }
-    let moduleHtml := ReaderT.run (moduleToHtml module) config
+    let baseConfig := { baseConfig with depthToRoot := modName.components.dropLast.length }
+    let moduleHtml := moduleToHtml module |>.run config baseConfig
     FS.createDirAll $ fileDir
     FS.writeFile filePath moduleHtml.toString
     if let some inkPath := inkPath then
@@ -98,12 +103,28 @@ def htmlOutput (result : AnalyzerResult) (ws : Lake.Workspace) (leanHash: String
       IO.println s!"Inking: {modName.toString}"
       -- path: 'basePath/src/module/components/till/last.html'
      -- The last component is the file name, however we are in src/ here so dont drop it this time
-      let config := { config with depthToRoot := modName.components.length }
-      let srcHtml ← ReaderT.run (LeanInk.moduleToHtml module inkPath inputPath) config
+      let baseConfig := { baseConfig with depthToRoot := modName.components.length }
+      let srcHtml ← LeanInk.moduleToHtml module inkPath inputPath |>.run config baseConfig
      let srcDir := moduleNameToDirectory srcBasePath modName
      let srcPath := moduleNameToFile srcBasePath modName
      FS.createDirAll srcDir
      FS.writeFile srcPath srcHtml.toString
 
+def getSimpleBaseContext (hierarchy : Hierarchy) : SiteBaseContext :=
+  {
+    depthToRoot := 0,
+    currentName := none,
+    hierarchy
+  }
+
+/--
+The main entrypoint for outputting the documentation HTML based on an
+`AnalyzerResult`.
+-/
+def htmlOutput (result : AnalyzerResult) (hierarchy : Hierarchy) (ws : Lake.Workspace) (leanHash: String) (inkPath : Option System.FilePath) : IO Unit := do
+  let baseConfig := getSimpleBaseContext hierarchy
+  htmlOutputSetup baseConfig
+  htmlOutputResults baseConfig result ws leanHash inkPath
+
 end DocGen4
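Note: `htmlOutputResults` now buckets the declaration JSON by the root of each module name (`mod.name.getRoot`), writing one `declaration-data-<Root>.bmp` per top-level module, while `htmlOutputSetup` writes the shared pages plus a `declaration-data.bmp` that only lists those roots. A small standalone sketch of that grouping step, assuming only `Std.HashMap` and made-up module names:

    import Lean
    import Std.Data.HashMap
    open Lean Std

    -- minimal sketch mirroring the `declMap` loop above: one bucket per root name
    def groupByRoot (mods : Array Name) : HashMap Name (Array Name) := Id.run do
      let mut m : HashMap Name (Array Name) := .empty
      for n in mods do
        m := m.insert n.getRoot (m.findD n.getRoot #[] |>.push n)
      return m

    -- groupByRoot #[`Mathlib.Algebra, `Mathlib.Order, `Std.Data.List]
    -- yields the buckets Mathlib (2 entries) and Std (1 entry), which correspond
    -- to declaration-data-Mathlib.bmp and declaration-data-Std.bmp on disk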
@@ -13,13 +13,14 @@ open scoped DocGen4.Jsx
 open Lean System Widget Elab Process
 
 /--
-The context used in the `HtmlM` monad for HTML templating.
+The context used in the `BaseHtmlM` monad for HTML templating.
 -/
-structure SiteContext where
+structure SiteBaseContext where
 
   /--
-  The full analysis result from the Process module.
+  The module hierarchy as a tree structure.
   -/
-  result : AnalyzerResult
+  hierarchy : Hierarchy
   /--
   How far away we are from the page root, used for relative links to the root.
   -/
@@ -29,6 +30,15 @@ structure SiteContext where
   pages that don't have a module name.
   -/
   currentName : Option Name
 
+/--
+The context used in the `HtmlM` monad for HTML templating.
+-/
+structure SiteContext where
+  /--
+  The full analysis result from the Process module.
+  -/
+  result : AnalyzerResult
   /--
   A function to link declaration names to their source URLs, usually Github ones.
   -/
@@ -38,50 +48,63 @@ structure SiteContext where
   -/
   leanInkEnabled : Bool
 
-def setCurrentName (name : Name) (ctx : SiteContext) := {ctx with currentName := some name}
+def setCurrentName (name : Name) (ctx : SiteBaseContext) := {ctx with currentName := some name}
 
-abbrev HtmlT := ReaderT SiteContext
+abbrev BaseHtmlT := ReaderT SiteBaseContext
+abbrev BaseHtmlM := BaseHtmlT Id
+
+abbrev HtmlT (m) := ReaderT SiteContext (BaseHtmlT m)
 abbrev HtmlM := HtmlT Id
 
+def HtmlT.run (x : HtmlT m α) (ctx : SiteContext) (baseCtx : SiteBaseContext) : m α :=
+  ReaderT.run x ctx |>.run baseCtx
+
+def HtmlM.run (x : HtmlM α) (ctx : SiteContext) (baseCtx : SiteBaseContext) : α :=
+  ReaderT.run x ctx |>.run baseCtx |>.run
+
 /--
 Obtains the root URL as a relative one to the current depth.
 -/
-def getRoot : HtmlM String := do
+def getRoot : BaseHtmlM String := do
   let rec go: Nat -> String
   | 0 => "./"
   | Nat.succ n' => "../" ++ go n'
-  let d <- SiteContext.depthToRoot <$> read
+  let d <- SiteBaseContext.depthToRoot <$> read
   return (go d)
 
+def getHierarchy : BaseHtmlM Hierarchy := do pure (←read).hierarchy
+def getCurrentName : BaseHtmlM (Option Name) := do pure (←read).currentName
 def getResult : HtmlM AnalyzerResult := do pure (←read).result
-def getCurrentName : HtmlM (Option Name) := do pure (←read).currentName
 def getSourceUrl (module : Name) (range : Option DeclarationRange): HtmlM String := do pure $ (←read).sourceLinker module range
 def leanInkEnabled? : HtmlM Bool := do pure (←read).leanInkEnabled
 
 /--
 If a template is meant to be extended because it for example only provides the
 header but no real content this is the way to fill the template with content.
+This is untyped so HtmlM and BaseHtmlM can be mixed.
 -/
-def templateExtends {α β : Type} (base : α → HtmlM β) (new : HtmlM α) : HtmlM β :=
+def templateExtends {α β} {m} [Bind m] (base : α → m β) (new : m α) : m β :=
   new >>= base
 
+def templateLiftExtends {α β} {m n} [Bind m] [MonadLift n m] (base : α → n β) (new : m α) : m β :=
+  new >>= (monadLift ∘ base)
+
 /--
 Returns the doc-gen4 link to a module name.
 -/
-def moduleNameToLink (n : Name) : HtmlM String := do
+def moduleNameToLink (n : Name) : BaseHtmlM String := do
   let parts := n.components.map Name.toString
   pure $ (← getRoot) ++ (parts.intersperse "/").foldl (· ++ ·) "" ++ ".html"
 
 /--
 Returns the HTML doc-gen4 link to a module name.
 -/
-def moduleToHtmlLink (module : Name) : HtmlM Html := do
+def moduleToHtmlLink (module : Name) : BaseHtmlM Html := do
   pure <a href={←moduleNameToLink module}>{module.toString}</a>
 
 /--
 Returns the LeanInk link to a module name.
 -/
-def moduleNameToInkLink (n : Name) : HtmlM String := do
+def moduleNameToInkLink (n : Name) : BaseHtmlM String := do
   let parts := "src" :: n.components.map Name.toString
   pure $ (← getRoot) ++ (parts.intersperse "/").foldl (· ++ ·) "" ++ ".html"
 
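Note: the templating monads are now a two-layer reader stack. `SiteBaseContext` carries what every page needs (hierarchy, depth to root, current module name), so the navbar, index, 404 and find pages only need `BaseHtmlM`, while module pages additionally read the analysis-dependent `SiteContext` through `HtmlM`. A minimal sketch of how the layers are run; the import/open lines are assumptions and both context values are taken as given:

    import DocGen4.Output.Base
    open DocGen4.Output Lean

    example (myCtx : SiteContext) (myBaseCtx : SiteBaseContext) : Nat :=
      -- BaseHtmlM actions only need the base context
      let _root := Id.run <| getRoot.run myBaseCtx
      -- HtmlM actions are run with both contexts, via HtmlM.run from above
      let result := HtmlM.run getResult myCtx myBaseCtx
      result.moduleNames.size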
@@ -6,13 +6,13 @@ namespace Output
 open scoped DocGen4.Jsx
 open Lean
 
-def find : HtmlM Html := do
+def find : BaseHtmlM Html := do
   pure
     <html lang="en">
       <head>
         <link rel="preload" href={s!"{←getRoot}declaration-data.bmp"}/>
         <script>{s!"const SITE_ROOT={String.quote (←getRoot)};"}</script>
-        <script type="module" async="true" src={s!"./find.js"}></script>
+        <script type="module" async="true" src="./find.js"></script>
       </head>
       <body></body>
     </html>
 
@@ -11,7 +11,7 @@ namespace Output
 
 open scoped DocGen4.Jsx
 
-def index : HtmlM Html := do templateExtends (baseHtml "Index") $ pure $
+def index : BaseHtmlM Html := do templateExtends (baseHtml "Index") $ pure $
   <main>
     <a id="top"></a>
     <h1> Welcome to the documentation page </h1>
@@ -233,10 +233,10 @@ def internalNav (members : Array Name) (moduleName : Name) : HtmlM Html := do
 /--
 The main entry point to rendering the HTML for an entire module.
 -/
-def moduleToHtml (module : Process.Module) : HtmlM Html := withReader (setCurrentName module.name) do
+def moduleToHtml (module : Process.Module) : HtmlM Html := withTheReader SiteBaseContext (setCurrentName module.name) do
   let memberDocs ← module.members.mapM (λ i => moduleMemberToHtml module.name i)
   let memberNames := filterMapDocInfo module.members |>.map DocInfo.getName
-  templateExtends (baseHtmlGenerator module.name.toString) $ pure #[
+  templateLiftExtends (baseHtmlGenerator module.name.toString) $ pure #[
     ←internalNav memberNames module.name,
     Html.element "main" false #[] memberDocs
   ]
 
@@ -13,7 +13,7 @@ namespace Output
 open Lean
 open scoped DocGen4.Jsx
 
-def moduleListFile (file : Name) : HtmlM Html := do
+def moduleListFile (file : Name) : BaseHtmlM Html := do
   pure <div class={if (← getCurrentName) == file then "nav_link visible" else "nav_link"}>
     {←moduleToHtmlLink file}
   </div>
@@ -21,7 +21,7 @@ def moduleListFile (file : Name) : HtmlM Html := do
 /--
 Build the HTML tree representing the module hierarchy.
 -/
-partial def moduleListDir (h : Hierarchy) : HtmlM Html := do
+partial def moduleListDir (h : Hierarchy) : BaseHtmlM Html := do
   let children := Array.mk (h.getChildren.toList.map Prod.snd)
   let dirs := children.filter (λ c => c.getChildren.toList.length != 0)
   let files := children.filter (λ c => Hierarchy.isFile c ∧ c.getChildren.toList.length = 0)
@@ -30,7 +30,7 @@ partial def moduleListDir (h : Hierarchy) : HtmlM Html := do
   let fileNodes ← (files.mapM moduleListFile)
   let moduleLink ← moduleNameToLink h.getName
   let summary :=
-    if (←getResult).moduleInfo.contains h.getName then
+    if h.isFile then
      <summary>{←moduleToHtmlLink h.getName}</summary>
    else
      <summary>{h.getName.toString}</summary>
@@ -45,17 +45,17 @@ partial def moduleListDir (h : Hierarchy) : HtmlM Html := do
 /--
 Return a list of top level modules, linkified and rendered as HTML
 -/
-def moduleList : HtmlM Html := do
-  let hierarchy := (←getResult).hierarchy
+def moduleList : BaseHtmlM Html := do
+  let hierarchy ← getHierarchy
   let mut list := Array.empty
-  for (n, cs) in hierarchy.getChildren do
+  for (_, cs) in hierarchy.getChildren do
    list := list.push $ ←moduleListDir cs
  pure <div class="module_list">[list]</div>
 
 /--
 The main entry point to rendering the navbar on the left hand side.
 -/
-def navbar : HtmlM Html := do
+def navbar : BaseHtmlM Html := do
   pure
     <nav class="nav">
       <h3>General documentation</h3>
@@ -14,7 +14,7 @@ open scoped DocGen4.Jsx
 /--
 Render the 404 page.
 -/
-def notFound : HtmlM Html := do templateExtends (baseHtml "404") $ pure $
+def notFound : BaseHtmlM Html := do templateExtends (baseHtml "404") $ pure $
   <main>
     <h1>404 Not Found</h1>
     <p> Unfortunately, the page you were looking for is no longer here. </p>
 
@@ -1,59 +0,0 @@
-import DocGen4.Output.Template
-import DocGen4.Output.DocString
-import DocGen4.Process
-import Lean.Data.Xml
-
-open Lean Xml DocGen4.Process
-
-namespace DocGen4
-namespace Output
-
-instance : ToString $ Array Element where
-  toString xs := xs.map toString |>.foldl String.append ""
-
-instance : Coe Element Content where
-  coe e := Content.Element e
-
--- TODO: syntax metaprogramming and basic semantic data
-
-def semanticXml (i : DocInfo) : HtmlM $ Array Element := do
-  pure #[
-    Element.Element
-      "rdf:RDF"
-      (Std.RBMap.fromList [
-        ("xmlns:rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
-        ("xmlns:docgen4", s!"{←getRoot}semactic/docgen4.xml#")
-      ] _)
-      #[
-        Element.Element
-          "rdf:Description"
-          (Std.RBMap.fromList [
-            ("rdf:about", s!"{←getRoot}semactic/{i.getName.hash}.xml#")
-          ] _)
-          #[]
-      ]
-  ]
-
-def schemaXml : HtmlM $ Array Element := do
-  pure #[
-    Element.Element
-      "rdf:RDF"
-      (Std.RBMap.fromList [
-        ("xmlns:rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
-        ("xmlns:docgen4", s!"{←getRoot}semactic/docgen4.xml#")
-      ] _)
-      #[
-        Element.Element
-          "docgen4:hasInstance"
-          Std.RBMap.empty
-          #[
-            Element.Element
-              "rdfs:type"
-              Std.RBMap.empty
-              #[Content.Character "rdf:Property"]
-          ]
-      ]
-  ]
-
-end Output
-end DocGen4
@@ -14,7 +14,7 @@ open scoped DocGen4.Jsx
 /--
 The HTML template used for all pages.
 -/
-def baseHtmlGenerator (title : String) (site : Array Html) : HtmlM Html := do
+def baseHtmlGenerator (title : String) (site : Array Html) : BaseHtmlM Html := do
   pure
     <html lang="en">
       <head>
@@ -66,7 +66,7 @@ def baseHtmlGenerator (title : String) (site : Array Html) : HtmlM Html := do
 /--
 A comfortability wrapper around `baseHtmlGenerator`.
 -/
-def baseHtml (title : String) (site : Html) : HtmlM Html := baseHtmlGenerator title #[site]
+def baseHtml (title : String) (site : Html) : BaseHtmlM Html := baseHtmlGenerator title #[site]
 
 end Output
 end DocGen4
 
@@ -6,14 +6,20 @@ Authors: Henrik Böving
 
 import Lean
 import Std.Data.HashMap
+import Std.Data.HashSet
 
 import DocGen4.Process.Base
 import DocGen4.Process.Hierarchy
 import DocGen4.Process.DocInfo
 
+open Std
+
+def HashSet.fromArray [BEq α] [Hashable α] (xs : Array α) : HashSet α :=
+  xs.foldr (flip .insert) .empty
+
 namespace DocGen4.Process
 
-open Lean Meta Std
+open Lean Meta
 
 /--
 Member of a module, either a declaration or some module doc string.
@@ -54,10 +60,6 @@ structure AnalyzerResult where
   -/
   moduleInfo : HashMap Name Module
   /--
-  The module hierarchy as a tree structure.
-  -/
-  hierarchy : Hierarchy
-  /--
   An adjacency matrix for the import relation between modules, indexed
   my the values in `name2ModIdx`.
   -/
@@ -86,21 +88,33 @@ def getDocString : ModuleMember → Option String
 
 end ModuleMember
 
+def getRelevantModules (imports : List Name) : MetaM (HashSet Name) := do
+  let env ← getEnv
+  let mut relevant := .empty
+  for module in env.header.moduleNames do
+    if module.getRoot ∈ imports then
+      relevant := relevant.insert module
+  pure relevant
+
 /--
 Run the doc-gen analysis on all modules that are loaded into the `Environment`
 of this `MetaM` run.
 -/
-def process : MetaM AnalyzerResult := do
+def process (imports : List Name) (transitiveModules : Bool) : MetaM (AnalyzerResult × Hierarchy) := do
   let env ← getEnv
-  let mut res := mkHashMap env.header.moduleNames.size
-  for module in env.header.moduleNames do
-    let modDocs := match getModuleDoc? env module with
-    | none => #[]
-    | some ds => ds
-    |>.map (λ doc => ModuleMember.modDoc doc)
+  let relevantNames := if transitiveModules then (HashSet.fromArray env.header.moduleNames) else (←getRelevantModules imports)
+  let mut res := mkHashMap relevantNames.size
+  for module in relevantNames.toArray do
+    let modDocs := getModuleDoc? env module |>.getD #[] |>.map .modDoc
    res := res.insert module (Module.mk module modDocs)
 
-  for cinfo in env.constants.toList do
+  for (name, cinfo) in env.constants.toList do
+    let some modidx := env.getModuleIdxFor? name | unreachable!
+    let moduleName := env.allImportedModuleNames.get! modidx
+    -- skip irrelevant names
+    if !relevantNames.contains moduleName.getRoot then
+      continue
+
    try
      let config := {
        maxHeartbeats := 5000000,
@@ -108,14 +122,13 @@ def process : MetaM AnalyzerResult := do
        fileName := ←getFileName,
        fileMap := ←getFileMap
      }
-      let analysis := Prod.fst <$> Meta.MetaM.toIO (DocInfo.ofConstant cinfo) config { env := env} {} {}
+      let analysis := Prod.fst <$> Meta.MetaM.toIO (DocInfo.ofConstant (name, cinfo)) config { env := env} {} {}
      if let some dinfo ← analysis then
-        let some modidx := env.getModuleIdxFor? dinfo.getName | unreachable!
        let moduleName := env.allImportedModuleNames.get! modidx
        let module := res.find! moduleName
        res := res.insert moduleName {module with members := module.members.push (ModuleMember.docInfo dinfo)}
    catch e =>
-      IO.println s!"WARNING: Failed to obtain information for: {cinfo.fst}: {←e.toMessageData.toString}"
+      IO.println s!"WARNING: Failed to obtain information for: {name}: {←e.toMessageData.toString}"
 
  -- TODO: This is definitely not the most efficient way to store this data
  let mut adj := Array.mkArray res.size (Array.mkArray res.size false)
@@ -128,13 +141,14 @@ def process : MetaM AnalyzerResult := do
    let some importIdx := env.getModuleIdx? imp.module | unreachable!
    adj := adj.set! modIdx (adj.get! modIdx |>.set! importIdx true)
 
-  pure {
+  let hierarchy := Hierarchy.fromArray relevantNames.toArray
+  let analysis := {
    name2ModIdx := env.const2ModIdx,
    moduleNames := env.header.moduleNames,
    moduleInfo := res,
-    hierarchy := Hierarchy.fromArray env.header.moduleNames,
    importAdj := adj
  }
+  pure (analysis, hierarchy)
 
 def filterMapDocInfo (ms : Array ModuleMember) : Array DocInfo :=
  ms.filterMap filter
 
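Note: `getRelevantModules` keeps a module exactly when the root component of its name was requested, and `HashSet.fromArray` restores the old everything-in-the-environment behaviour when `transitiveModules` is set. The per-name test is just `Name.getRoot`; a tiny illustration with made-up module names:

    import Lean
    open Lean

    #eval toString (`Mathlib.Algebra.Group.Basic).getRoot  -- "Mathlib": kept when `Mathlib` was requested
    #eval (`Std.Data.List.Basic).getRoot == `Mathlib        -- false: skipped unless transitiveModules is on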
@@ -27,6 +27,9 @@ namespace HierarchyMap
 def toList : HierarchyMap → List (Name × Hierarchy)
 | t => t.revFold (fun ps k v => (k, v)::ps) []
 
+def toArray : HierarchyMap → Array (Name × Hierarchy)
+| t => t.fold (fun ps k v => ps ++ #[(k, v)] ) #[]
+
 def hForIn [Monad m] (t : HierarchyMap) (init : σ) (f : (Name × Hierarchy) → σ → m (ForInStep σ)) : m σ :=
   t.forIn init (fun a b acc => f (a, b) acc)
 
@@ -58,7 +61,7 @@ partial def insert! (h : Hierarchy) (n : Name) : Hierarchy := Id.run $ do
    | none =>
      node hn h.isFile (cs.insert Name.cmp n $ empty n true)
    | some (node _ true _) => h
-    | some hierarchy@(node _ false ccs) =>
+    | some (node _ false ccs) =>
      cs := cs.erase Name.cmp n
      node hn h.isFile (cs.insert Name.cmp n $ node n true ccs)
  else
 
Main.lean
@@ -4,22 +4,50 @@ import Cli
 
 open DocGen4 Lean Cli
 
-def runDocGenCmd (p : Parsed) : IO UInt32 := do
-  let modules : List String := p.variableArgsAs! String |>.toList
-  let res ← lakeSetup modules
-  match res with
-  | Except.ok (ws, leanHash) =>
-    IO.println s!"Loading modules from: {←searchPathRef.get}"
-    let doc ← load $ modules.map Name.mkSimple
-    IO.println "Outputting HTML"
+def findLeanInk? (p : Parsed) : IO (Option System.FilePath) := do
   match p.flag? "ink" with
   | some ink =>
     let inkPath := System.FilePath.mk ink.value
     if ←inkPath.pathExists then
-      htmlOutput doc ws leanHash inkPath
+      pure $ some inkPath
    else
      throw $ IO.userError "Invalid path to LeanInk binary provided"
-  | none => htmlOutput doc ws leanHash none
+  | none => pure none
+
+def runDocGenCmd (p : Parsed) : IO UInt32 := do
+  let modules : List String := p.variableArgsAs! String |>.toList
+  if p.hasFlag "single" && p.hasFlag "setup" then
+    throw $ IO.userError "Can't have single and setup at the same time"
+  else
+    let res ← lakeSetup modules
+    let modules := modules.map Name.mkSimple
+    match res with
+    | Except.ok (ws, leanHash) =>
+      IO.println s!"Loading modules from: {←searchPathRef.get}"
+      --if p.hasFlag "single" then
+      --  if modules.length ≠ 1 then
+      --    throw $ IO.userError "Called single with more than a single module"
+      --  else
+      --    let (doc, hierarchy) ← load modules false
+      --    IO.println "Outputting HTML"
+      --    let baseConfig := getSimpleBaseContext hierarchy
+      --    htmlOutputResults baseConfig doc ws leanHash (←findLeanInk? p)
+      --    pure 0
+      --else if p.hasFlag "setup" then
+      --  let config := {
+      --    fileName := default,
+      --    fileMap := default,
+      --  }
+      --  let env ← importModules (List.map (Import.mk · false) modules) Options.empty
+      --  let relevantModules ← Prod.fst <$> Meta.MetaM.toIO (Process.getRelevantModules modules) config { env := env } {}
+      --  let hierarchy := Hierarchy.fromArray relevantModules.toArray
+      --  let baseConfig := getSimpleBaseContext hierarchy
+      --  htmlOutputSetup baseConfig
+      --  pure 0
+      --else
+      let (doc, hierarchy) ← load modules true
+      IO.println "Outputting HTML"
+      htmlOutput doc hierarchy ws leanHash (←findLeanInk? p)
      pure 0
    | Except.error rc => pure rc
 
@@ -29,6 +57,8 @@ def docGenCmd : Cmd := `[Cli|
 
  FLAGS:
    ink : String; "Path to a LeanInk binary to use for rendering the Lean sources."
+    --single; "Generate documentation only for a single module, will cause broken links if there are others"
+    --setup; "Only output the files that are always required"
 
  ARGS:
    ...modules : String; "The modules to generate the HTML for"
 
@@ -6,6 +6,17 @@
 
 const CACHE_DB_NAME = "declaration-data";
 const CACHE_DB_VERSION = 1;
+const CACHE_DB_KEY = "DECLARATIONS_KEY";
+
+async function fetchModuleData(module) {
+  const moduleDataUrl = new URL(
+    `${SITE_ROOT}declaration-data-${module}.bmp`,
+    window.location
+  );
+  const moduleData = await fetch(moduleDataUrl);
+  const moduleDataJson = await moduleData.json();
+  return moduleDataJson;
+}
 
 /**
  * The DeclarationDataCenter is used for declaration searching.
@@ -41,42 +52,39 @@ export class DeclarationDataCenter {
   */
  static async init() {
    if (!DeclarationDataCenter.singleton) {
-      const timestampUrl = new URL(
-        `${SITE_ROOT}declaration-data.timestamp`,
-        window.location
-      );
-      const dataUrl = new URL(
+      const dataListUrl = new URL(
        `${SITE_ROOT}declaration-data.bmp`,
        window.location
      );
 
-      const timestampRes = await fetch(timestampUrl);
-      const timestamp = await timestampRes.text();
-
      // try to use cache first
-      const data = await fetchCachedDeclarationData(timestamp).catch(_e => null);
+      const data = await fetchCachedDeclarationData().catch(_e => null);
      if (data) {
        // if data is defined, use the cached one.
        DeclarationDataCenter.singleton = new DeclarationDataCenter(data);
      } else {
        // undefined. then fetch the data from the server.
-        const dataRes = await fetch(dataUrl);
-        const dataJson = await dataRes.json();
+        const dataListRes = await fetch(dataListUrl);
+        const dataListJson = await dataListRes.json();
+
+        // TODO: this is probably kind of inefficient
+        const dataJsonUnflattened = await Promise.all(dataListJson.map(fetchModuleData));
+
+        const dataJson = dataJsonUnflattened.flat();
        // the data is a map of name (original case) to declaration data.
        const data = new Map(
-          dataJson.map(({ name, doc, link, docLink, sourceLink }) => [
+          dataJson.map(({ name, doc, docLink, sourceLink }) => [
            name,
            {
              name,
              lowerName: name.toLowerCase(),
              lowerDoc: doc.toLowerCase(),
-              link,
              docLink,
              sourceLink,
            },
          ])
        );
-        await cacheDeclarationData(timestamp, data);
+        await cacheDeclarationData(data);
        DeclarationDataCenter.singleton = new DeclarationDataCenter(data);
      }
    }
@@ -137,7 +145,6 @@ function getMatches(declarations, pattern, maxResults = 30) {
      name,
      lowerName,
      lowerDoc,
-      link,
      docLink,
      sourceLink,
    } of declarations.values()) {
@@ -156,7 +163,6 @@ function getMatches(declarations, pattern, maxResults = 30) {
        err,
        lowerName,
        lowerDoc,
-        link,
        docLink,
        sourceLink,
      });
@@ -195,17 +201,16 @@ async function getDeclarationDatabase() {
 
 /**
  * Store data in indexedDB object store.
- * @param {string} timestamp
  * @param {Map<string, any>} data
  */
-async function cacheDeclarationData(timestamp, data) {
+async function cacheDeclarationData(data) {
  let db = await getDeclarationDatabase();
  let store = db
    .transaction("declaration", "readwrite")
    .objectStore("declaration");
  return new Promise((resolve, reject) => {
    let clearRequest = store.clear();
-    let addRequest = store.add(data, timestamp);
+    let addRequest = store.add(data, CACHE_DB_KEY);
 
    addRequest.onsuccess = function (event) {
      resolve();
@@ -221,16 +226,15 @@ async function cacheDeclarationData(timestamp, data) {
 
 /**
  * Retrieve data from indexedDB database.
- * @param {string} timestamp
 * @returns {Promise<Map<string, any>|undefined>}
 */
-async function fetchCachedDeclarationData(timestamp) {
+async function fetchCachedDeclarationData() {
  let db = await getDeclarationDatabase();
  let store = db
    .transaction("declaration", "readonly")
    .objectStore("declaration");
  return new Promise((resolve, reject) => {
-    let transactionRequest = store.get(timestamp);
+    let transactionRequest = store.get(CACHE_DB_KEY);
    transactionRequest.onsuccess = function (event) {
      resolve(event.result);
    };