diff --git a/DocGen4/IncludeStr.lean b/DocGen4/IncludeStr.lean
index 511a181..afc3ceb 100644
--- a/DocGen4/IncludeStr.lean
+++ b/DocGen4/IncludeStr.lean
@@ -7,13 +7,37 @@ import Lean
namespace DocGen4
-open Lean System IO Lean.Elab.Term
+open Lean System IO Lean.Elab.Term FS
+
+deriving instance DecidableEq for FileType
+
+/--
+ Traverse all subdirectories of `f` to find one that satisfies `p`.
+-/
+partial def traverseDir (f : FilePath) (p : FilePath → IO Bool) : IO (Option FilePath) := do
+ if (← p f) then
+ return f
+ for d in (← System.FilePath.readDir f) do
+ let subDir := d.path
+ let metadata ← subDir.metadata
+ if metadata.type = FileType.dir then
+ if let some p ← traverseDir subDir p then
+ return p
+ return none
syntax (name := includeStr) "include_str" str : term
@[termElab includeStr] def includeStrImpl : TermElab := λ stx expectedType? => do
- let str := stx[1].isStrLit?.get!
- let path := FilePath.mk str
+ let str := stx[1].isStrLit?.get!
+ let srcPath := (FilePath.mk (← read).fileName)
+ let currentDir ← IO.currentDir
+ -- HACK: Currently we cannot get the current file path in VSCode, so we have to search the current directory tree for a subdirectory that contains the file.
+ if let some path ← match srcPath.parent with
+ | some p => pure $ some $ p / str
+ | none => do
+ let foundDir ← traverseDir currentDir λ p => p / str |>.pathExists
+ pure $ foundDir.map (· / str)
+ then
if ←path.pathExists then
if ←path.isDir then
throwError s!"{str} is a directory"
@@ -21,6 +45,8 @@ syntax (name := includeStr) "include_str" str : term
let content ← FS.readFile path
pure $ mkStrLit content
else
- throwError s!"\"{str}\" does not exist as a file"
+ throwError s!"{path} does not exist as a file"
+ else
+ throwError s!"Could not find file {str} in any subdirectory of the current directory"
end DocGen4
diff --git a/DocGen4/Load.lean b/DocGen4/Load.lean
index 20da38a..841891a 100644
--- a/DocGen4/Load.lean
+++ b/DocGen4/Load.lean
@@ -5,6 +5,7 @@ Authors: Henrik Böving
-/
import Lean
+import Lake
import DocGen4.Process
import Std.Data.HashMap
@@ -12,40 +13,25 @@ namespace DocGen4
open Lean System Std IO
-def getLakePath : IO FilePath := do
- match (← IO.getEnv "LAKE") with
- | some path => pure $ System.FilePath.mk path
- | none =>
- let lakePath := (←findSysroot?) / "bin" / "lake"
- pure $ lakePath.withExtension System.FilePath.exeExtension
-
--- Modified from the LSP Server
-def lakeSetupSearchPath (lakePath : System.FilePath) (imports : Array String) : IO Lean.SearchPath := do
- let args := #["print-paths"] ++ imports
- let cmdStr := " ".intercalate (toString lakePath :: args.toList)
- let lakeProc ← Process.spawn {
- stdin := Process.Stdio.null
- stdout := Process.Stdio.piped
- stderr := Process.Stdio.piped
- cmd := lakePath.toString
- args
- }
- let stdout := String.trim (← lakeProc.stdout.readToEnd)
- let stderr := String.trim (← lakeProc.stderr.readToEnd)
- match (← lakeProc.wait) with
- | 0 =>
- let stdout := stdout.split (· == '\n') |>.getLast!
- let Except.ok (paths : LeanPaths) ← pure (Json.parse stdout >>= fromJson?)
- | throw $ userError s!"invalid output from `{cmdStr}`:\n{stdout}\nstderr:\n{stderr}"
- initSearchPath (← findSysroot?) paths.oleanPath
- paths.oleanPath.mapM realPathNormalized
- | 2 => pure [] -- no lakefile.lean
- | _ => throw $ userError s!"`{cmdStr}` failed:\n{stdout}\nstderr:\n{stderr}"
+def lakeSetup (imports : List String) : IO (Except UInt32 (Lake.Workspace × String)) := do
+ let (leanInstall?, lakeInstall?) ← Lake.findInstall?
+ let res ← StateT.run Lake.Cli.loadWorkspace {leanInstall?, lakeInstall?} |>.toIO'
+ match res with
+ | Except.ok (ws, options) =>
+ let lean := leanInstall?.get!
+ if lean.githash ≠ Lean.githash then
+ IO.println s!"WARNING: This doc-gen was built with Lean: {Lean.githash} but the project is running on: {lean.githash}"
+ let lake := lakeInstall?.get!
+ let ctx ← Lake.mkBuildContext ws lean lake
+ ws.root.buildImportsAndDeps imports |>.run Lake.LogMethods.eio ctx
+ initSearchPath (←findSysroot) ws.leanPaths.oleanPath
+ pure $ Except.ok (ws, lean.githash)
+ | Except.error rc => pure $ Except.error rc
def load (imports : List Name) : IO AnalyzerResult := do
let env ← importModules (List.map (Import.mk · false) imports) Options.empty
-- TODO parameterize maxHeartbeats
IO.println "Processing modules"
- Prod.fst <$> (Meta.MetaM.toIO process { maxHeartbeats := 100000000, options := ⟨[(`pp.tagAppFns, true)]⟩ } { env := env} {} {})
+ Prod.fst <$> Meta.MetaM.toIO process { maxHeartbeats := 100000000, options := ⟨[(`pp.tagAppFns, true)]⟩ } { env := env} {} {}
end DocGen4
diff --git a/DocGen4/Output.lean b/DocGen4/Output.lean
index 21f28a6..f4dd4d5 100644
--- a/DocGen4/Output.lean
+++ b/DocGen4/Output.lean
@@ -4,12 +4,14 @@ Released under Apache 2.0 license as described in the file LICENSE.
Authors: Henrik Böving
-/
import Lean
+import Lake
import DocGen4.Process
import DocGen4.Output.Base
import DocGen4.Output.Index
import DocGen4.Output.Module
import DocGen4.Output.NotFound
import DocGen4.Output.Find
+import DocGen4.Output.Semantic
namespace DocGen4
@@ -24,12 +26,8 @@ Three link types from git supported:
TODO: This function is quite brittle and very github specific, we can
probably do better.
-/
-def getGithubBaseUrl : IO String := do
- let out ← IO.Process.output {cmd := "git", args := #["remote", "get-url", "origin"]}
- if out.exitCode != 0 then
- throw <| IO.userError <| "git exited with code " ++ toString out.exitCode
- let mut url := out.stdout.trimRight
-
+def getGithubBaseUrl (gitUrl : String) : String := Id.run do
+ let mut url := gitUrl
if url.startsWith "git@" then
url := url.drop 15
url := url.dropRight 4
@@ -39,57 +37,106 @@ def getGithubBaseUrl : IO String := do
else
pure url
-def getCommit : IO String := do
+def getProjectGithubUrl : IO String := do
+ let out ← IO.Process.output {cmd := "git", args := #["remote", "get-url", "origin"]}
+ if out.exitCode != 0 then
+ throw <| IO.userError <| "git exited with code " ++ toString out.exitCode
+ pure out.stdout.trimRight
+
+def getProjectCommit : IO String := do
let out ← IO.Process.output {cmd := "git", args := #["rev-parse", "HEAD"]}
if out.exitCode != 0 then
throw <| IO.userError <| "git exited with code " ++ toString out.exitCode
pure out.stdout.trimRight
-def sourceLinker : IO (Name → Option DeclarationRange → String) := do
- let baseUrl ← getGithubBaseUrl
- let commit ← getCommit
- pure λ name range =>
- let parts := name.components.map Name.toString
+def sourceLinker (ws : Lake.Workspace) (leanHash : String): IO (Name → Option DeclarationRange → String) := do
+ -- Compute a map from package names to source URL
+ let mut gitMap := Std.mkHashMap
+ let projectBaseUrl := getGithubBaseUrl (←getProjectGithubUrl)
+ let projectCommit ← getProjectCommit
+ gitMap := gitMap.insert ws.root.name (projectBaseUrl, projectCommit)
+ for pkg in ws.packageArray do
+ for dep in pkg.dependencies do
+ let value := match dep.src with
+ | Lake.Source.git url commit => (getGithubBaseUrl url, commit)
+ -- TODO: What do we do here if linking a source is not possible?
+ | _ => ("https://example.com", "master")
+ gitMap := gitMap.insert dep.name value
+
+ pure $ λ module range =>
+ let parts := module.components.map Name.toString
let path := (parts.intersperse "/").foldl (· ++ ·) ""
- let r := name.getRoot
- let basic := if r == `Lean ∨ r == `Init ∨ r == `Std then
- s!"https://github.com/leanprover/lean4/blob/{githash}/src/{path}.lean"
+ let root := module.getRoot
+ let basic := if root == `Lean ∨ root == `Init ∨ root == `Std then
+ s!"https://github.com/leanprover/lean4/blob/{leanHash}/src/{path}.lean"
else
- s!"{baseUrl}/blob/{commit}/{path}.lean"
+ match ws.packageForModule? module with
+ | some pkg =>
+ let (baseUrl, commit) := gitMap.find! pkg.name
+ s!"{baseUrl}/blob/{commit}/{path}.lean"
+ | none => "https://example.com"
match range with
| some range => s!"{basic}#L{range.pos.line}-L{range.endPos.line}"
| none => basic
-def htmlOutput (result : AnalyzerResult) : IO Unit := do
- let basePath := FilePath.mk "./build/doc/"
- let config := { depthToRoot := 0, result := result, currentName := none, sourceLinker := ←sourceLinker}
+def htmlOutput (result : AnalyzerResult) (ws : Lake.Workspace) (leanHash: String) : IO Unit := do
+ let config : SiteContext := { depthToRoot := 0, result := result, currentName := none, sourceLinker := ←sourceLinker ws leanHash}
+ let basePath := FilePath.mk "." / "build" / "doc"
+ let indexHtml := ReaderT.run index config
+ let findHtml := ReaderT.run find config
+ let notFoundHtml := ReaderT.run notFound config
FS.createDirAll basePath
FS.createDirAll (basePath / "find")
- let indexHtml := ReaderT.run index config
- let notFoundHtml := ReaderT.run notFound config
+ FS.createDirAll (basePath / "semantic")
let mut declList := #[]
for (module, mod) in result.moduleInfo.toArray do
for decl in filterMapDocInfo mod.members do
- let findDir := basePath / "find" / decl.getName.toString
- let findFile := (findDir / "index.html")
- -- path: 'basePath/find/decl.getName.toString'
+-- NOTE: the commented-out lines below are the old per-declaration redirect pages,
+ let name := decl.getName.toString
+ -- let findDir := basePath / "find" / name
+ -- let findFile := (findDir / "index.html")
let config := { config with depthToRoot := 2 }
- let findHtml := ReaderT.run (findRedirectHtml decl.getName) config
- FS.createDirAll findDir
- FS.writeFile findFile findHtml.toString
- let obj := Json.mkObj [("name", decl.getName.toString), ("description", decl.getDocString.getD "")]
+ -- let findHtml := ReaderT.run (findRedirectHtml decl.getName) config
+ -- FS.createDirAll findDir
+ -- FS.writeFile findFile findHtml.toString
+ -- let obj := Json.mkObj [("name", decl.getName.toString), ("description", decl.getDocString.getD "")]
+-- NOTE: superseded by the declaration-data entries generated below
+ let doc := decl.getDocString.getD ""
+ -- let root := module.getRoot
+ let root := Id.run <| ReaderT.run (getRoot) config
+ let link := root ++ s!"../semantic/{decl.getName.hash}.xml#"
+ let docLink := Id.run <| ReaderT.run (declNameToLink decl.getName) config
+ let sourceLink := Id.run <| ReaderT.run (getSourceUrl mod.name decl.getDeclarationRange) config
+ let obj := Json.mkObj [("name", name), ("doc", doc), ("link", link), ("docLink", docLink), ("sourceLink", sourceLink)]
+-- NOTE: end of declaration-data entry generation
declList := declList.push obj
+ let xml := toString <| Id.run <| ReaderT.run (semanticXml decl) config
+ FS.writeFile (basePath / "semantic" / s!"{decl.getName.hash}.xml") xml
let json := Json.arr declList
- FS.writeFile (basePath / "searchable_data.bmp") json.compress
+ FS.writeFile (basePath / "semantic" / "docgen4.xml") <| toString <| Id.run <| ReaderT.run schemaXml config
+
FS.writeFile (basePath / "index.html") indexHtml.toString
- FS.writeFile (basePath / "style.css") styleCss
FS.writeFile (basePath / "404.html") notFoundHtml.toString
+ FS.writeFile (basePath / "find" / "index.html") findHtml.toString
+
+ FS.writeFile (basePath / "style.css") styleCss
+
+ let declarationDataPath := basePath / "declaration-data.bmp"
+ FS.writeFile declarationDataPath json.compress
+ FS.writeFile (basePath / "declaration-data.timestamp") <| toString (←declarationDataPath.metadata).modified.sec
+
+ let root := Id.run <| ReaderT.run (getRoot) config
+ FS.writeFile (basePath / "site-root.js") (siteRootJs.replace "{siteRoot}" root)
+ FS.writeFile (basePath / "declaration-data.js") declarationDataCenterJs
FS.writeFile (basePath / "nav.js") navJs
+ FS.writeFile (basePath / "find" / "find.js") findJs
+ FS.writeFile (basePath / "how-about.js") howAboutJs
FS.writeFile (basePath / "search.js") searchJs
FS.writeFile (basePath / "mathjax-config.js") mathjaxConfigJs
+
for (module, content) in result.moduleInfo.toArray do
let fileDir := moduleNameToDirectory basePath module
let filePath := moduleNameToFile basePath module
diff --git a/DocGen4/Output/Base.lean b/DocGen4/Output/Base.lean
index 0723518..a1e82b7 100644
--- a/DocGen4/Output/Base.lean
+++ b/DocGen4/Output/Base.lean
@@ -53,10 +53,14 @@ def moduleNameToDirectory (basePath : FilePath) (n : Name) : FilePath :=
basePath / parts.foldl (· / ·) (FilePath.mk ".")
section Static
- def styleCss : String := include_str "./static/style.css"
- def navJs : String := include_str "./static/nav.js"
- def searchJs : String := include_str "./static/search.js"
- def mathjaxConfigJs : String := include_str "./static/mathjax-config.js"
+ def styleCss : String := include_str "../../static/style.css"
+ def siteRootJs : String := include_str "../../static/site-root.js"
+ def declarationDataCenterJs : String := include_str "../../static/declaration-data.js"
+ def navJs : String := include_str "../../static/nav.js"
+ def howAboutJs : String := include_str "../../static/how-about.js"
+ def searchJs : String := include_str "../../static/search.js"
+ def findJs : String := include_str "../../static/find/find.js"
+ def mathjaxConfigJs : String := include_str "../../static/mathjax-config.js"
end Static
def declNameToLink (name : Name) : HtmlM String := do
diff --git a/DocGen4/Output/Find.lean b/DocGen4/Output/Find.lean
index 41b82dd..95bbd8e 100644
--- a/DocGen4/Output/Find.lean
+++ b/DocGen4/Output/Find.lean
@@ -6,10 +6,16 @@ namespace Output
open scoped DocGen4.Jsx
open Lean
-def findRedirectHtml (decl : Name) : HtmlM Html := do
- let res ← getResult
- let url ← declNameToLink decl
- let contentString := s!"0;url={url}"
- pure $ Html.element "meta" false #[("http-equiv", "refresh"), ("content", contentString)] #[]
+def find : HtmlM Html := do
+ pure
+
+
+
+
+
+
+
+
end Output
end DocGen4
+
diff --git a/DocGen4/Output/Navbar.lean b/DocGen4/Output/Navbar.lean
index efc9735..1a75288 100644
--- a/DocGen4/Output/Navbar.lean
+++ b/DocGen4/Output/Navbar.lean
@@ -14,7 +14,7 @@ open Lean
open scoped DocGen4.Jsx
def moduleListFile (file : Name) : HtmlM Html := do
- pure
+ pure
@@ -29,7 +29,12 @@ partial def moduleListDir (h : Hierarchy) : HtmlM Html := do
pure
- {Html.element "summary" true #[] #[{h.getName.toString}]}
+ {
+ if (←getResult).moduleInfo.contains h.getName then
+ Html.element "summary" true #[] #[{h.getName.toString}]
+ else
+ {h.getName.toString}
+ }
[dirNodes]
[fileNodes]
diff --git a/DocGen4/Output/Semantic.lean b/DocGen4/Output/Semantic.lean
new file mode 100644
index 0000000..7b22cdf
--- /dev/null
+++ b/DocGen4/Output/Semantic.lean
@@ -0,0 +1,58 @@
+import DocGen4.Output.Template
+import DocGen4.Output.DocString
+import Lean.Data.Xml
+
+open Lean Xml
+
+namespace DocGen4
+namespace Output
+
+instance : ToString $ Array Element where
+ toString xs := xs.map toString |>.foldl String.append ""
+
+instance : Coe Element Content where
+ coe e := Content.Element e
+
+-- TODO: syntax metaprogramming and basic semantic data
+
+def semanticXml (i : DocInfo) : HtmlM $ Array Element := do
+ pure #[
+ Element.Element
+ "rdf:RDF"
+ (Std.RBMap.fromList [
+ ("xmlns:rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
+ ("xmlns:docgen4", s!"{←getRoot}semantic/docgen4.xml#")
+ ] _)
+ #[
+ Element.Element
+ "rdf:Description"
+ (Std.RBMap.fromList [
+ ("rdf:about", s!"{←getRoot}semantic/{i.getName.hash}.xml#")
+ ] _)
+ #[]
+ ]
+ ]
+
+def schemaXml : HtmlM $ Array Element := do
+ pure #[
+ Element.Element
+ "rdf:RDF"
+ (Std.RBMap.fromList [
+ ("xmlns:rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
+ ("xmlns:docgen4", s!"{←getRoot}semantic/docgen4.xml#")
+ ] _)
+ #[
+ Element.Element
+ "docgen4:hasInstance"
+ Std.RBMap.empty
+ #[
+ Element.Element
+ "rdfs:type"
+ Std.RBMap.empty
+ #[Content.Character "rdf:Property"]
+ ]
+ ]
+ ]
+
+end Output
+end DocGen4
diff --git a/DocGen4/Output/Template.lean b/DocGen4/Output/Template.lean
index 8ed58ee..05390fe 100644
--- a/DocGen4/Output/Template.lean
+++ b/DocGen4/Output/Template.lean
@@ -15,46 +15,48 @@ def baseHtmlArray (title : String) (site : Array Html) : HtmlM Html := do
pure
+
+
{title}
+
+
+
+
-
{title}
-
-
+
+
+
+
+
+
+
+
+
+
-
+
-
- Documentation
- {title}
- -- TODO: Replace this form with our own search
-
-
+
+ Documentation
+ {title}
+ -- TODO: Replace this form with our own search
+
+
- [site]
-
- {←navbar}
+ [site]
+
+ {←navbar}
- -- Lean in JS in HTML in Lean...very meta
-
-
- -- TODO Add more js stuff
-
-
- -- mathjax
-
-
-
+
diff --git a/Main.lean b/Main.lean
index 062cf6b..88a0640 100644
--- a/Main.lean
+++ b/Main.lean
@@ -1,15 +1,30 @@
import DocGen4
import Lean
+import Cli
-open DocGen4 Lean IO
+open DocGen4 Lean Cli
-def main (modules : List String) : IO Unit := do
- if modules.isEmpty then
- IO.println "Usage: doc-gen4 Module1 Module2 ..."
- IO.Process.exit 1
- return
- let path ← lakeSetupSearchPath (←getLakePath) modules.toArray
- IO.println s!"Loading modules from: {path}"
- let doc ← load $ modules.map Name.mkSimple
- IO.println "Outputting HTML"
- htmlOutput doc
+def runDocGenCmd (p : Parsed) : IO UInt32 := do
+ -- let root := p.positionalArg! "root" |>.as! String
+ let modules : List String := p.variableArgsAs! String |>.toList
+ let res ← lakeSetup modules
+ match res with
+ | Except.ok (ws, leanHash) =>
+ IO.println s!"Loading modules from: {←searchPathRef.get}"
+ let doc ← load $ modules.map Name.mkSimple
+ IO.println "Outputting HTML"
+ htmlOutput doc ws leanHash
+ pure 0
+ | Except.error rc => pure rc
+
+def docGenCmd : Cmd := `[Cli|
+ "doc-gen4" VIA runDocGenCmd; ["0.0.1"]
+ "A documentation generator for Lean 4."
+
+ ARGS:
+ -- root : String; "The root URL to generate the HTML for (will be relative in the future)"
+ ...modules : String; "The modules to generate the HTML for"
+]
+
+def main (args : List String) : IO UInt32 :=
+ docGenCmd.validate args
diff --git a/lakefile.lean b/lakefile.lean
index cc8dd0e..d18d59c 100644
--- a/lakefile.lean
+++ b/lakefile.lean
@@ -12,6 +12,14 @@ package «doc-gen4» {
{
name := `Unicode
src := Source.git "https://github.com/xubaiw/Unicode.lean" "3b7b85472d42854a474099928a3423bb97d4fa64"
+ },
+ {
+ name := `Cli
+ src := Source.git "https://github.com/mhuisi/lean4-cli" "1f8663e3dafdcc11ff476d74ef9b99ae5bdaedd3"
+ },
+ {
+ name := `lake
+ src := Source.git "https://github.com/leanprover/lake" "9378575b5575f49a185d50130743a190a9be2f82"
}
]
}
diff --git a/lean-toolchain b/lean-toolchain
index 7c2b6c2..8e380ba 100644
--- a/lean-toolchain
+++ b/lean-toolchain
@@ -1 +1 @@
-leanprover/lean4:nightly-2022-02-17
+leanprover/lean4:nightly-2022-03-06
diff --git a/static/declaration-data.js b/static/declaration-data.js
new file mode 100644
index 0000000..2b778d5
--- /dev/null
+++ b/static/declaration-data.js
@@ -0,0 +1,243 @@
+/**
+ * This module is a wrapper that facilitates manipulating the declaration data.
+ *
+ * Please see {@link DeclarationDataCenter} for more information.
+ */
+
+import { SITE_ROOT } from "./site-root.js";
+
+const CACHE_DB_NAME = "declaration-data";
+const CACHE_DB_VERSION = 1;
+
+/**
+ * The DeclarationDataCenter is used for declaration searching.
+ *
+ * For usage, see the {@link init} and {@link search} methods.
+ */
+export class DeclarationDataCenter {
+ /**
+ * The declaration data. Users should not interact directly with this field.
+ *
+ * *NOTE:* This is not made private to support legacy browsers.
+ */
+ declarationData = null;
+
+ /**
+ * Used to implement the singleton, in case we need to fetch data multiple times in the same page.
+ */
+ static singleton = null;
+
+ /**
+ * Construct a DeclarationDataCenter with given data.
+ *
+ * Please use {@link DeclarationDataCenter.init} instead, which automates the data fetching process.
+ * @param {*} declarationData
+ */
+ constructor(declarationData) {
+ this.declarationData = declarationData;
+ }
+
+ /**
+ * The actual constructor of DeclarationDataCenter
+ * @returns {Promise<DeclarationDataCenter>}
+ */
+ static async init() {
+ if (!DeclarationDataCenter.singleton) {
+ const timestampUrl = new URL(
+ `${SITE_ROOT}declaration-data.timestamp`,
+ window.location
+ );
+ const dataUrl = new URL(
+ `${SITE_ROOT}declaration-data.bmp`,
+ window.location
+ );
+
+ const timestampRes = await fetch(timestampUrl);
+ const timestamp = await timestampRes.text();
+
+ // try to use cache first
+ const data = await fetchCachedDeclarationData(timestamp).catch(_e => null);
+ if (data) {
+ // if data is defined, use the cached one.
+ DeclarationDataCenter.singleton = new DeclarationDataCenter(data);
+ } else {
+ // undefined. then fetch the data from the server.
+ const dataRes = await fetch(dataUrl);
+ const dataJson = await dataRes.json();
+ // the data is a map of name (original case) to declaration data.
+ const data = new Map(
+ dataJson.map(({ name, doc, link, docLink, sourceLink }) => [
+ name,
+ {
+ name,
+ lowerName: name.toLowerCase(),
+ lowerDoc: doc.toLowerCase(),
+ link,
+ docLink,
+ sourceLink,
+ },
+ ])
+ );
+ await cacheDeclarationData(timestamp, data);
+ DeclarationDataCenter.singleton = new DeclarationDataCenter(data);
+ }
+ }
+ return DeclarationDataCenter.singleton;
+ }
+
+ /**
+ * Search for a declaration.
+ * @returns {Array}
+ */
+ search(pattern, strict = true) {
+ if (!pattern) {
+ return [];
+ }
+ if (strict) {
+ let decl = this.declarationData.get(pattern);
+ return decl ? [decl] : [];
+ } else {
+ return getMatches(this.declarationData, pattern);
+ }
+ }
+}
+
+function isSeparater(char) {
+ return char === "." || char === "_";
+}
+
+// HACK: the fuzzy matching is quite hacky
+
+function matchCaseSensitive(declName, lowerDeclName, pattern) {
+ let i = 0,
+ j = 0,
+ err = 0,
+ lastMatch = 0;
+ while (i < declName.length && j < pattern.length) {
+ if (pattern[j] === declName[i] || pattern[j] === lowerDeclName[i]) {
+ err += (isSeparater(pattern[j]) ? 0.125 : 1) * (i - lastMatch);
+ if (pattern[j] !== declName[i]) err += 0.5;
+ lastMatch = i + 1;
+ j++;
+ } else if (isSeparater(declName[i])) {
+ err += 0.125 * (i + 1 - lastMatch);
+ lastMatch = i + 1;
+ }
+ i++;
+ }
+ err += 0.125 * (declName.length - lastMatch);
+ if (j === pattern.length) {
+ return err;
+ }
+}
+
+function getMatches(declarations, pattern, maxResults = 30) {
+ const lowerPats = pattern.toLowerCase().split(/\s/g);
+ const patNoSpaces = pattern.replace(/\s/g, "");
+ const results = [];
+ for (const {
+ name,
+ lowerName,
+ lowerDoc,
+ link,
+ docLink,
+ sourceLink,
+ } of declarations.values()) {
+ let err = matchCaseSensitive(name, lowerName, patNoSpaces);
+ // match all words as substrings of docstring
+ if (
+ err >= 3 &&
+ pattern.length > 3 &&
+ lowerPats.every((l) => lowerDoc.indexOf(l) != -1)
+ ) {
+ err = 3;
+ }
+ if (err !== undefined) {
+ results.push({
+ name,
+ err,
+ lowerName,
+ lowerDoc,
+ link,
+ docLink,
+ sourceLink,
+ });
+ }
+ }
+ return results.sort(({ err: a }, { err: b }) => a - b).slice(0, maxResults);
+}
+
+// TODO: refactor the indexedDB part to be more robust
+
+/**
+ * Get the indexedDB database, automatically initialized.
+ * @returns {Promise<IDBDatabase>}
+ */
+async function getDeclarationDatabase() {
+ return new Promise((resolve, reject) => {
+ const request = indexedDB.open(CACHE_DB_NAME, CACHE_DB_VERSION);
+
+ request.onerror = function (event) {
+ reject(
+ new Error(
+ `fail to open indexedDB ${CACHE_DB_NAME} of version ${CACHE_DB_VERSION}`
+ )
+ );
+ };
+ request.onupgradeneeded = function (event) {
+ let db = event.target.result;
+ // We only need to store one object, so no key path or increment is needed.
+ db.createObjectStore("declaration");
+ };
+ request.onsuccess = function (event) {
+ resolve(event.target.result);
+ };
+ });
+}
+
+/**
+ * Store data in indexedDB object store.
+ * @param {string} timestamp
+ * @param {Map} data
+ */
+async function cacheDeclarationData(timestamp, data) {
+ let db = await getDeclarationDatabase();
+ let store = db
+ .transaction("declaration", "readwrite")
+ .objectStore("declaration");
+ return new Promise((resolve, reject) => {
+ let clearRequest = store.clear();
+ let addRequest = store.add(data, timestamp);
+
+ addRequest.onsuccess = function (event) {
+ resolve();
+ };
+ addRequest.onerror = function (event) {
+ reject(new Error(`fail to store declaration data`));
+ };
+ clearRequest.onerror = function (event) {
+ reject(new Error("fail to clear object store"));
+ };
+ });
+}
+
+/**
+ * Retrieve data from indexedDB database.
+ * @param {string} timestamp
+ * @returns {Promise