diff --git a/.gitignore b/.gitignore index 7e41fc2..5306c36 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # Lean build +lakefile.olean lake-packages _target leanpkg.path diff --git a/DocGen4/Load.lean b/DocGen4/Load.lean index 947e48a..c579837 100644 --- a/DocGen4/Load.lean +++ b/DocGen4/Load.lean @@ -5,35 +5,17 @@ Authors: Henrik Böving -/ import Lean -import Lake -import Lake.CLI.Main import DocGen4.Process import Lean.Data.HashMap namespace DocGen4 open Lean System IO -/-- -Sets up a lake workspace for the current project. Furthermore initialize -the Lean search path with the path to the proper compiler from lean-toolchain -as well as all the dependencies. --/ -def lakeSetup : IO (Except UInt32 Lake.Workspace) := do - let (leanInstall?, lakeInstall?) ← Lake.findInstall? - let config := Lake.mkLoadConfig.{0} {leanInstall?, lakeInstall?} - match ←(EIO.toIO' config) with - | .ok config => - let ws : Lake.Workspace ← Lake.loadWorkspace config - |>.run Lake.MonadLog.eio - |>.toIO (λ _ => IO.userError "Failed to load Lake workspace") - pure <| Except.ok ws - | .error err => - throw <| IO.userError err.toString -def envOfImports (imports : List Name) : IO Environment := do +def envOfImports (imports : Array Name) : IO Environment := do importModules (imports.map (Import.mk · false)) Options.empty -def loadInit (imports : List Name) : IO Hierarchy := do +def loadInit (imports : Array Name) : IO Hierarchy := do let env ← envOfImports imports pure <| Hierarchy.fromArray env.header.moduleNames @@ -42,6 +24,7 @@ Load a list of modules from the current Lean search path into an `Environment` to process for documentation. 
-/ def load (task : Process.AnalyzeTask) : IO (Process.AnalyzerResult × Hierarchy) := do + initSearchPath (← findSysroot) let env ← envOfImports task.getLoad let config := { -- TODO: parameterize maxHeartbeats @@ -55,6 +38,6 @@ def load (task : Process.AnalyzeTask) : IO (Process.AnalyzerResult × Hierarchy) Prod.fst <$> Meta.MetaM.toIO (Process.process task) config { env := env } {} {} def loadCore : IO (Process.AnalyzerResult × Hierarchy) := do - load <| .loadAll [`Init, `Lean, `Lake] + load <| .loadAll #[`Init, `Lean, `Lake] end DocGen4 diff --git a/DocGen4/Output.lean b/DocGen4/Output.lean index 543e001..41fd7fb 100644 --- a/DocGen4/Output.lean +++ b/DocGen4/Output.lean @@ -4,7 +4,6 @@ Released under Apache 2.0 license as described in the file LICENSE. Authors: Henrik Böving -/ import Lean -import Lake import DocGen4.Process import DocGen4.Output.Base import DocGen4.Output.Index @@ -42,6 +41,8 @@ def htmlOutputSetup (config : SiteBaseContext) : IO Unit := do ("declaration-data.js", declarationDataCenterJs), ("color-scheme.js", colorSchemeJs), ("nav.js", navJs), + ("jump-src.js", jumpSrcJs), + ("expand-nav.js", expandNavJs), ("how-about.js", howAboutJs), ("search.html", searchHtml), ("search.js", searchJs), @@ -79,19 +80,18 @@ def htmlOutputDeclarationDatas (result : AnalyzerResult) : HtmlT IO Unit := do let jsonDecls ← Module.toJson mod FS.writeFile (declarationsBasePath / s!"declaration-data-{mod.name}.bmp") (toJson jsonDecls).compress -def htmlOutputResults (baseConfig : SiteBaseContext) (result : AnalyzerResult) (ws : Lake.Workspace) (ink : Bool) : IO Unit := do +def htmlOutputResults (baseConfig : SiteBaseContext) (result : AnalyzerResult) (gitUrl? : Option String) (ink : Bool) : IO Unit := do let config : SiteContext := { result := result, - sourceLinker := ← SourceLinker.sourceLinker ws + sourceLinker := ← SourceLinker.sourceLinker gitUrl? 
leanInkEnabled := ink } FS.createDirAll basePath FS.createDirAll declarationsBasePath - -- Rendering the entire lean compiler takes time.... - --let sourceSearchPath := ((←Lean.findSysroot) / "src" / "lean") :: ws.root.srcDir :: ws.leanSrcPath - let sourceSearchPath := ws.root.srcDir :: ws.leanSrcPath + let some p := (← IO.getEnv "LEAN_SRC_PATH") | throw <| IO.userError "LEAN_SRC_PATH not found in env" + let sourceSearchPath := System.SearchPath.parse p discard <| htmlOutputDeclarationDatas result |>.run config baseConfig @@ -119,8 +119,6 @@ def getSimpleBaseContext (hierarchy : Hierarchy) : IO SiteBaseContext := do depthToRoot := 0, currentName := none, hierarchy - projectGithubUrl := ← SourceLinker.getProjectGithubUrl - projectCommit := ← SourceLinker.getProjectCommit } def htmlOutputIndex (baseConfig : SiteBaseContext) : IO Unit := do @@ -146,9 +144,9 @@ def htmlOutputIndex (baseConfig : SiteBaseContext) : IO Unit := do The main entrypoint for outputting the documentation HTML based on an `AnalyzerResult`. -/ -def htmlOutput (result : AnalyzerResult) (hierarchy : Hierarchy) (ws : Lake.Workspace) (ink : Bool) : IO Unit := do +def htmlOutput (result : AnalyzerResult) (hierarchy : Hierarchy) (gitUrl? : Option String) (ink : Bool) : IO Unit := do let baseConfig ← getSimpleBaseContext hierarchy - htmlOutputResults baseConfig result ws ink + htmlOutputResults baseConfig result gitUrl? ink htmlOutputIndex baseConfig end DocGen4 diff --git a/DocGen4/Output/Base.lean b/DocGen4/Output/Base.lean index 6c4769c..4b22db6 100644 --- a/DocGen4/Output/Base.lean +++ b/DocGen4/Output/Base.lean @@ -33,14 +33,6 @@ structure SiteBaseContext where pages that don't have a module name. -/ currentName : Option Name - /-- - The Github URL of the project that we are building docs for. - -/ - projectGithubUrl : String - /-- - The commit of the project that we are building docs for. - -/ - projectCommit : String /-- The context used in the `HtmlM` monad for HTML templating. 
@@ -94,8 +86,6 @@ def getCurrentName : BaseHtmlM (Option Name) := do return (← read).currentName def getResult : HtmlM AnalyzerResult := do return (← read).result def getSourceUrl (module : Name) (range : Option DeclarationRange): HtmlM String := do return (← read).sourceLinker module range def leanInkEnabled? : HtmlM Bool := do return (← read).leanInkEnabled -def getProjectGithubUrl : BaseHtmlM String := do return (← read).projectGithubUrl -def getProjectCommit : BaseHtmlM String := do return (← read).projectCommit /-- If a template is meant to be extended because it for example only provides the @@ -156,7 +146,9 @@ are used in documentation generation, notably JS and CSS ones. def styleCss : String := include_str "../../static/style.css" def declarationDataCenterJs : String := include_str "../../static/declaration-data.js" def colorSchemeJs : String := include_str "../../static/color-scheme.js" + def jumpSrcJs : String := include_str "../../static/jump-src.js" def navJs : String := include_str "../../static/nav.js" + def expandNavJs : String := include_str "../../static/expand-nav.js" def howAboutJs : String := include_str "../../static/how-about.js" def searchJs : String := include_str "../../static/search.js" def instancesJs : String := include_str "../../static/instances.js" @@ -261,11 +253,12 @@ partial def infoFormatToHtml (i : CodeWithInfos) : HtmlM (Array Html) := do | .sort _ => match t with | .text t => - let mut sortPrefix :: rest := t.splitOn " " | unreachable! + let sortPrefix :: rest := t.splitOn " " | unreachable! 
let sortLink := {sortPrefix} - if rest != [] then - rest := " " :: rest - return #[sortLink, Html.text <| String.join rest] + let mut restStr := String.intercalate " " rest + if restStr.length != 0 then + restStr := " " ++ restStr + return #[sortLink, Html.text restStr] | _ => return #[[← infoFormatToHtml t]] | _ => diff --git a/DocGen4/Output/DocString.lean b/DocGen4/Output/DocString.lean index 01530f4..c39c4c8 100644 --- a/DocGen4/Output/DocString.lean +++ b/DocGen4/Output/DocString.lean @@ -70,7 +70,9 @@ partial def xmlGetHeadingId (el : Xml.Element) : String := -/ def nameToLink? (s : String) : HtmlM (Option String) := do let res ← getResult - if let some name := Lean.Syntax.decodeNameLit ("`" ++ s) then + if s.endsWith ".lean" && s.contains '/' then + return (← getRoot) ++ s.dropRight 5 ++ ".html" + else if let some name := Lean.Syntax.decodeNameLit ("`" ++ s) then -- with exactly the same name if res.name2ModIdx.contains name then declNameToLink name @@ -208,7 +210,7 @@ partial def modifyElement (element : Element) : HtmlM Element := /-- Convert docstring to Html. -/ def docStringToHtml (s : String) : HtmlM (Array Html) := do - let rendered := CMark.renderHtml s + let rendered := CMark.renderHtml (Html.escape s) match manyDocument rendered.mkIterator with | Parsec.ParseResult.success _ res => res.mapM fun x => do return Html.text <| toString (← modifyElement x) diff --git a/DocGen4/Output/SourceLinker.lean b/DocGen4/Output/SourceLinker.lean index 48ca406..44121f7 100644 --- a/DocGen4/Output/SourceLinker.lean +++ b/DocGen4/Output/SourceLinker.lean @@ -4,97 +4,31 @@ Released under Apache 2.0 license as described in the file LICENSE. Authors: Henrik Böving -/ import Lean -import Lake.Load namespace DocGen4.Output.SourceLinker open Lean -/-- -Turns a Github git remote URL into an HTTPS Github URL. 
-Three link types from git supported: -- https://github.com/org/repo -- https://github.com/org/repo.git -- git@github.com:org/repo.git - -TODO: This function is quite brittle and very Github specific, we can -probably do better. --/ -def getGithubBaseUrl (gitUrl : String) : String := Id.run do - let mut url := gitUrl - if url.startsWith "git@" then - url := url.drop 15 - url := url.dropRight 4 - return s!"https://github.com/{url}" - else if url.endsWith ".git" then - return url.dropRight 4 - else - return url - -/-- -Obtain the Github URL of a project by parsing the origin remote. --/ -def getProjectGithubUrl (directory : System.FilePath := "." ) : IO String := do - let out ← IO.Process.output { - cmd := "git", - args := #["remote", "get-url", "origin"], - cwd := directory - } - if out.exitCode != 0 then - throw <| IO.userError <| "git exited with code " ++ toString out.exitCode - return out.stdout.trimRight - -/-- -Obtain the git commit hash of the project that is currently getting analyzed. --/ -def getProjectCommit (directory : System.FilePath := "." ) : IO String := do - let out ← IO.Process.output { - cmd := "git", - args := #["rev-parse", "HEAD"] - cwd := directory - } - if out.exitCode != 0 then - throw <| IO.userError <| "git exited with code " ++ toString out.exitCode - return out.stdout.trimRight - /-- Given a lake workspace with all the dependencies as well as the hash of the compiler release to work with this provides a function to turn names of declarations into (optionally positional) Github URLs. 
-/ -def sourceLinker (ws : Lake.Workspace) : IO (Name → Option DeclarationRange → String) := do - let leanHash := ws.lakeEnv.lean.githash - -- Compute a map from package names to source URL - let mut gitMap := Lean.mkHashMap - let projectBaseUrl := getGithubBaseUrl (← getProjectGithubUrl) - let projectCommit ← getProjectCommit - gitMap := gitMap.insert ws.root.name (projectBaseUrl, projectCommit) - let manifest ← Lake.Manifest.loadOrEmpty ws.root.manifestFile - |>.run (Lake.MonadLog.eio .normal) - |>.toIO (fun _ => IO.userError "Failed to load lake manifest") - for pkg in manifest.entryArray do - match pkg with - | .git _ _ _ url rev .. => gitMap := gitMap.insert pkg.name (getGithubBaseUrl url, rev) - | .path _ _ _ path => - let pkgBaseUrl := getGithubBaseUrl (← getProjectGithubUrl path) - let pkgCommit ← getProjectCommit path - gitMap := gitMap.insert pkg.name (pkgBaseUrl, pkgCommit) +def sourceLinker (gitUrl? : Option String) : IO (Name → Option DeclarationRange → String) := do + -- TODO: Refactor this, we don't need to pass in the module into the returned closure + -- since we have one sourceLinker per module return fun module range => let parts := module.components.map Name.toString - let path := (parts.intersperse "/").foldl (· ++ ·) "" + let path := String.intercalate "/" parts let root := module.getRoot + let leanHash := Lean.githash let basic := if root == `Lean ∨ root == `Init then s!"https://github.com/leanprover/lean4/blob/{leanHash}/src/{path}.lean" else if root == `Lake then s!"https://github.com/leanprover/lean4/blob/{leanHash}/src/lake/{path}.lean" else - match ws.packageArray.find? (·.isLocalModule module) with - | some pkg => - match gitMap.find? pkg.name with - | some (baseUrl, commit) => s!"{baseUrl}/blob/{commit}/{path}.lean" - | none => "https://example.com" - | none => "https://example.com" + gitUrl?.get!
match range with | some range => s!"{basic}#L{range.pos.line}-L{range.endPos.line}" diff --git a/DocGen4/Output/Template.lean b/DocGen4/Output/Template.lean index 192fb4b..5995e16 100644 --- a/DocGen4/Output/Template.lean +++ b/DocGen4/Output/Template.lean @@ -32,7 +32,9 @@ def baseHtmlGenerator (title : String) (site : Array Html) : BaseHtmlM Html := d [moduleConstant] + + diff --git a/DocGen4/Output/ToHtmlFormat.lean b/DocGen4/Output/ToHtmlFormat.lean index e645ee8..5a4285e 100644 --- a/DocGen4/Output/ToHtmlFormat.lean +++ b/DocGen4/Output/ToHtmlFormat.lean @@ -53,10 +53,10 @@ partial def textLength : Html → Nat def escapePairs : Array (String × String) := #[ - ("&", "&"), - ("<", "<"), - (">", ">"), - ("\"", """) + ("&", "&"), + ("<", "<"), + (">", ">"), + ("\"", """) ] def escape (s : String) : String := diff --git a/DocGen4/Output/ToJson.lean b/DocGen4/Output/ToJson.lean index 8e17726..ad6a59c 100644 --- a/DocGen4/Output/ToJson.lean +++ b/DocGen4/Output/ToJson.lean @@ -36,17 +36,22 @@ structure JsonModule where deriving FromJson, ToJson structure JsonHeaderIndex where - headers : List (String × String) := [] + declarations : List (String × JsonDeclaration) := [] + +structure JsonIndexedDeclarationInfo where + kind : String + docLink : String + deriving FromJson, ToJson structure JsonIndex where - declarations : List (String × JsonDeclarationInfo) := [] + declarations : List (String × JsonIndexedDeclarationInfo) := [] instances : HashMap String (RBTree String Ord.compare) := .empty importedBy : HashMap String (Array String) := .empty modules : List (String × String) := [] instancesFor : HashMap String (RBTree String Ord.compare) := .empty instance : ToJson JsonHeaderIndex where - toJson idx := Json.mkObj <| idx.headers.map (fun (k, v) => (k, toJson v)) + toJson idx := Json.mkObj <| idx.declarations.map (fun (k, v) => (k, toJson v)) instance : ToJson JsonIndex where toJson idx := Id.run do @@ -65,13 +70,16 @@ instance : ToJson JsonIndex where return finalJson 
def JsonHeaderIndex.addModule (index : JsonHeaderIndex) (module : JsonModule) : JsonHeaderIndex := - let merge idx decl := { idx with headers := (decl.info.name, decl.header) :: idx.headers } + let merge idx decl := { idx with declarations := (decl.info.name, decl) :: idx.declarations } module.declarations.foldl merge index def JsonIndex.addModule (index : JsonIndex) (module : JsonModule) : BaseHtmlM JsonIndex := do let mut index := index let newModule := (module.name, ← moduleNameToHtmlLink (String.toName module.name)) - let newDecls := module.declarations.map (fun d => (d.info.name, d.info)) + let newDecls := module.declarations.map (fun d => (d.info.name, { + kind := d.info.kind, + docLink := d.info.docLink, + })) index := { index with modules := newModule :: index.modules declarations := newDecls ++ index.declarations diff --git a/DocGen4/Process/Analyze.lean b/DocGen4/Process/Analyze.lean index 9c4c03d..da74efb 100644 --- a/DocGen4/Process/Analyze.lean +++ b/DocGen4/Process/Analyze.lean @@ -110,7 +110,7 @@ def process (task : AnalyzeTask) : MetaM (AnalyzerResult × Hierarchy) := do let env ← getEnv let relevantModules := match task with | .loadAll _ => HashSet.fromArray env.header.moduleNames - | .loadAllLimitAnalysis analysis => HashSet.fromArray analysis.toArray + | .loadAllLimitAnalysis analysis => HashSet.fromArray analysis let allModules := env.header.moduleNames let mut res ← getAllModuleDocs relevantModules.toArray diff --git a/DocGen4/Process/DefinitionInfo.lean b/DocGen4/Process/DefinitionInfo.lean index a260557..c6255d5 100644 --- a/DocGen4/Process/DefinitionInfo.lean +++ b/DocGen4/Process/DefinitionInfo.lean @@ -12,20 +12,13 @@ namespace DocGen4.Process open Lean Meta Widget -partial def stripArgs (e : Expr) : Expr := +partial def stripArgs (e : Expr) (k : Expr → MetaM α) : MetaM α := match e.consumeMData with - | Expr.lam name _ body _ => + | Expr.forallE name type body bi => let name := name.eraseMacroScopes - stripArgs (Expr.instantiate1 body 
(mkFVar ⟨name⟩)) - | Expr.forallE name _ body _ => - let name := name.eraseMacroScopes - stripArgs (Expr.instantiate1 body (mkFVar ⟨name⟩)) - | _ => e - -def processEq (eq : Name) : MetaM CodeWithInfos := do - let type ← (mkConstWithFreshMVarLevels eq >>= inferType) - let final := stripArgs type - prettyPrintTerm final + Meta.withLocalDecl name bi type fun fvar => do + stripArgs (Expr.instantiate1 body fvar) k + | _ => k e def valueToEq (v : DefinitionVal) : MetaM Expr := withLCtx {} {} do withOptions (tactic.hygienic.set . false) do @@ -35,6 +28,10 @@ def valueToEq (v : DefinitionVal) : MetaM Expr := withLCtx {} {} do let type ← mkForallFVars xs type return type +def processEq (eq : Name) : MetaM CodeWithInfos := do + let type ← (mkConstWithFreshMVarLevels eq >>= inferType) + stripArgs type prettyPrintTerm + def DefinitionInfo.ofDefinitionVal (v : DefinitionVal) : MetaM DefinitionInfo := do let info ← Info.ofConstantVal v.toConstantVal let isUnsafe := v.safety == DefinitionSafety.unsafe @@ -52,7 +49,7 @@ def DefinitionInfo.ofDefinitionVal (v : DefinitionVal) : MetaM DefinitionInfo := isNonComputable } | none => - let equations := #[← prettyPrintTerm <| stripArgs (← valueToEq v)] + let equations := #[← stripArgs (← valueToEq v) prettyPrintTerm] return { toInfo := info, isUnsafe, diff --git a/DocGen4/Process/Hierarchy.lean b/DocGen4/Process/Hierarchy.lean index 329b778..6d10e88 100644 --- a/DocGen4/Process/Hierarchy.lean +++ b/DocGen4/Process/Hierarchy.lean @@ -95,14 +95,16 @@ def baseDirBlackList : HashSet String := "color-scheme.js", "declaration-data.js", "declarations", + "expand-nav.js", "find", + "foundational_types.html", "how-about.js", "index.html", - "search.html", - "foundational_types.html", + "jump-src.js", "mathjax-config.js", "navbar.html", "nav.js", + "search.html", "search.js", "src", "style.css" diff --git a/DocGen4/Process/InstanceInfo.lean b/DocGen4/Process/InstanceInfo.lean index 3cd306d..df37004 100644 --- a/DocGen4/Process/InstanceInfo.lean 
+++ b/DocGen4/Process/InstanceInfo.lean @@ -16,19 +16,23 @@ open Lean Meta def getInstanceTypes (typ : Expr) : MetaM (Array Name) := do let (_, _, tail) ← forallMetaTelescopeReducing typ let args := tail.getAppArgs - let (_, names) ← args.mapM (Expr.forEach · findName) |>.run .empty + let (_, bis, _) ← forallMetaTelescopeReducing (← inferType tail.getAppFn) + let (_, names) ← (bis.zip args).mapM findName |>.run .empty return names where - findName : Expr → StateRefT (Array Name) MetaM Unit - | .const name _ => modify (·.push name) - | .sort .zero => modify (·.push "_builtin_prop") - | .sort (.succ _) => modify (·.push "_builtin_typeu") - | .sort _ => modify (·.push "_builtin_sortu") + findName : BinderInfo × Expr → StateRefT (Array Name) MetaM Unit + | (.default, .sort .zero) => modify (·.push "_builtin_prop") + | (.default, .sort (.succ _)) => modify (·.push "_builtin_typeu") + | (.default, .sort _) => modify (·.push "_builtin_sortu") + | (.default, e) => + match e.getAppFn with + | .const name .. => modify (·.push name) + | _ => return () | _ => return () def InstanceInfo.ofDefinitionVal (v : DefinitionVal) : MetaM InstanceInfo := do let mut info ← DefinitionInfo.ofDefinitionVal v - let some className ← isClass? v.type | unreachable! + let some className ← isClass? v.type | panic! s!"isClass? on {v.name} returned none" if let some instAttr ← getDefaultInstance v.name className then info := { info with attrs := info.attrs.push instAttr } let typeNames ← getInstanceTypes v.type diff --git a/DocGen4/Process/NameInfo.lean b/DocGen4/Process/NameInfo.lean index 8cb0ed8..c827778 100644 --- a/DocGen4/Process/NameInfo.lean +++ b/DocGen4/Process/NameInfo.lean @@ -15,39 +15,42 @@ def NameInfo.ofTypedName (n : Name) (t : Expr) : MetaM NameInfo := do let env ← getEnv return { name := n, type := ← prettyPrintTerm t, doc := ← findDocString? 
env n} -partial def typeToArgsType (e : Expr) : (Array ((Option Name) × Expr × BinderInfo) × Expr) := - let helper := fun name type body data => - -- Once we hit a name with a macro scope we stop traversing the expression - -- and print what is left after the : instead. The only exception - -- to this is instances since these almost never have a name - -- but should still be printed as arguments instead of after the :. - if data.isInstImplicit && name.hasMacroScopes then - let arg := (none, type, data) - let (args, final) := typeToArgsType (Expr.instantiate1 body (mkFVar ⟨name⟩)) - (#[arg] ++ args, final) - else if name.hasMacroScopes then - (#[], e) - else - let arg := (some name, type, data) - let (args, final) := typeToArgsType (Expr.instantiate1 body (mkFVar ⟨name⟩)) - (#[arg] ++ args, final) - match e.consumeMData with - | Expr.lam name type body binderInfo => helper name type body binderInfo - | Expr.forallE name type body binderInfo => helper name type body binderInfo - | _ => (#[], e) +partial def argTypeTelescope {α : Type} (e : Expr) (k : Array ((Option Name) × Expr × BinderInfo) → Expr → MetaM α) : MetaM α := + go e #[] +where + go (e : Expr) (args : Array ((Option Name) × Expr × BinderInfo)) : MetaM α := do + let helper := fun name type body bi => + -- Once we hit a name with a macro scope we stop traversing the expression + -- and print what is left after the : instead. The only exception + -- to this is instances since these almost never have a name + -- but should still be printed as arguments instead of after the :. 
+ if bi.isInstImplicit && name.hasMacroScopes then + let arg := (none, type, bi) + Meta.withLocalDecl name bi type fun fvar => do + go (Expr.instantiate1 body fvar) (args.push arg) + else if name.hasMacroScopes then + k args e + else + let arg := (some name, type, bi) + Meta.withLocalDecl name bi type fun fvar => do + go (Expr.instantiate1 body fvar) (args.push arg) + match e.consumeMData with + | Expr.forallE name type body binderInfo => helper name type body binderInfo + | _ => k args e def Info.ofConstantVal (v : ConstantVal) : MetaM Info := do - let (args, type) := typeToArgsType v.type - let args ← args.mapM (fun (n, e, b) => do return Arg.mk n (← prettyPrintTerm e) b) - let nameInfo ← NameInfo.ofTypedName v.name type - match ← findDeclarationRanges? v.name with - -- TODO: Maybe selection range is more relevant? Figure this out in the future - | some range => return { - toNameInfo := nameInfo, - args, - declarationRange := range.range, - attrs := ← getAllAttributes v.name - } - | none => panic! s!"{v.name} is a declaration without position" + argTypeTelescope v.type fun args type => do + let args ← args.mapM (fun (n, e, b) => do return Arg.mk n (← prettyPrintTerm e) b) + let nameInfo ← NameInfo.ofTypedName v.name type + match ← findDeclarationRanges? v.name with + -- TODO: Maybe selection range is more relevant? Figure this out in the future + | some range => + return { + toNameInfo := nameInfo, + args, + declarationRange := range.range, + attrs := ← getAllAttributes v.name + } + | none => panic! 
s!"{v.name} is a declaration without position" end DocGen4.Process diff --git a/DocGen4/Process/StructureInfo.lean b/DocGen4/Process/StructureInfo.lean index 08b275c..aaf32f1 100644 --- a/DocGen4/Process/StructureInfo.lean +++ b/DocGen4/Process/StructureInfo.lean @@ -10,26 +10,26 @@ import DocGen4.Process.NameInfo namespace DocGen4.Process -open Lean Meta +open Lean Meta --- TODO: replace with Leos variant from Zulip -def dropArgs (type : Expr) (n : Nat) : (Expr × List (Name × Expr)) := - match type, n with - | e, 0 => (e, []) - | Expr.forallE name type body _, x + 1 => - let body := body.instantiate1 <| mkFVar ⟨name⟩ - let next := dropArgs body x - { next with snd := (name, type) :: next.snd} - | _, _ + 1 => panic! s!"No forallE left" +/-- + Execute `k` with an array containing pairs `(fieldName, fieldType)`. + `k` is executed in an updated local context which contains local declarations for the `structName` parameters. +-/ +def withFields (info : InductiveVal) (k : Array (Name × Expr) → MetaM α) (includeSubobjectFields : Bool := false) : MetaM α := do + let structName := info.name + let us := info.levelParams.map mkLevelParam + forallTelescopeReducing info.type fun params _ => + withLocalDeclD `s (mkAppN (mkConst structName us) params) fun s => do + let mut info := #[] + for fieldName in getStructureFieldsFlattened (← getEnv) structName includeSubobjectFields do + let proj ← mkProjection s fieldName + info := info.push (fieldName, (← inferType proj)) + k info -def getFieldTypes (struct : Name) (ctor : ConstructorVal) (parents : Nat) : MetaM (Array NameInfo) := do - let type := ctor.type - let (fieldFunction, _) := dropArgs type (ctor.numParams + parents) - let (_, fields) := dropArgs fieldFunction (ctor.numFields - parents) - let mut fieldInfos := #[] - for (name, type) in fields do - fieldInfos := fieldInfos.push <| ← NameInfo.ofTypedName (struct.append name) type - return fieldInfos +def getFieldTypes (v : InductiveVal) : MetaM (Array NameInfo) := do + 
withFields v fun fields => + fields.foldlM (init := #[]) (fun acc (name, type) => do return acc.push (← NameInfo.ofTypedName (v.name.append name) type)) def StructureInfo.ofInductiveVal (v : InductiveVal) : MetaM StructureInfo := do let info ← Info.ofConstantVal v.toConstantVal @@ -42,7 +42,7 @@ def StructureInfo.ofInductiveVal (v : InductiveVal) : MetaM StructureInfo := do if i.fieldNames.size - parents.size > 0 then return { toInfo := info, - fieldInfo := ← getFieldTypes v.name ctorVal parents.size, + fieldInfo := ← getFieldTypes v, parents, ctor } diff --git a/Main.lean b/Main.lean index e78677d..e8dbd6c 100644 --- a/Main.lean +++ b/Main.lean @@ -11,15 +11,12 @@ def getTopLevelModules (p : Parsed) : IO (List String) := do return topLevelModules def runSingleCmd (p : Parsed) : IO UInt32 := do - let relevantModules := [p.positionalArg! "module" |>.as! String |> String.toName] - let res ← lakeSetup - match res with - | Except.ok ws => - let (doc, hierarchy) ← load <| .loadAllLimitAnalysis relevantModules - let baseConfig ← getSimpleBaseContext hierarchy - htmlOutputResults baseConfig doc ws (p.hasFlag "ink") - return 0 - | Except.error rc => pure rc + let relevantModules := #[p.positionalArg! "module" |>.as! String |> String.toName] + let gitUrl := p.positionalArg! "gitUrl" |>.as! 
String + let (doc, hierarchy) ← load <| .loadAllLimitAnalysis relevantModules + let baseConfig ← getSimpleBaseContext hierarchy + htmlOutputResults baseConfig doc (some gitUrl) (p.hasFlag "ink") + return 0 def runIndexCmd (_p : Parsed) : IO UInt32 := do let hierarchy ← Hierarchy.fromDirectory Output.basePath @@ -28,14 +25,10 @@ def runIndexCmd (_p : Parsed) : IO UInt32 := do return 0 def runGenCoreCmd (_p : Parsed) : IO UInt32 := do - let res ← lakeSetup - match res with - | Except.ok ws => - let (doc, hierarchy) ← loadCore - let baseConfig ← getSimpleBaseContext hierarchy - htmlOutputResults baseConfig doc ws (ink := False) - return 0 - | Except.error rc => pure rc + let (doc, hierarchy) ← loadCore + let baseConfig ← getSimpleBaseContext hierarchy + htmlOutputResults baseConfig doc none (ink := False) + return 0 def runDocGenCmd (_p : Parsed) : IO UInt32 := do IO.println "You most likely want to use me via Lake now, check my README on Github on how to:" @@ -51,6 +44,7 @@ def singleCmd := `[Cli| ARGS: module : String; "The module to generate the HTML for. Does not have to be part of topLevelModules." 
+ gitUrl : String; "The gitUrl as computed by the Lake facet" ] def indexCmd := `[Cli| diff --git a/lake-manifest.json b/lake-manifest.json index 2bffb3c..df80128 100644 --- a/lake-manifest.json +++ b/lake-manifest.json @@ -1,4 +1,4 @@ -{"version": 5, +{"version": 6, "packagesDir": "lake-packages", "packages": [{"git": @@ -20,15 +20,15 @@ {"git": {"url": "https://github.com/fgdorais/lean4-unicode-basic", "subDir?": null, - "rev": "2491e781ae478b6e6f1d86a7157f1c58fc50f895", + "rev": "4ecf4f1f98d14d03a9e84fa1c082630fa69df88b", "opts": {}, - "name": "«lean4-unicode-basic»", + "name": "UnicodeBasic", "inputRev?": "main", "inherited": false}}, {"git": {"url": "https://github.com/mhuisi/lean4-cli", "subDir?": null, - "rev": "21dac2e9cc7e3cf7da5800814787b833e680b2fd", + "rev": "39229f3630d734af7d9cfb5937ddc6b41d3aa6aa", "opts": {}, "name": "Cli", "inputRev?": "nightly", @@ -72,4 +72,5 @@ "opts": {}, "name": "std", "inputRev?": "main", - "inherited": false}}]} + "inherited": false}}], +"name": "«bookshelf»"} diff --git a/lakefile.lean b/lakefile.lean index 386643d..e1a0b1b 100644 --- a/lakefile.lean +++ b/lakefile.lean @@ -14,7 +14,7 @@ require Cli from git require CMark from git "https://github.com/xubaiw/CMark.lean" @ "main" -require «lean4-unicode-basic» from git +require UnicodeBasic from git "https://github.com/fgdorais/lean4-unicode-basic" @ "main" require leanInk from git @@ -38,22 +38,87 @@ lean_exe «doc-gen4» { supportInterpreter := true } +/-- +Turns a Github git remote URL into an HTTPS Github URL. +Three link types from git supported: +- https://github.com/org/repo +- https://github.com/org/repo.git +- git@github.com:org/repo.git +TODO: This function is quite brittle and very Github specific, we can +probably do better. 
+-/ +def getGithubBaseUrl (gitUrl : String) : String := Id.run do + let mut url := gitUrl + if url.startsWith "git@" then + url := url.drop 15 + url := url.dropRight 4 + return s!"https://github.com/{url}" + else if url.endsWith ".git" then + return url.dropRight 4 + else + return url + +/-- +Obtain the Github URL of a project by parsing the origin remote. +-/ +def getProjectGithubUrl (directory : System.FilePath := "." ) : IO String := do + let out ← IO.Process.output { + cmd := "git", + args := #["remote", "get-url", "origin"], + cwd := directory + } + if out.exitCode != 0 then + throw <| IO.userError <| s!"git exited with code {out.exitCode} while looking for the git remote in {directory}" + return out.stdout.trimRight + +/-- +Obtain the git commit hash of the project that is currently getting analyzed. +-/ +def getProjectCommit (directory : System.FilePath := "." ) : IO String := do + let out ← IO.Process.output { + cmd := "git", + args := #["rev-parse", "HEAD"] + cwd := directory + } + if out.exitCode != 0 then + throw <| IO.userError <| s!"git exited with code {out.exitCode} while looking for the current commit in {directory}" + return out.stdout.trimRight + +def getGitUrl (pkg : Package) (lib : LeanLibConfig) (mod : Module) : IO String := do + let baseUrl := getGithubBaseUrl (← getProjectGithubUrl pkg.dir) + let commit ← getProjectCommit pkg.dir + + let parts := mod.name.components.map toString + let path := String.intercalate "/" parts + let libPath := pkg.config.srcDir / lib.srcDir + let basePath := String.intercalate "/" (libPath.components.filter (· != ".")) + let url := s!"{baseUrl}/blob/{commit}/{basePath}/{path}.lean" + return url + module_facet docs (mod) : FilePath := do let some docGen4 ← findLeanExe? 
`«doc-gen4» | error "no doc-gen4 executable configuration found in workspace" let exeJob ← docGen4.exe.fetch - let modJob ← mod.leanBin.fetch - let buildDir := (← getWorkspace).root.buildDir + let modJob ← mod.leanArts.fetch + let ws ← getWorkspace + let pkg ← ws.packages.find? (·.isLocalModule mod.name) + let libConfig ← pkg.leanLibConfigs.toArray.find? (·.isLocalModule mod.name) + -- Build all documentation imported modules + let imports ← mod.imports.fetch + let depDocJobs ← BuildJob.mixArray <| ← imports.mapM fun mod => fetch <| mod.facet `docs + let gitUrl ← getGitUrl pkg libConfig mod + let buildDir := ws.root.buildDir let docFile := mod.filePath (buildDir / "doc") "html" + depDocJobs.bindAsync fun _ depDocTrace => do exeJob.bindAsync fun exeFile exeTrace => do modJob.bindSync fun _ modTrace => do - let depTrace := exeTrace.mix modTrace + let depTrace := mixTraceArray #[exeTrace, modTrace, depDocTrace] let trace ← buildFileUnlessUpToDate docFile depTrace do - logInfo s!"Documenting module: {mod.name}" + logStep s!"Documenting module: {mod.name}" proc { cmd := exeFile.toString - args := #["single", mod.name.toString] - env := #[("LEAN_PATH", (← getAugmentedLeanPath).toString)] + args := #["single", mod.name.toString, gitUrl] + env := ← getAugmentedEnv } return (docFile, trace) @@ -66,11 +131,11 @@ target coreDocs : FilePath := do let dataFile := basePath / "declarations" / "declaration-data-Lean.bmp" exeJob.bindSync fun exeFile exeTrace => do let trace ← buildFileUnlessUpToDate dataFile exeTrace do - logInfo "Documenting Lean core: Init and Lean" + logStep "Documenting Lean core: Init and Lean" proc { cmd := exeFile.toString args := #["genCore"] - env := #[("LEAN_PATH", (← getAugmentedLeanPath).toString)] + env := ← getAugmentedEnv } return (dataFile, trace) @@ -90,6 +155,8 @@ library_facet docs (lib) : FilePath := do basePath / "declaration-data.js", basePath / "color-scheme.js", basePath / "nav.js", + basePath / "jump-src.js", + basePath / "expand-nav.js", 
basePath / "how-about.js", basePath / "search.js", basePath / "mathjax-config.js", diff --git a/lean-toolchain b/lean-toolchain index 7884c5a..00004ee 100644 --- a/lean-toolchain +++ b/lean-toolchain @@ -1 +1 @@ -leanprover/lean4:4.0.0 \ No newline at end of file +leanprover/lean4:v4.2.0-rc4 \ No newline at end of file diff --git a/static/declaration-data.js b/static/declaration-data.js index 291e09c..378c9bf 100644 --- a/static/declaration-data.js +++ b/static/declaration-data.js @@ -54,7 +54,12 @@ export class DeclarationDataCenter { ); // try to use cache first - const data = await fetchCachedDeclarationData().catch(_e => null); + // TODO: This API is not thought 100% through. If we have a DB cached + // already it will not even ask the remote for a new one so we end up + // with outdated declaration-data. This has to have some form of cache + // invalidation: https://github.com/leanprover/doc-gen4/issues/133 + // const data = await fetchCachedDeclarationData().catch(_e => null); + const data = null; if (data) { // if data is defined, use the cached one. return new DeclarationDataCenter(data); @@ -72,7 +77,7 @@ export class DeclarationDataCenter { * Search for a declaration. * @returns {Array} */ - search(pattern, strict = true, allowedKinds=undefined, maxResults=undefined) { + search(pattern, strict = true, allowedKinds = undefined, maxResults = undefined) { if (!pattern) { return []; } @@ -135,7 +140,7 @@ export class DeclarationDataCenter { } } -function isSeparater(char) { +function isSeparator(char) { return char === "." || char === "_"; } @@ -148,11 +153,11 @@ function matchCaseSensitive(declName, lowerDeclName, pattern) { lastMatch = 0; while (i < declName.length && j < pattern.length) { if (pattern[j] === declName[i] || pattern[j] === lowerDeclName[i]) { - err += (isSeparater(pattern[j]) ? 0.125 : 1) * (i - lastMatch); + err += (isSeparator(pattern[j]) ? 
0.125 : 1) * (i - lastMatch); if (pattern[j] !== declName[i]) err += 0.5; lastMatch = i + 1; j++; - } else if (isSeparater(declName[i])) { + } else if (isSeparator(declName[i])) { err += 0.125 * (i + 1 - lastMatch); lastMatch = i + 1; } @@ -168,12 +173,9 @@ function getMatches(declarations, pattern, allowedKinds = undefined, maxResults const lowerPats = pattern.toLowerCase().split(/\s/g); const patNoSpaces = pattern.replace(/\s/g, ""); const results = []; - for (const [_, { - name, + for (const [name, { kind, - doc, docLink, - sourceLink, }] of Object.entries(declarations)) { // Apply "kind" filter if (allowedKinds !== undefined) { @@ -182,26 +184,14 @@ function getMatches(declarations, pattern, allowedKinds = undefined, maxResults } } const lowerName = name.toLowerCase(); - const lowerDoc = doc.toLowerCase(); let err = matchCaseSensitive(name, lowerName, patNoSpaces); - // match all words as substrings of docstring - if ( - err >= 3 && - pattern.length > 3 && - lowerPats.every((l) => lowerDoc.indexOf(l) != -1) - ) { - err = 3; - } if (err !== undefined) { results.push({ name, kind, - doc, err, lowerName, - lowerDoc, docLink, - sourceLink, }); } } @@ -273,12 +263,7 @@ async function fetchCachedDeclarationData() { return new Promise((resolve, reject) => { let transactionRequest = store.get(CACHE_DB_KEY); transactionRequest.onsuccess = function (event) { - // TODO: This API is not thought 100% through. If we have a DB cached - // already it will not even ask the remote for a new one so we end up - // with outdated declaration-data. 
This has to have some form of cache - // invalidation: https://github.com/leanprover/doc-gen4/issues/133 - // resolve(event.target.result); - resolve(undefined); + resolve(event.target.result); }; transactionRequest.onerror = function (event) { reject(new Error(`fail to store declaration data`)); diff --git a/static/expand-nav.js b/static/expand-nav.js new file mode 100644 index 0000000..6ecb246 --- /dev/null +++ b/static/expand-nav.js @@ -0,0 +1,24 @@ +document.querySelector('.navframe').addEventListener('load', function() { + // Get the current page URL without the suffix after # + var currentPageURL = window.location.href.split('#')[0]; + + // Get all anchor tags + var as = document.querySelector('.navframe').contentWindow.document.body.querySelectorAll('a'); + for (const a of as) { + if (a.href) { + var href = a.href.split('#')[0]; + // find the one with the current url + if (href === currentPageURL) { + a.style.fontStyle = 'italic'; + // open all detail tags above the current + var el = a.parentNode.closest('details'); + while (el) { + el.open = true; + el = el.parentNode.closest('details'); + } + } + // seeing as we found the link we were looking for, stop + break; + } + } +}); \ No newline at end of file diff --git a/static/find/find.js b/static/find/find.js index 038a1a1..57953fc 100644 --- a/static/find/find.js +++ b/static/find/find.js @@ -49,7 +49,7 @@ const queryParams = new Map( const fragmentPaths = fragment?.split(LEAN_FRIENDLY_SLASH_SEPARATOR) ?? []; const encodedPattern = queryParams.get("pattern") ?? fragmentPaths[1]; // if first fail then second, may be undefined -const pattern = decodeURIComponent(encodedPattern); +const pattern = encodedPattern && decodeURIComponent(encodedPattern); const strict = (queryParams.get("strict") ?? 
"true") === "true"; // default to true const view = fragmentPaths[0]; @@ -81,7 +81,8 @@ async function findAndRedirect(pattern, strict, view) { } else if (view == "doc") { window.location.replace(result.docLink); } else if (view == "src") { - window.location.replace(result.sourceLink); + const [module, decl] = result.docLink.split("#", 2); + window.location.replace(`${module}?jump=src#${decl}`); } else { // fallback to doc page window.location.replace(result.docLink); diff --git a/static/importedBy.js b/static/importedBy.js index dab932d..5eb7fca 100644 --- a/static/importedBy.js +++ b/static/importedBy.js @@ -3,7 +3,7 @@ import { DeclarationDataCenter } from "./declaration-data.js"; fillImportedBy(); async function fillImportedBy() { - if (!MODULE_NAME) { + if (typeof(MODULE_NAME) == "undefined") { return; } const dataCenter = await DeclarationDataCenter.init(); diff --git a/static/jump-src.js b/static/jump-src.js new file mode 100644 index 0000000..e7397e5 --- /dev/null +++ b/static/jump-src.js @@ -0,0 +1,10 @@ +document.addEventListener("DOMContentLoaded", () => { + const hash = document.location.hash.substring(1); + const tgt = new URLSearchParams(document.location.search).get("jump"); + if (tgt === "src" && hash) { + const target = document.getElementById(hash) + ?.querySelector(".gh_link a") + ?.getAttribute("href"); + if (target) window.location.replace(target); + } +}) \ No newline at end of file diff --git a/static/search.js b/static/search.js index 809f4bd..4556c64 100644 --- a/static/search.js +++ b/static/search.js @@ -112,7 +112,7 @@ function handleSearch(dataCenter, err, ev, sr, maxResults, autocomplete) { // update autocomplete results removeAllChildren(sr); - for (const { name, kind, doc, docLink } of result) { + for (const { name, kind, docLink } of result) { const row = sr.appendChild(document.createElement("div")); row.classList.add("search_result") const linkdiv = row.appendChild(document.createElement("div")) @@ -121,11 +121,6 @@ function 
handleSearch(dataCenter, err, ev, sr, maxResults, autocomplete) { link.innerText = name; link.title = name; link.href = SITE_ROOT + docLink; - if (!autocomplete) { - const doctext = row.appendChild(document.createElement("div")); - doctext.innerText = doc - doctext.classList.add("result_doc") - } } } // handle error @@ -151,7 +146,7 @@ const debounce = (callback, wait) => { DeclarationDataCenter.init() .then((dataCenter) => { // Search autocompletion. - SEARCH_INPUT.addEventListener("input", debounce(ev => handleSearch(dataCenter, null, ev, ac_results, AC_MAX_RESULTS, true), 500)); + SEARCH_INPUT.addEventListener("input", debounce(ev => handleSearch(dataCenter, null, ev, ac_results, AC_MAX_RESULTS, true), 300)); if(SEARCH_PAGE_INPUT) { SEARCH_PAGE_INPUT.addEventListener("input", ev => handleSearch(dataCenter, null, ev, SEARCH_RESULTS, SEARCH_PAGE_MAX_RESULTS, false)) document.querySelectorAll(".kind_checkbox").forEach((checkbox) => @@ -162,7 +157,7 @@ DeclarationDataCenter.init() SEARCH_INPUT.dispatchEvent(new Event("input")) }) .catch(e => { - SEARCH_INPUT.addEventListener("input", debounce(ev => handleSearch(null, e, ev, ac_results, AC_MAX_RESULTS, true), 500)); + SEARCH_INPUT.addEventListener("input", debounce(ev => handleSearch(null, e, ev, ac_results, AC_MAX_RESULTS, true), 300)); if(SEARCH_PAGE_INPUT) { SEARCH_PAGE_INPUT.addEventListener("input", ev => handleSearch(null, e, ev, SEARCH_RESULTS, SEARCH_PAGE_MAX_RESULTS, false)); }