From c568707982b37c58196eb62bba00eed174b2ff62 Mon Sep 17 00:00:00 2001 From: Yorick Peterse Date: Fri, 31 May 2024 22:23:14 +0200 Subject: [PATCH] WIP: implement --- inko.pkg | 2 +- src/idoc/cmd/html.inko | 131 ++++++++++++++++++++------ src/idoc/ir.inko | 203 ++++++++++++++++++++++++++++++++++++++--- 3 files changed, 292 insertions(+), 44 deletions(-) diff --git a/inko.pkg b/inko.pkg index e256237..76c685e 100644 --- a/inko.pkg +++ b/inko.pkg @@ -1,3 +1,3 @@ -require https://github.com/yorickpeterse/inko-wobsite 0.14.0 aefbbaf6d24cb4a6574c92573c0a72d14b863760 +require https://github.com/yorickpeterse/inko-wobsite 0.16.0 8595e430857cb2579cfb2fe0ff19cc7446b516e4 require https://github.com/yorickpeterse/inko-builder 0.12.0 dfb7877ca9dec2109b0fa940190c210dd3fe4059 require https://github.com/yorickpeterse/inko-markdown 0.16.0 e6d26dd94bd44cdd24fb8a159caa949cfe82891f diff --git a/src/idoc/cmd/html.inko b/src/idoc/cmd/html.inko index b7cb780..8bae88a 100644 --- a/src/idoc/cmd/html.inko +++ b/src/idoc/cmd/html.inko @@ -1,10 +1,13 @@ -import idoc.ir (Constant, Module) -import std.fs.file (ReadOnlyFile, WriteOnlyFile) +import builder.html (Document) +import idoc.ir (Module) +import std.fs.file (WriteOnlyFile) import std.fs.path (Path) import std.io (Write) -import std.json (Error, PullParser) +import std.json (Error) import std.optparse (Help, Options) -import std.range (InclusiveRange) +import std.time (DateTime) +import wobsite (Files, FrontMatter, Page, Site, UpdateAssetLinks) +import wobsite.url (file_url) # The default directory in which to look for the output of `inko doc`. # @@ -27,6 +30,58 @@ fn usage(options: ref Options, output: mut Write) { let _ = output.write_string(help) } +class async Parser { + fn async parse( + input: uni Path, + output: Channel[uni Result[Module, (Path, Error)]], + ) { + let res = recover { + let input = recover input + + match Module.parse_file(input.clone) { + case Ok(mod) -> Result.Ok(mod) + case Error(e) -> Result.Error((input, e)) + } + } + + output.send(res) + } +} + +fn generate(source: String, files: ref Files, module: ref Module) -> String { + # TODO: do the actual work + # TODO: update heading levels: + # TODO: push into function/type/whatever + let url = file_url(files.source, source.to_path) + # let page = Page( + # front_matter: FrontMatter(title: 'TITLE', date: DateTime.new), + # url: url, + # source_path: 'TODO'.to_path, + # body: panic('TODO'), + # ) + + let page = Document.html('en', fn (html) { + html.head.with(fn (h) { + # TODO + }) + + html.body.with(fn (b) { + # TODO + }) + }) + + # let body = match mod.documentation { + # case Some(d) -> { + # let html = d.to_html + # + # UpdateAssetLinks.new(files, url).run(html) + # html.to_string + # } + # case _ -> '' + # } + page.to_string +} + fn run( arguments: Array[String], working_directory: ref Path, @@ -49,48 +104,66 @@ fn run( # TODO: # - # - Parse the JSON files # - Convert the JSON data to Markdown files, one for each module. Write these # build/idoc/source/ - # - Adjust the headings in examples to be of the right level + # - Wrap documentation headings such that we don't include them in the table + # of contents. # - Do all this in parallel # - Copy assets (e.g. CSS) from /usr/share/whatever to build/idoc/source/ # - Generate static website using inko-wobsite, write to build/idoc/public/ - let files = try input + let build = OUTPUT_DIR.to_path + let source = build.join('source') + + # Make sure we always start with a clean directory, instead of keeping files + # from previous builds around. 
+ let _ = build.remove_directory_all + + try source.create_directory_all.map_error(fn (e) { + 'failed to create ${source}: ${e}' + }) + + let site = try Site.new(source, build.join('public')).map_error(fn (e) { + 'failed to set up the site generator: ${e}' + }) + + let modules = Channel.new(size: 32) + let mut pending = try input .list .then(fn (iter) { - iter.try_reduce([], fn (acc, res) { + iter.try_reduce(0, fn (sum, res) { match res { - case Ok({ @path = path, @type = File }) -> { - match path.extension { - case Some('json') -> acc.push(path) - case _ -> {} - } + case Ok({ @path = p, @type = File }) if p.extension.or('') == 'json' + -> { + Parser().parse(recover p.clone, modules) + Result.Ok(sum + 1) } - case Error(e) -> throw e - case _ -> {} + case Ok(_) -> Result.Ok(sum) + case Error(e) -> Result.Error(e) } - - Result.Ok(acc) }) }) - .map_error(fn (e) { 'failed to get the JSON input files: ${e}' }) + .map_error(fn (e) { 'failed to get the JSON files to process: ${e}' }) - let dir = OUTPUT_DIR.to_path - let md_dir = dir.join('source/modules') + while pending > 0 { + let mod = recover { + match modules.receive { + case Ok(v) -> v + case Error((p, e)) -> throw 'failed to parse the JSON file ${p}: ${e}' + } + } - try md_dir.create_directory_all.map_error(fn (e) { - "failed to create '${md_dir}': ${e}" - }) + let dir = 'module/' + mod.name.replace('.', '/') - try files.into_iter.try_each(fn (path) { - try Module.parse_file(path.clone).map_error(fn (e) { - 'failed to parse ${path}: ${e}' + site.generate(dir + '/index.html', fn move (files) { + Result.Ok(generate(dir + '/index.md', files, mod)) }) - Result.Ok(nil) - }) + pending -= 1 + } - Result.Ok(nil) + match site.wait { + case Ok(_) -> Result.Ok(nil) + case Error(e) -> Result.Error('failed to build the documentation: ${e}') + } } diff --git a/src/idoc/ir.inko b/src/idoc/ir.inko index a49017d..479892f 100644 --- a/src/idoc/ir.inko +++ b/src/idoc/ir.inko @@ -1,4 +1,5 @@ # An intermediate representation of Inko's documentation files. 
+import markdown (Document) import std.fs.file (ReadOnlyFile) import std.fs.path (Path) import std.json (Error, ErrorKind, PullParser) @@ -15,6 +16,16 @@ fn parse_range_into( .require_all } +fn parse_markdown(parser: mut PullParser) -> Result[Document, Error] { + let start = try parser.start_of_next_value + + parser.string.then(fn (md) { + Document.parse(md).map_error(fn (e) { + Error.generic(e.message, start + e.offset) + }) + }) +} + class Location { let @lines: InclusiveRange let @columns: InclusiveRange @@ -38,7 +49,7 @@ class Location { class Module { let @name: String let @file: Path - let @documentation: String + let @documentation: Option[Document] let @constants: Array[Constant] let @methods: Array[Method] let @classes: Array[Class] @@ -48,14 +59,13 @@ class Module { let mod = Module( name: '', file: ''.to_path, - documentation: '', + documentation: Option.None, constants: [], methods: [], classes: [], traits: [], ) - # TODO: stream the input file let bytes = ByteArray.new try ReadOnlyFile @@ -69,10 +79,21 @@ class Module { .object .string('name', fn (v) { mod.name = v }) .string('file', fn (v) { mod.file = v.to_path }) - .string('documentation', fn (v) { mod.documentation = v }) + .key('documentation', fn { + parse_markdown(parser).map(fn (v) { mod.documentation = Option.Some(v) }) + }) .values('constants', fn { Constant.parse(parser).map(fn (v) { mod.constants.push(v) }) }) + .values('methods', fn { + Method.parse(parser).map(fn (v) { mod.methods.push(v) }) + }) + .values('classes', fn { + Class.parse(parser).map(fn (v) { mod.classes.push(v) }) + }) + .values('traits', fn { + Trait.parse(parser).map(fn (v) { mod.traits.push(v) }) + }) .require_all Result.Ok(mod) @@ -84,7 +105,7 @@ class Constant { let @location: Location let @public: Bool let @type: String - let @documentation: String + let @documentation: Option[Document] fn static parse(parser: mut PullParser) -> Result[Constant, Error] { let const = Constant( @@ -92,7 +113,7 @@ class Constant { location: Location.default, public: false, type: '', - documentation: '', + documentation: Option.None, ) try parser @@ -101,7 +122,11 @@ class Constant { .key('location', fn { Location.parse_into(const.location, parser) }) .bool('public', fn (v) { const.public = v }) .string('type', fn (v) { const.type = v }) - .string('documentation', fn (v) { const.documentation = v }) + .string('documentation', fn (v) { + parse_markdown(parser).map(fn (v) { + const.documentation = Option.Some(v) + }) + }) .require_all Result.Ok(const) @@ -113,14 +138,62 @@ class Field { let @location: Location let @public: Bool let @type: String - let @documentation: String + let @documentation: Option[Document] + + fn static parse(parser: mut PullParser) -> Result[Field, Error] { + let field = Field( + name: '', + location: Location.default, + public: false, + type: '', + documentation: Option.None, + ) + + try parser + .object + .string('name', fn (v) { field.name = v }) + .key('location', fn { Location.parse_into(field.location, parser) }) + .bool('public', fn (v) { field.public = v }) + .string('type', fn (v) { field.type = v }) + .string('documentation', fn (v) { + parse_markdown(parser).map(fn (v) { + field.documentation = Option.Some(v) + }) + }) + .require_all + + Result.Ok(field) + } } class Constructor { let @name: String let @location: Location let @type: String - let @documentation: String + let @documentation: Option[Document] + + fn static parse(parser: mut PullParser) -> Result[Constructor, Error] { + let cons = Constructor( + name: '', + location: 
Location.default, + type: '', + documentation: Option.None, + ) + + try parser + .object + .string('name', fn (v) { cons.name = v }) + .key('location', fn { Location.parse_into(cons.location, parser) }) + .string('type', fn (v) { cons.type = v }) + .string('documentation', fn (v) { + parse_markdown(parser).map(fn (v) { + cons.documentation = Option.Some(v) + }) + }) + .require_all + + Result.Ok(cons) + } } class enum MethodKind { @@ -149,12 +222,41 @@ class enum MethodKind { class Method { let @name: String - let @kind: MethodKind let @file: Path let @location: Location + let @kind: MethodKind let @public: Bool let @type: String - let @documentation: String + let @documentation: Option[Document] + + fn static parse(parser: mut PullParser) -> Result[Method, Error] { + let method = Method( + name: '', + kind: MethodKind.Instance, + file: ''.to_path, + location: Location.default, + public: false, + type: '', + documentation: Option.None, + ) + + try parser + .object + .string('name', fn (v) { method.name = v }) + .string('file', fn (v) { method.file = v.to_path }) + .key('location', fn { Location.parse_into(method.location, parser) }) + .int('kind', fn (v) { method.kind = MethodKind.from(v) }) + .bool('public', fn (v) { method.public = v }) + .string('type', fn (v) { method.type = v }) + .string('documentation', fn (v) { + parse_markdown(parser).map(fn (v) { + method.documentation = Option.Some(v) + }) + }) + .require_all + + Result.Ok(method) + } } class enum ClassKind { @@ -181,14 +283,54 @@ class enum ClassKind { class Class { let @name: String - let @kind: ClassKind let @file: Path let @location: Location + let @kind: ClassKind let @public: Bool let @type: String - let @documentation: String + let @documentation: Option[Document] + let @constructors: Array[Constructor] let @fields: Array[Field] let @methods: Array[Method] + + fn static parse(parser: mut PullParser) -> Result[Class, Error] { + let val = Class( + name: '', + file: ''.to_path, + location: Location.default, + kind: ClassKind.Regular, + public: false, + type: '', + documentation: Option.None, + constructors: [], + fields: [], + methods: [], + ) + + try parser + .object + .string('name', fn (v) { val.name = v }) + .string('file', fn (v) { val.file = v.to_path }) + .key('location', fn { Location.parse_into(val.location, parser) }) + .int('kind', fn (v) { val.kind = ClassKind.from(v) }) + .bool('public', fn (v) { val.public = v }) + .string('type', fn (v) { val.type = v }) + .string('documentation', fn (v) { + parse_markdown(parser).map(fn (v) { val.documentation = Option.Some(v) }) + }) + .values('constructors', fn { + Constructor.parse(parser).map(fn (v) { val.constructors.push(v) }) + }) + .values('fields', fn { + Field.parse(parser).map(fn (v) { val.fields.push(v) }) + }) + .values('methods', fn { + Method.parse(parser).map(fn (v) { val.methods.push(v) }) + }) + .require_all + + Result.Ok(val) + } } class Trait { @@ -197,7 +339,40 @@ class Trait { let @location: Location let @public: Bool let @type: String - let @documentation: String + let @documentation: Option[Document] let @required_methods: Array[Method] let @default_methods: Array[Method] + + fn static parse(parser: mut PullParser) -> Result[Trait, Error] { + let val = Trait( + name: '', + file: ''.to_path, + location: Location.default, + public: false, + type: '', + documentation: Option.None, + required_methods: [], + default_methods: [], + ) + + try parser + .object + .string('name', fn (v) { val.name = v }) + .string('file', fn (v) { val.file = v.to_path }) + 
.key('location', fn { Location.parse_into(val.location, parser) }) + .bool('public', fn (v) { val.public = v }) + .string('type', fn (v) { val.type = v }) + .string('documentation', fn (v) { + parse_markdown(parser).map(fn (v) { val.documentation = Option.Some(v) }) + }) + .values('required_methods', fn { + Method.parse(parser).map(fn (v) { val.required_methods.push(v) }) + }) + .values('default_methods', fn { + Method.parse(parser).map(fn (v) { val.default_methods.push(v) }) + }) + .require_all + + Result.Ok(val) + } }
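
A rough sketch of the body rendering that generate() currently leaves behind the commented-out block and TODOs. It only reuses calls that already appear in this patch (Document.to_html, UpdateAssetLinks.new(files, url).run, to_string); the helper name body_html is made up for illustration:

  fn body_html(files: ref Files, url: String, module: ref Module) -> String {
    match module.documentation {
      case Some(doc) -> {
        # Render the module's markdown documentation to HTML, then rewrite
        # asset links (e.g. CSS and images) relative to the page URL.
        let html = doc.to_html

        UpdateAssetLinks.new(files, url).run(html)
        html.to_string
      }
      # Modules without documentation get an empty body.
      case _ -> ''
    }
  }

For a module named std.string, run() registers the page as module/std/string/index.html, and generate() derives the page URL from the matching module/std/string/index.md source path via file_url.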
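
One detail worth double-checking in src/idoc/ir.inko: Module parses its documentation through .key('documentation', fn { parse_markdown(parser) ... }), while Constant, Field, Constructor, Method, Class and Trait use .string('documentation', fn (v) { parse_markdown(parser) ... }), meaning the string value is consumed first and parse_markdown then tries to read the next value from the parser. If those fields are meant to behave like Module's, the entry would take the .key form instead; for Constant that would be the following, with the same shape applying to the other types:

      .key('documentation', fn {
        parse_markdown(parser).map(fn (v) {
          const.documentation = Option.Some(v)
        })
      })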
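
To exercise the new markdown-aware parsing on its own, Module.parse_file can be driven directly. A quick sketch, where the JSON path is made up and any file produced by `inko doc` works:

  match Module.parse_file('build/docs/std.string.json'.to_path) {
    case Ok(mod) -> {
      # mod.documentation is now an Option[Document] (markdown), and
      # mod.constants, mod.methods, mod.classes and mod.traits are populated.
    }
    case Error(e) -> panic('parsing failed: ${e}')
  }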