diff --git a/apps/demo/playwright.config.ts b/apps/demo/playwright.config.ts
index 18ef86f..30d259a 100644
--- a/apps/demo/playwright.config.ts
+++ b/apps/demo/playwright.config.ts
@@ -5,6 +5,7 @@ const config: PlaywrightTestConfig = {
         command: 'npm run build && npm run preview',
         port: 4173,
     },
+    testDir: 'tests',
     testMatch: /(.+\.)?(test|spec)\.[jt]s/,
 };
 
diff --git a/apps/demo/tests/test.ts b/apps/demo/tests/test.ts
index bd01f59..5a95805 100644
--- a/apps/demo/tests/test.ts
+++ b/apps/demo/tests/test.ts
@@ -1,6 +1,9 @@
 import { expect, test } from '@playwright/test';
 
+test.describe.configure({ mode: 'parallel' });
+
 test('tags work', async ({ page }) => {
+    test.setTimeout(0);
     await page.goto('http://localhost:4173/playground/tags');
 
     expect(await page.content()).toContain('Addition');
@@ -9,6 +12,7 @@ test('tags work', async ({ page }) => {
 });
 
 test('tags work with types', async ({ page }) => {
+    test.setTimeout(0);
     await page.goto('http://localhost:4173/playground/tags');
 
     expect(await page.content()).toContain('Types');
@@ -18,6 +22,7 @@ test('tags work with types', async ({ page }) => {
 });
 
 test('partials work', async ({ page }) => {
+    test.setTimeout(0);
     await page.goto('http://localhost:4173/playground/partials');
 
     expect(await page.content()).toContain('I am a partial.');
@@ -26,6 +31,7 @@ test('partials work', async ({ page }) => {
 });
 
 test('named layouts work', async ({ page }) => {
+    test.setTimeout(0);
     await page.goto('http://localhost:4173/playground/layout');
 
     expect(await page.content()).toContain('I am on an alternative layout');
diff --git a/packages/process/src/renderer.ts b/packages/process/src/renderer.ts
index 895888e..73d4e54 100644
--- a/packages/process/src/renderer.ts
+++ b/packages/process/src/renderer.ts
@@ -92,7 +92,7 @@ function is_void_element(name: string): boolean {
 }
 
 function is_svelte_component(node: RenderableTreeNodes): boolean {
-    return Tag.isTag(node) && node.name.startsWith(IMPORT_PREFIX);
+    return Tag.isTag(node);
 }
 
 function generate_svelte_attribute_value(value: unknown): string {
diff --git a/packages/process/src/transformer.ts b/packages/process/src/transformer.ts
index 6217c96..9c391ae 100644
--- a/packages/process/src/transformer.ts
+++ b/packages/process/src/transformer.ts
@@ -8,6 +8,7 @@ import {
     ConfigType,
     validate,
     Tokenizer,
+    Node,
 } from '@markdoc/markdoc';
 import {
     ScriptTarget,
@@ -27,11 +28,12 @@ import {
     path_exists,
     read_file,
     relative_posix_path,
+    to_absolute_posix_path,
     write_to_file,
 } from './utils';
 import * as default_schema from './default_schema';
 import type { Config } from './config';
-import { LAYOUT_IMPORT, NODES_IMPORT, TAGS_IMPORT } from './constants';
+import { LAYOUT_IMPORT } from './constants';
 import { log_error, log_validation_error } from './log';
 
 type Var = {
@@ -39,6 +41,150 @@
     type: StringConstructor | NumberConstructor | BooleanConstructor;
 };
 
+type NodeName = NodeType;
+type TagName = string;
+type PartialName = string;
+
+type NodeTagPartialTriplet = [NodeName[], TagName[], PartialName[]];
+type TransformerState = {
+    nodes: Map<NodeName, [path: string, Schema]>;
+    tags: Map<TagName, [path: string, Schema]>;
+    partials: Map<PartialName, [NodeTagPartialTriplet, Node]>;
+    normalized: {
+        nodes: NodeName[];
+        tags: TagName[];
+    };
+};
+
+function init(
+    tags_file: string | null,
+    nodes_file: string | null,
+    partials_dir: string | null,
+): TransformerState {
+    const node_with_paths = nodes_file ? each_exported_var(nodes_file) : [];
+    const node_with_schemas = [
+        ...Object.entries(prepare_nodes(nodes_file, node_with_paths)),
+    ];
+
+    const node_state = node_with_paths.map<[NodeName, [path: string, Schema]]>(
+        ([node, path]) => [
+            node as NodeName,
+            [
+                path,
+                node_with_schemas.find(
+                    ([snode]) => snode.toLowerCase() == node.toLowerCase(),
+                )![1],
+            ],
+        ],
+    );
+
+    const tag_with_paths = tags_file
+        ? each_exported_var(tags_file.toString())
+        : [];
+
+    const tag_with_schemas = [
+        ...Object.entries(prepare_tags(tags_file, tag_with_paths)),
+    ];
+
+    const tag_state = tag_with_paths.map<[TagName, [path: string, Schema]]>(
+        ([tag, path]) => [
+            tag,
+            [
+                path,
+                tag_with_schemas.find(
+                    ([stag]) => stag.toLowerCase() == tag.toLowerCase(),
+                )![1],
+            ],
+        ],
+    );
+
+    const partial_schemas = prepare_partials(partials_dir);
+    const partial_state = Object.entries(partial_schemas).map<
+        [PartialName, [NodeTagPartialTriplet, Node]]
+    >(([partial, schema]) => [partial, [flatten_node(schema), schema]]);
+    return {
+        tags: new Map(tag_state),
+        nodes: new Map(node_state),
+        partials: new Map(partial_state),
+        normalized: {
+            nodes: [...node_state.map(([k]) => k)],
+            tags: [...tag_state.map(([k]) => k)],
+        },
+    };
+}
+
+function flatten_node(node: Node): NodeTagPartialTriplet {
+    const aux_create_state = (node: Node): NodeTagPartialTriplet =>
+        is_partial_node(node)
+            ? [
+                  [],
+                  [],
+                  node.annotations
+                      .filter((a) => a.name == 'file')
+                      .map((node) => node.value),
+              ]
+            : node.tag
+            ? [[], [node.tag], []]
+            : [[node.type], [], []];
+
+    return node.children.length
+        ? combine_nodes_tags_partials([
+              aux_create_state(node),
+              ...node.children.map(flatten_node),
+          ])
+        : aux_create_state(node);
+}
+
+function combine_nodes_tags_partials(
+    data: NodeTagPartialTriplet[],
+): NodeTagPartialTriplet {
+    return data.reduce(
+        (acc, node) => acc.map((k, i) => k.concat(node[i])),
+        [[], [], []],
+    ) as NodeTagPartialTriplet;
+}
+
+function is_partial_node(node: Node): boolean {
+    return (
+        node.type == 'tag' && node.tag == 'partial' && !!node.annotations.length
+    );
+}
+
+function flatten_partials(
+    travel_state: PartialName[],
+    transformer_state: TransformerState,
+    partial_name: PartialName,
+): NodeTagPartialTriplet {
+    if (travel_state.includes(partial_name)) {
+        throw new Error(
+            `resolve deps failed: detected cyclic dependency in partials in the order ${[
+                ...travel_state,
+                partial_name,
+            ]}`,
+        );
+    }
+
+    if (!partial_name.length) {
+        return [[], [], []];
+    }
+
+    travel_state = [...travel_state, partial_name];
+    const res = transformer_state.partials.get(partial_name)?.[0] ?? [
+        [],
+        [],
+        [],
+    ];
+    const [, , remaining_partials] = res;
+
+    return remaining_partials.length
+        ? combine_nodes_tags_partials(
+              remaining_partials.map((v) =>
+                  flatten_partials(travel_state, transformer_state, v),
+              ),
+          )
+        : res;
+}
+
 export function transformer({
     content,
     filename,
@@ -62,6 +208,8 @@
     validation_threshold: Config['validationThreshold'];
     allow_comments: Config['allowComments'];
 }): string {
+    const transformer_state = init(tags_file, nodes_file, partials_dir);
+
     /**
      * create tokenizer
      */
@@ -74,6 +222,52 @@
      */
     const ast = markdocParse(tokens);
 
+    const [used_cur_nodes, used_cur_tags, used_cur_partials] = flatten_node(
+        ast,
+    ).map((nodes) => [...new Set(nodes)]);
+
+    const [used_partials_nodes, used_partials_tags, empty_partials] =
+        combine_nodes_tags_partials(
+            used_cur_partials.map((p) =>
+                flatten_partials([], transformer_state, p),
+            ),
+        );
+
+    if (empty_partials.length) {
+        throw new Error('should never happen');
+    }
+
+    const [used_nodes, used_tags] = combine_nodes_tags_partials([
+        [used_cur_nodes as NodeName[], used_cur_tags, []],
+        [used_partials_nodes, used_partials_tags, []],
+    ]);
+
+    const used_normalized_nodes = [
+        ...new Set(
+            used_nodes
+                .map((k) =>
+                    transformer_state.normalized.nodes.find(
+                        (n) => n.toLowerCase() == k.toLowerCase(),
+                    ),
+                )
+                .filter(Boolean),
+        ),
+    ];
+
+    const used_normalized_tags = [
+        ...new Set(
+            used_tags.map((k) => {
+                const maybe_tag = transformer_state.normalized.tags.find(
+                    (n) => n.toLowerCase() == k.toLowerCase(),
+                );
+                if (!maybe_tag) throw new Error(`Undefined tag: '${k}'`);
+                return maybe_tag!;
+            }),
+        ),
+    ];
+
+    //
+
     /**
      * load frontmatter
      */
@@ -94,30 +288,61 @@
      * add used svelte components to the script tag
      */
     let dependencies = '';
-    const tags = prepare_tags(tags_file);
-    const has_tags = Object.keys(tags).length > 0;
-    const nodes = prepare_nodes(nodes_file);
-    const has_nodes = Object.keys(nodes).length > 0;
-    const partials = prepare_partials(partials_dir);
+
+    const tags = used_normalized_tags.map((name) => [
+        name,
+        transformer_state.tags.get(name)![0],
+    ]); // tags must be present
+
+    const nodes = used_normalized_nodes
+        .map((name) => [name, transformer_state.nodes.get(name!)?.[0]])
+        .filter(([, maybe_schema]) => maybe_schema) as [string, string][]; // nodes can fall back to the default
+
+    const partials = Object.fromEntries(
+        [...transformer_state.partials.entries()]
+            .filter(([k]) => used_cur_partials.includes(k))
+            .map(([partial, [, node]]) => [partial, node]),
+    );
 
     /**
      * add import for tags
      */
-    if (tags_file && has_tags) {
-        dependencies += `import * as ${TAGS_IMPORT} from '${relative_posix_path(
-            filename,
-            tags_file,
-        )}';`;
+    if (used_cur_tags.length) {
+        dependencies +=
+            tags
+                .map(
+                    ([comp, path]) =>
+                        `import ${comp} from '${relative_posix_path(
+                            filename,
+                            to_absolute_posix_path(
+                                filename,
+                                tags_file ?? '',
+                                path,
+                            ),
+                        )}';`,
+                )
+                .join('') ?? '';
     }
 
     /**
      * add import for nodes
      */
-    if (nodes_file && has_nodes) {
-        dependencies += `import * as ${NODES_IMPORT} from '${relative_posix_path(
-            filename,
-            nodes_file,
-        )}';`;
+
+    if (used_cur_nodes.length) {
+        dependencies +=
+            nodes
+                .map(
+                    ([comp, path]) =>
+                        `import ${comp} from '${relative_posix_path(
+                            filename,
+                            to_absolute_posix_path(
+                                filename,
+                                nodes_file ?? '',
+                                path,
+                            ),
+                        )}';`,
+                )
+                .join('') ?? '';
     }
 
     /**
@@ -134,7 +359,13 @@
      * generate schema for markdoc extension
      */
     if (generate_schema) {
-        create_schema(tags);
+        create_schema(
+            Object.fromEntries(
+                [...transformer_state.tags.entries()].map(
+                    ([comp, [, schema]]) => [comp, schema],
+                ),
+            ),
+        );
     }
 
     /**
@@ -143,11 +374,20 @@
     const configuration: ConfigType = {
         tags: {
             ...config?.tags,
-            ...tags,
+            ...Object.fromEntries(
+                [...transformer_state.tags.entries()].map(
+                    ([comp, [, schema]]) => [comp.toLowerCase(), schema],
+                ),
+            ),
         },
+
         nodes: {
             ...config?.nodes,
-            ...nodes,
+            ...Object.fromEntries(
+                [...transformer_state.nodes.entries()].map(
+                    ([comp, [, schema]]) => [comp.toLowerCase(), schema],
+                ),
+            ),
         },
         partials: {
             ...config?.partials,
@@ -397,21 +637,18 @@ function get_node_defaults(node_type: NodeType): Partial<Schema> {
 
 function prepare_nodes(
     nodes_file: Config['nodes'],
+    comps_with_paths: Array<[string, string]>,
 ): Partial<Record<NodeType, Schema>> {
     const nodes: Record<string, Schema> = {};
     if (nodes_file) {
-        for (const [name] of each_exported_var(nodes_file)) {
+        for (const [name] of comps_with_paths) {
             const type = name.toLowerCase() as NodeType;
-            if (type === 'image') {
-            }
+
             nodes[name.toLowerCase()] = {
                 ...get_node_defaults(type),
                 transform(node, config) {
-                    if (type === 'image') {
-                        node.attributes.src;
-                    }
                     return new Tag(
-                        `${NODES_IMPORT}.${name}`,
+                        name,
                        node.transformAttributes(config),
                        node.transformChildren(config),
                     );
@@ -423,16 +660,19 @@ function prepare_nodes(
     return nodes;
 }
 
-function prepare_tags(tags_file: Config['tags']): Record<string, Schema> {
+function prepare_tags(
+    tags_file: Config['tags'],
+    comps_with_paths: Array<[string, string]>,
+): Record<string, Schema> {
     const tags: Record<string, Schema> = {};
     if (tags_file) {
-        for (const [name, value] of each_exported_var(tags_file)) {
+        for (const [name, value] of comps_with_paths) {
             /**
              * extract all exported variables from the components
              */
             const attributes = get_component_vars(String(value), tags_file);
             tags[name.toLowerCase()] = {
-                render: `${TAGS_IMPORT}.${name}`,
+                render: name,
                 attributes,
             };
         }
diff --git a/packages/process/src/utils.ts b/packages/process/src/utils.ts
index ca4ff38..0a71be9 100644
--- a/packages/process/src/utils.ts
+++ b/packages/process/src/utils.ts
@@ -5,7 +5,8 @@ import {
     readdirSync,
     writeFileSync,
 } from 'fs';
-import { dirname, join, relative, sep } from 'path';
+import { dirname, join, relative, sep, resolve } from 'path';
+import path = require('path');
 import { sep as posix_sep } from 'path/posix';
 
 export function get_all_files(path: string): string[] {
@@ -38,3 +39,8 @@ export function path_exists(path: string): boolean {
 export function relative_posix_path(from: string, to: string): string {
     return relative(dirname(from), to).split(sep).join(posix_sep);
 }
+export function to_absolute_posix_path(...paths: string[]): string {
+    return resolve(...paths.slice(0, paths.length - 1).map(v => dirname(v)), paths[paths.length-1])
+        .split(sep)
+        .join(posix_sep);
+}
diff --git a/packages/process/tests/processor.test.mjs b/packages/process/tests/processor.test.mjs
index 0f7aefa..f440f7b 100644
--- a/packages/process/tests/processor.test.mjs
+++ b/packages/process/tests/processor.test.mjs
@@ -78,7 +78,8 @@ test('preprocessor', async (context) => {
                 content: before,
                 filename: 'test.markdoc',
             });
-            assert.equal(markup.code, after);
+            // Somehow when reading the compiled file, it adds a newline char to the output
+            assert.equal(markup.code, after.trim());
         } catch (error) {
             if (exception) {
                 assert.equal(error.message, exception);
diff --git a/packages/process/tests/processor/nodes, tags and partials/compiled.txt b/packages/process/tests/processor/nodes, tags and partials/compiled.txt
index 94ac4ab..ebbbd1c 100644
--- a/packages/process/tests/processor/nodes, tags and partials/compiled.txt
+++ b/packages/process/tests/processor/nodes, tags and partials/compiled.txt
@@ -1 +1 @@
-
Heading 1Heading 2With ID With Class

slot content

I am a partialLorem IpsumI am nested
\ No newline at end of file
+
Heading 1Heading 2With ID With Class

slot content

I am a partialLorem IpsumI am nested
diff --git a/packages/process/tests/processor/nodes/compiled.txt b/packages/process/tests/processor/nodes/compiled.txt
index 5ace9da..f4bd9ac 100644
--- a/packages/process/tests/processor/nodes/compiled.txt
+++ b/packages/process/tests/processor/nodes/compiled.txt
@@ -1 +1 @@
-
Heading 1Heading 2With ID With Class
\ No newline at end of file
+
Heading 1Heading 2With ID With Class
diff --git a/packages/process/tests/processor/nodes/source.markdoc b/packages/process/tests/processor/nodes/source.markdoc
index 7cc1d95..8103417 100644
--- a/packages/process/tests/processor/nodes/source.markdoc
+++ b/packages/process/tests/processor/nodes/source.markdoc
@@ -4,4 +4,4 @@
 
 # With ID {% #my-id %}
 
-# With Class{% .my-class %}
\ No newline at end of file
+# With Class{% .my-class %}
diff --git a/packages/process/tests/processor/tags/compiled.txt b/packages/process/tests/processor/tags/compiled.txt
index 38ac354..3713e8a 100644
--- a/packages/process/tests/processor/tags/compiled.txt
+++ b/packages/process/tests/processor/tags/compiled.txt
@@ -1 +1 @@
-

slot content

\ No newline at end of file
+

slot content