From 340af568527be3777f70f85bd793e04507f4a16d Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Sun, 5 Oct 2025 22:26:59 -0700
Subject: [PATCH 01/31] recreate dagplanner snapshots

---
.../com/datasqrl/DAGPlannerTest/mutationInsertTypeTest.txt | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/DAGPlannerTest/mutationInsertTypeTest.txt b/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/DAGPlannerTest/mutationInsertTypeTest.txt
index c0c17b8b8..8b051a29c 100644
--- a/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/DAGPlannerTest/mutationInsertTypeTest.txt
+++ b/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/DAGPlannerTest/mutationInsertTypeTest.txt
@@ -60,7 +60,7 @@ Schema:
- machineid: INTEGER
- updatedTime: TIMESTAMP_LTZ(3) *ROWTIME*
Plan:
-LogicalWatermarkAssigner(rowtime=[updatedTime], watermark=[-($2, 0:INTERVAL SECOND)])
+LogicalWatermarkAssigner(rowtime=[updatedTime], watermark=[-($2, 15000:INTERVAL SECOND)])
LogicalProject(sensorid=[$0], machineid=[$1], updatedTime=[CAST($2):TIMESTAMP_LTZ(3) *ROWTIME*])
LogicalTableScan(table=[[default_catalog, default_database, Sensors, metadata=[timestamp]]])
SQL: CREATE VIEW `Sensors__view`
@@ -95,7 +95,7 @@ CREATE TABLE `Sensors` (
`machineid` INTEGER,
`updatedTime` TIMESTAMP_LTZ(3) METADATA FROM 'timestamp',
PRIMARY KEY (`sensorid`) NOT ENFORCED,
- WATERMARK FOR `updatedTime` AS `updatedTime` - INTERVAL '0.0' SECOND
+ WATERMARK FOR `updatedTime` AS `updatedTime` - INTERVAL '15.0' SECOND
) WITH (
'connector' = 'upsert-kafka',
'key.flexible-json.timestamp-format.standard' = 'ISO-8601',

From 8bb17721ad645564170e5a31ec69d0582070a218 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Sun, 5 Oct 2025 22:29:29 -0700
Subject: [PATCH 02/31] update other snapshots

---
.../com/datasqrl/UseCaseCompileTest/loan-loan-package.txt | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/UseCaseCompileTest/loan-loan-package.txt b/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/UseCaseCompileTest/loan-loan-package.txt
index fdf5c9653..43b443e90 100644
--- a/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/UseCaseCompileTest/loan-loan-package.txt
+++ b/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/UseCaseCompileTest/loan-loan-package.txt
@@ -102,7 +102,7 @@ Schema:
- message: VARCHAR(2147483647) CHARACTER SET "UTF-16LE"
- event_time: TIMESTAMP_LTZ(3) *ROWTIME*
Plan:
-LogicalWatermarkAssigner(rowtime=[event_time], watermark=[-($4, 0:INTERVAL SECOND)])
+LogicalWatermarkAssigner(rowtime=[event_time], watermark=[-($4, 15000:INTERVAL SECOND)])
LogicalProject(_uuid=[$0], loan_application_id=[$1], status=[$2], message=[$3], event_time=[CAST($4):TIMESTAMP_LTZ(3) *ROWTIME*])
LogicalTableScan(table=[[default_catalog, default_database, ApplicationUpdates, metadata=[timestamp]]])
SQL: CREATE VIEW `ApplicationUpdates__view`
@@ -371,7 +371,7 @@ CREATE TABLE `ApplicationUpdates` (
`status` STRING NOT NULL,
`message` STRING,
`event_time` TIMESTAMP_LTZ(3) METADATA FROM 'timestamp',
- WATERMARK FOR `event_time` AS `event_time` - INTERVAL '0.0' SECOND
+ WATERMARK FOR `event_time` AS `event_time` - INTERVAL '15.0' SECOND
) WITH (
'connector' = 'kafka',
'flexible-json.timestamp-format.standard' = 'ISO-8601',

From 2ac7a3d5d59033134c860f52564bc4012d232732 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Sun, 5 Oct 2025 20:52:53 -0700
Subject: [PATCH 03/31] enable mermaid

---
documentation/docusaurus.config.ts | 5 +
documentation/package-lock.json | 2141 +++++++++++++++++++++++-----
documentation/package.json | 1 +
3 files changed, 1806 insertions(+), 341 deletions(-)

diff --git a/documentation/docusaurus.config.ts b/documentation/docusaurus.config.ts
index 933d293c1..3dd564f9f 100644
--- a/documentation/docusaurus.config.ts
+++ b/documentation/docusaurus.config.ts
@@ -39,6 +39,10 @@ const config: Config = {
onBrokenLinks: 'warn',
onBrokenMarkdownLinks: 'warn',
+ markdown: {
+ mermaid: true,
+ },
+
// Even if you don't use internationalization, you can use this field to set
// useful metadata like html lang. For example, if your site is Chinese, you
// may want to replace "en" with "zh-Hans".
@@ -90,6 +94,7 @@ const config: Config = {
searchResultContextMaxLength: 50
}),
],
+ '@docusaurus/theme-mermaid',
],
themeConfig: {
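
For reference, a minimal sketch of how the two config additions above fit together in docusaurus.config.ts, plus the optional themeConfig.mermaid theme selection that @docusaurus/theme-mermaid supports. The title/url/baseUrl placeholders and the theme values are illustrative assumptions, not taken from this repository's config:

```ts
// Sketch only: `markdown.mermaid` and the '@docusaurus/theme-mermaid' entry
// mirror the patch above; the rest is placeholder/optional.
import type {Config} from '@docusaurus/types';

const config: Config = {
  title: 'Example Docs',        // placeholder
  url: 'https://example.com',   // placeholder
  baseUrl: '/',

  // Render `mermaid` code fences in .md/.mdx pages as diagrams.
  markdown: {
    mermaid: true,
  },
  themes: ['@docusaurus/theme-mermaid'],

  themeConfig: {
    // Optional: pick a Mermaid theme per color mode (illustrative values).
    mermaid: {
      theme: {light: 'neutral', dark: 'dark'},
    },
  },
};

export default config;
```

With both pieces in place, any `mermaid` code fence in a Markdown or MDX page is rendered as a diagram at build time.
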
diff --git a/documentation/package-lock.json b/documentation/package-lock.json
index 53ab0cced..123df9f35 100644
--- a/documentation/package-lock.json
+++ b/documentation/package-lock.json
@@ -10,6 +10,7 @@
"dependencies": {
"@docusaurus/core": "3.7.0",
"@docusaurus/preset-classic": "3.7.0",
+ "@docusaurus/theme-mermaid": "^3.7.0",
"@easyops-cn/docusaurus-search-local": "^0.52.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
@@ -271,6 +272,28 @@
"node": ">=6.0.0"
}
},
+ "node_modules/@antfu/install-pkg": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@antfu/install-pkg/-/install-pkg-1.1.0.tgz",
+ "integrity": "sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==",
+ "license": "MIT",
+ "dependencies": {
+ "package-manager-detector": "^1.3.0",
+ "tinyexec": "^1.0.1"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/antfu"
+ }
+ },
+ "node_modules/@antfu/utils": {
+ "version": "9.3.0",
+ "resolved": "https://registry.npmjs.org/@antfu/utils/-/utils-9.3.0.tgz",
+ "integrity": "sha512-9hFT4RauhcUzqOE4f1+frMKLZrgNog5b06I7VmZQV1BkvwvqrbC8EBZf3L1eEL2AKb6rNKjER0sEvJiSP1FXEA==",
+ "license": "MIT",
+ "funding": {
+ "url": "https://github.com/sponsors/antfu"
+ }
+ },
"node_modules/@babel/code-frame": {
"version": "7.26.2",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz",
@@ -1947,6 +1970,51 @@
"node": ">=6.9.0"
}
},
+ "node_modules/@braintree/sanitize-url": {
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.1.1.tgz",
+ "integrity": "sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==",
+ "license": "MIT"
+ },
+ "node_modules/@chevrotain/cst-dts-gen": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.0.3.tgz",
+ "integrity": "sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@chevrotain/gast": "11.0.3",
+ "@chevrotain/types": "11.0.3",
+ "lodash-es": "4.17.21"
+ }
+ },
+ "node_modules/@chevrotain/gast": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.0.3.tgz",
+ "integrity": "sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@chevrotain/types": "11.0.3",
+ "lodash-es": "4.17.21"
+ }
+ },
+ "node_modules/@chevrotain/regexp-to-ast": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.0.3.tgz",
+ "integrity": "sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==",
+ "license": "Apache-2.0"
+ },
+ "node_modules/@chevrotain/types": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.0.3.tgz",
+ "integrity": "sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==",
+ "license": "Apache-2.0"
+ },
+ "node_modules/@chevrotain/utils": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.0.3.tgz",
+ "integrity": "sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==",
+ "license": "Apache-2.0"
+ },
"node_modules/@colors/colors": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz",
@@ -1958,9 +2026,9 @@
}
},
"node_modules/@csstools/cascade-layer-name-parser": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/@csstools/cascade-layer-name-parser/-/cascade-layer-name-parser-2.0.4.tgz",
- "integrity": "sha512-7DFHlPuIxviKYZrOiwVU/PiHLm3lLUR23OMuEEtfEOQTOp9hzQ2JjdY6X5H18RVuUPJqSCI+qNnD5iOLMVE0bA==",
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/@csstools/cascade-layer-name-parser/-/cascade-layer-name-parser-2.0.5.tgz",
+ "integrity": "sha512-p1ko5eHgV+MgXFVa4STPKpvPxr6ReS8oS2jzTukjR74i5zJNyWO1ZM1m8YKBXnzDKWfBN1ztLYlHxbVemDD88A==",
"funding": [
{
"type": "github",
@@ -1976,14 +2044,14 @@
"node": ">=18"
},
"peerDependencies": {
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
}
},
"node_modules/@csstools/color-helpers": {
- "version": "5.0.2",
- "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.0.2.tgz",
- "integrity": "sha512-JqWH1vsgdGcw2RR6VliXXdA0/59LttzlU8UlRT/iUUsEeWfYq8I+K0yhihEUTTHLRm1EXvpsCx3083EU15ecsA==",
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz",
+ "integrity": "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==",
"funding": [
{
"type": "github",
@@ -2000,9 +2068,9 @@
}
},
"node_modules/@csstools/css-calc": {
- "version": "2.1.3",
- "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.3.tgz",
- "integrity": "sha512-XBG3talrhid44BY1x3MHzUx/aTG8+x/Zi57M4aTKK9RFB4aLlF3TTSzfzn8nWVHWL3FgAXAxmupmDd6VWww+pw==",
+ "version": "2.1.4",
+ "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz",
+ "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==",
"funding": [
{
"type": "github",
@@ -2018,14 +2086,14 @@
"node": ">=18"
},
"peerDependencies": {
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
}
},
"node_modules/@csstools/css-color-parser": {
- "version": "3.0.9",
- "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.0.9.tgz",
- "integrity": "sha512-wILs5Zk7BU86UArYBJTPy/FMPPKVKHMj1ycCEyf3VUptol0JNRLFU/BZsJ4aiIHJEbSLiizzRrw8Pc1uAEDrXw==",
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.1.0.tgz",
+ "integrity": "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==",
"funding": [
{
"type": "github",
@@ -2038,21 +2106,21 @@
],
"license": "MIT",
"dependencies": {
- "@csstools/color-helpers": "^5.0.2",
- "@csstools/css-calc": "^2.1.3"
+ "@csstools/color-helpers": "^5.1.0",
+ "@csstools/css-calc": "^2.1.4"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
}
},
"node_modules/@csstools/css-parser-algorithms": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.4.tgz",
- "integrity": "sha512-Up7rBoV77rv29d3uKHUIVubz1BTcgyUK72IvCQAbfbMv584xHcGKCKbWh7i8hPrRJ7qU4Y8IO3IY9m+iTB7P3A==",
+ "version": "3.0.5",
+ "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz",
+ "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==",
"funding": [
{
"type": "github",
@@ -2068,13 +2136,13 @@
"node": ">=18"
},
"peerDependencies": {
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-tokenizer": "^3.0.4"
}
},
"node_modules/@csstools/css-tokenizer": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.3.tgz",
- "integrity": "sha512-UJnjoFsmxfKUdNYdWgOB0mWUypuLvAfQPH1+pyvRJs6euowbFkFC6P13w1l8mJyi3vxYMxc9kld5jZEGRQs6bw==",
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz",
+ "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==",
"funding": [
{
"type": "github",
@@ -2091,9 +2159,9 @@
}
},
"node_modules/@csstools/media-query-list-parser": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/@csstools/media-query-list-parser/-/media-query-list-parser-4.0.2.tgz",
- "integrity": "sha512-EUos465uvVvMJehckATTlNqGj4UJWkTmdWuDMjqvSUkjGpmOyFZBVwb4knxCm/k2GMTXY+c/5RkdndzFYWeX5A==",
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/@csstools/media-query-list-parser/-/media-query-list-parser-4.0.3.tgz",
+ "integrity": "sha512-HAYH7d3TLRHDOUQK4mZKf9k9Ph/m8Akstg66ywKR4SFAigjs3yBiUeZtFxywiTm5moZMAp/5W/ZuFnNXXYLuuQ==",
"funding": [
{
"type": "github",
@@ -2109,14 +2177,43 @@
"node": ">=18"
},
"peerDependencies": {
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
+ }
+ },
+ "node_modules/@csstools/postcss-alpha-function": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-alpha-function/-/postcss-alpha-function-1.0.1.tgz",
+ "integrity": "sha512-isfLLwksH3yHkFXfCI2Gcaqg7wGGHZZwunoJzEZk0yKYIokgre6hYVFibKL3SYAoR1kBXova8LB+JoO5vZzi9w==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT-0",
+ "dependencies": {
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
+ "@csstools/utilities": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "postcss": "^8.4"
}
},
"node_modules/@csstools/postcss-cascade-layers": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-cascade-layers/-/postcss-cascade-layers-5.0.1.tgz",
- "integrity": "sha512-XOfhI7GShVcKiKwmPAnWSqd2tBR0uxt+runAxttbSp/LY2U16yAVPmAf7e9q4JJ0d+xMNmpwNDLBXnmRCl3HMQ==",
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-cascade-layers/-/postcss-cascade-layers-5.0.2.tgz",
+ "integrity": "sha512-nWBE08nhO8uWl6kSAeCx4im7QfVko3zLrtgWZY4/bP87zrSPpSyN/3W3TDqz1jJuH+kbKOHXg5rJnK+ZVYcFFg==",
"funding": [
{
"type": "github",
@@ -2175,9 +2272,38 @@
}
},
"node_modules/@csstools/postcss-color-function": {
- "version": "4.0.9",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-color-function/-/postcss-color-function-4.0.9.tgz",
- "integrity": "sha512-2UeQCGMO5+EeQsPQK2DqXp0dad+P6nIz6G2dI06APpBuYBKxZEq7CTH+UiztFQ8cB1f89dnO9+D/Kfr+JfI2hw==",
+ "version": "4.0.12",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-color-function/-/postcss-color-function-4.0.12.tgz",
+ "integrity": "sha512-yx3cljQKRaSBc2hfh8rMZFZzChaFgwmO2JfFgFr1vMcF3C/uyy5I4RFIBOIWGq1D+XbKCG789CGkG6zzkLpagA==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT-0",
+ "dependencies": {
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
+ "@csstools/utilities": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "postcss": "^8.4"
+ }
+ },
+ "node_modules/@csstools/postcss-color-function-display-p3-linear": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-color-function-display-p3-linear/-/postcss-color-function-display-p3-linear-1.0.1.tgz",
+ "integrity": "sha512-E5qusdzhlmO1TztYzDIi8XPdPoYOjoTY6HBYBCYSj+Gn4gQRBlvjgPQXzfzuPQqt8EhkC/SzPKObg4Mbn8/xMg==",
"funding": [
{
"type": "github",
@@ -2190,10 +2316,10 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-color-parser": "^3.0.9",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -2204,9 +2330,38 @@
}
},
"node_modules/@csstools/postcss-color-mix-function": {
- "version": "3.0.9",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-color-mix-function/-/postcss-color-mix-function-3.0.9.tgz",
- "integrity": "sha512-Enj7ZIIkLD7zkGCN31SZFx4H1gKiCs2Y4taBo/v/cqaHN7p1qGrf5UTMNSjQFZ7MgClGufHx4pddwFTGL+ipug==",
+ "version": "3.0.12",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-color-mix-function/-/postcss-color-mix-function-3.0.12.tgz",
+ "integrity": "sha512-4STERZfCP5Jcs13P1U5pTvI9SkgLgfMUMhdXW8IlJWkzOOOqhZIjcNhWtNJZes2nkBDsIKJ0CJtFtuaZ00moag==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT-0",
+ "dependencies": {
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
+ "@csstools/utilities": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "postcss": "^8.4"
+ }
+ },
+ "node_modules/@csstools/postcss-color-mix-variadic-function-arguments": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-color-mix-variadic-function-arguments/-/postcss-color-mix-variadic-function-arguments-1.0.2.tgz",
+ "integrity": "sha512-rM67Gp9lRAkTo+X31DUqMEq+iK+EFqsidfecmhrteErxJZb6tUoJBVQca1Vn1GpDql1s1rD1pKcuYzMsg7Z1KQ==",
"funding": [
{
"type": "github",
@@ -2219,10 +2374,10 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-color-parser": "^3.0.9",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -2233,9 +2388,37 @@
}
},
"node_modules/@csstools/postcss-content-alt-text": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-content-alt-text/-/postcss-content-alt-text-2.0.5.tgz",
- "integrity": "sha512-9BOS535v6YmyOYk32jAHXeddRV+iyd4vRcbrEekpwxmueAXX5J8WgbceFnE4E4Pmw/ysnB9v+n/vSWoFmcLMcA==",
+ "version": "2.0.8",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-content-alt-text/-/postcss-content-alt-text-2.0.8.tgz",
+ "integrity": "sha512-9SfEW9QCxEpTlNMnpSqFaHyzsiRpZ5J5+KqCu1u5/eEJAWsMhzT40qf0FIbeeglEvrGRMdDzAxMIz3wqoGSb+Q==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT-0",
+ "dependencies": {
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
+ "@csstools/utilities": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "postcss": "^8.4"
+ }
+ },
+ "node_modules/@csstools/postcss-contrast-color-function": {
+ "version": "2.0.12",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-contrast-color-function/-/postcss-contrast-color-function-2.0.12.tgz",
+ "integrity": "sha512-YbwWckjK3qwKjeYz/CijgcS7WDUCtKTd8ShLztm3/i5dhh4NaqzsbYnhm4bjrpFpnLZ31jVcbK8YL77z3GBPzA==",
"funding": [
{
"type": "github",
@@ -2248,9 +2431,10 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -2261,9 +2445,9 @@
}
},
"node_modules/@csstools/postcss-exponential-functions": {
- "version": "2.0.8",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-exponential-functions/-/postcss-exponential-functions-2.0.8.tgz",
- "integrity": "sha512-vHgDXtGIBPpFQnFNDftMQg4MOuXcWnK91L/7REjBNYzQ/p2Fa/6RcnehTqCRrNtQ46PNIolbRsiDdDuxiHolwQ==",
+ "version": "2.0.9",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-exponential-functions/-/postcss-exponential-functions-2.0.9.tgz",
+ "integrity": "sha512-abg2W/PI3HXwS/CZshSa79kNWNZHdJPMBXeZNyPQFbbj8sKO3jXxOt/wF7juJVjyDTc6JrvaUZYFcSBZBhaxjw==",
"funding": [
{
"type": "github",
@@ -2276,9 +2460,9 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-calc": "^2.1.3",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-calc": "^2.1.4",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
},
"engines": {
"node": ">=18"
@@ -2314,9 +2498,9 @@
}
},
"node_modules/@csstools/postcss-gamut-mapping": {
- "version": "2.0.9",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-gamut-mapping/-/postcss-gamut-mapping-2.0.9.tgz",
- "integrity": "sha512-quksIsFm3DGsf8Qbr9KiSGBF2w3RwxSfOfma5wbORDB1AFF15r4EVW7sUuWw3s5IAEGMqzel/dE2rQsI7Yb8mA==",
+ "version": "2.0.11",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-gamut-mapping/-/postcss-gamut-mapping-2.0.11.tgz",
+ "integrity": "sha512-fCpCUgZNE2piVJKC76zFsgVW1apF6dpYsqGyH8SIeCcM4pTEsRTWTLCaJIMKFEundsCKwY1rwfhtrio04RJ4Dw==",
"funding": [
{
"type": "github",
@@ -2329,9 +2513,9 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-color-parser": "^3.0.9",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
},
"engines": {
"node": ">=18"
@@ -2341,9 +2525,9 @@
}
},
"node_modules/@csstools/postcss-gradients-interpolation-method": {
- "version": "5.0.9",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-gradients-interpolation-method/-/postcss-gradients-interpolation-method-5.0.9.tgz",
- "integrity": "sha512-duqTeUHF4ambUybAmhX9KonkicLM/WNp2JjMUbegRD4O8A/tb6fdZ7jUNdp/UUiO1FIdDkMwmNw6856bT0XF8Q==",
+ "version": "5.0.12",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-gradients-interpolation-method/-/postcss-gradients-interpolation-method-5.0.12.tgz",
+ "integrity": "sha512-jugzjwkUY0wtNrZlFeyXzimUL3hN4xMvoPnIXxoZqxDvjZRiSh+itgHcVUWzJ2VwD/VAMEgCLvtaJHX+4Vj3Ow==",
"funding": [
{
"type": "github",
@@ -2356,10 +2540,10 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-color-parser": "^3.0.9",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -2370,9 +2554,9 @@
}
},
"node_modules/@csstools/postcss-hwb-function": {
- "version": "4.0.9",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-hwb-function/-/postcss-hwb-function-4.0.9.tgz",
- "integrity": "sha512-sDpdPsoGAhYl/PMSYfu5Ez82wXb2bVkg1Cb8vsRLhpXhAk4OSlsJN+GodAql6tqc1B2G/WToxsFU6G74vkhPvA==",
+ "version": "4.0.12",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-hwb-function/-/postcss-hwb-function-4.0.12.tgz",
+ "integrity": "sha512-mL/+88Z53KrE4JdePYFJAQWFrcADEqsLprExCM04GDNgHIztwFzj0Mbhd/yxMBngq0NIlz58VVxjt5abNs1VhA==",
"funding": [
{
"type": "github",
@@ -2385,10 +2569,10 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-color-parser": "^3.0.9",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -2399,9 +2583,9 @@
}
},
"node_modules/@csstools/postcss-ic-unit": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-ic-unit/-/postcss-ic-unit-4.0.1.tgz",
- "integrity": "sha512-lECc38i1w3qU9nhrUhP6F8y4BfcQJkR1cb8N6tZNf2llM6zPkxnqt04jRCwsUgNcB3UGKDy+zLenhOYGHqCV+Q==",
+ "version": "4.0.4",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-ic-unit/-/postcss-ic-unit-4.0.4.tgz",
+ "integrity": "sha512-yQ4VmossuOAql65sCPppVO1yfb7hDscf4GseF0VCA/DTDaBc0Wtf8MTqVPfjGYlT5+2buokG0Gp7y0atYZpwjg==",
"funding": [
{
"type": "github",
@@ -2414,7 +2598,7 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0",
"postcss-value-parser": "^4.2.0"
},
@@ -2448,9 +2632,9 @@
}
},
"node_modules/@csstools/postcss-is-pseudo-class": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-5.0.1.tgz",
- "integrity": "sha512-JLp3POui4S1auhDR0n8wHd/zTOWmMsmK3nQd3hhL6FhWPaox5W7j1se6zXOG/aP07wV2ww0lxbKYGwbBszOtfQ==",
+ "version": "5.0.3",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-5.0.3.tgz",
+ "integrity": "sha512-jS/TY4SpG4gszAtIg7Qnf3AS2pjcUM5SzxpApOrlndMeGhIbaTzWBzzP/IApXoNWEW7OhcjkRT48jnAUIFXhAQ==",
"funding": [
{
"type": "github",
@@ -2509,9 +2693,9 @@
}
},
"node_modules/@csstools/postcss-light-dark-function": {
- "version": "2.0.8",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-light-dark-function/-/postcss-light-dark-function-2.0.8.tgz",
- "integrity": "sha512-v8VU5WtrZIyEtk88WB4fkG22TGd8HyAfSFfZZQ1uNN0+arMJdZc++H3KYTfbYDpJRGy8GwADYH8ySXiILn+OyA==",
+ "version": "2.0.11",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-light-dark-function/-/postcss-light-dark-function-2.0.11.tgz",
+ "integrity": "sha512-fNJcKXJdPM3Lyrbmgw2OBbaioU7yuKZtiXClf4sGdQttitijYlZMD5K7HrC/eF83VRWRrYq6OZ0Lx92leV2LFA==",
"funding": [
{
"type": "github",
@@ -2524,9 +2708,9 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -2628,9 +2812,9 @@
}
},
"node_modules/@csstools/postcss-logical-viewport-units": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-logical-viewport-units/-/postcss-logical-viewport-units-3.0.3.tgz",
- "integrity": "sha512-OC1IlG/yoGJdi0Y+7duz/kU/beCwO+Gua01sD6GtOtLi7ByQUpcIqs7UE/xuRPay4cHgOMatWdnDdsIDjnWpPw==",
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-logical-viewport-units/-/postcss-logical-viewport-units-3.0.4.tgz",
+ "integrity": "sha512-q+eHV1haXA4w9xBwZLKjVKAWn3W2CMqmpNpZUk5kRprvSiBEGMgrNH3/sJZ8UA3JgyHaOt3jwT9uFa4wLX4EqQ==",
"funding": [
{
"type": "github",
@@ -2643,7 +2827,7 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-tokenizer": "^3.0.3",
+ "@csstools/css-tokenizer": "^3.0.4",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -2654,9 +2838,9 @@
}
},
"node_modules/@csstools/postcss-media-minmax": {
- "version": "2.0.8",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-media-minmax/-/postcss-media-minmax-2.0.8.tgz",
- "integrity": "sha512-Skum5wIXw2+NyCQWUyfstN3c1mfSh39DRAo+Uh2zzXOglBG8xB9hnArhYFScuMZkzeM+THVa//mrByKAfumc7w==",
+ "version": "2.0.9",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-media-minmax/-/postcss-media-minmax-2.0.9.tgz",
+ "integrity": "sha512-af9Qw3uS3JhYLnCbqtZ9crTvvkR+0Se+bBqSr7ykAnl9yKhk6895z9rf+2F4dClIDJWxgn0iZZ1PSdkhrbs2ig==",
"funding": [
{
"type": "github",
@@ -2669,10 +2853,10 @@
],
"license": "MIT",
"dependencies": {
- "@csstools/css-calc": "^2.1.3",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/media-query-list-parser": "^4.0.2"
+ "@csstools/css-calc": "^2.1.4",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/media-query-list-parser": "^4.0.3"
},
"engines": {
"node": ">=18"
@@ -2682,9 +2866,9 @@
}
},
"node_modules/@csstools/postcss-media-queries-aspect-ratio-number-values": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-media-queries-aspect-ratio-number-values/-/postcss-media-queries-aspect-ratio-number-values-3.0.4.tgz",
- "integrity": "sha512-AnGjVslHMm5xw9keusQYvjVWvuS7KWK+OJagaG0+m9QnIjZsrysD2kJP/tr/UJIyYtMCtu8OkUd+Rajb4DqtIQ==",
+ "version": "3.0.5",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-media-queries-aspect-ratio-number-values/-/postcss-media-queries-aspect-ratio-number-values-3.0.5.tgz",
+ "integrity": "sha512-zhAe31xaaXOY2Px8IYfoVTB3wglbJUVigGphFLj6exb7cjZRH9A6adyE22XfFK3P2PzwRk0VDeTJmaxpluyrDg==",
"funding": [
{
"type": "github",
@@ -2697,9 +2881,9 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/media-query-list-parser": "^4.0.2"
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/media-query-list-parser": "^4.0.3"
},
"engines": {
"node": ">=18"
@@ -2760,9 +2944,9 @@
}
},
"node_modules/@csstools/postcss-oklab-function": {
- "version": "4.0.9",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-oklab-function/-/postcss-oklab-function-4.0.9.tgz",
- "integrity": "sha512-UHrnujimwtdDw8BYDcWJtBXuJ13uc/BjAddPdfMc/RsWxhg8gG8UbvTF0tnMtHrZ4i7lwy85fPEzK1AiykMyRA==",
+ "version": "4.0.12",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-oklab-function/-/postcss-oklab-function-4.0.12.tgz",
+ "integrity": "sha512-HhlSmnE1NKBhXsTnNGjxvhryKtO7tJd1w42DKOGFD6jSHtYOrsJTQDKPMwvOfrzUAk8t7GcpIfRyM7ssqHpFjg==",
"funding": [
{
"type": "github",
@@ -2775,10 +2959,10 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-color-parser": "^3.0.9",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -2789,9 +2973,9 @@
}
},
"node_modules/@csstools/postcss-progressive-custom-properties": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-4.0.1.tgz",
- "integrity": "sha512-Ofz81HaY8mmbP8/Qr3PZlUzjsyV5WuxWmvtYn+jhYGvvjFazTmN9R2io5W5znY1tyk2CA9uM0IPWyY4ygDytCw==",
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-4.2.1.tgz",
+ "integrity": "sha512-uPiiXf7IEKtUQXsxu6uWtOlRMXd2QWWy5fhxHDnPdXKCQckPP3E34ZgDoZ62r2iT+UOgWsSbM4NvHE5m3mAEdw==",
"funding": [
{
"type": "github",
@@ -2814,9 +2998,9 @@
}
},
"node_modules/@csstools/postcss-random-function": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-random-function/-/postcss-random-function-2.0.0.tgz",
- "integrity": "sha512-MYZKxSr4AKfjECL8vg49BbfNNzK+t3p2OWX+Xf7rXgMaTP44oy/e8VGWu4MLnJ3NUd9tFVkisLO/sg+5wMTNsg==",
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-random-function/-/postcss-random-function-2.0.1.tgz",
+ "integrity": "sha512-q+FQaNiRBhnoSNo+GzqGOIBKoHQ43lYz0ICrV+UudfWnEF6ksS6DsBIJSISKQT2Bvu3g4k6r7t0zYrk5pDlo8w==",
"funding": [
{
"type": "github",
@@ -2829,9 +3013,9 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-calc": "^2.1.3",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-calc": "^2.1.4",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
},
"engines": {
"node": ">=18"
@@ -2841,9 +3025,9 @@
}
},
"node_modules/@csstools/postcss-relative-color-syntax": {
- "version": "3.0.9",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-relative-color-syntax/-/postcss-relative-color-syntax-3.0.9.tgz",
- "integrity": "sha512-+AGOcLF5PmMnTRPnOdCvY7AwvD5veIOhTWbJV6vC3hB1tt0ii/k6QOwhWfsGGg1ZPQ0JY15u+wqLR4ZTtB0luA==",
+ "version": "3.0.12",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-relative-color-syntax/-/postcss-relative-color-syntax-3.0.12.tgz",
+ "integrity": "sha512-0RLIeONxu/mtxRtf3o41Lq2ghLimw0w9ByLWnnEVuy89exmEEq8bynveBxNW3nyHqLAFEeNtVEmC1QK9MZ8Huw==",
"funding": [
{
"type": "github",
@@ -2856,10 +3040,10 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-color-parser": "^3.0.9",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -2908,9 +3092,9 @@
}
},
"node_modules/@csstools/postcss-sign-functions": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-sign-functions/-/postcss-sign-functions-1.1.3.tgz",
- "integrity": "sha512-4F4GRhj8xNkBtLZ+3ycIhReaDfKJByXI+cQGIps3AzCO8/CJOeoDPxpMnL5vqZrWKOceSATHEQJUO/Q/r2y7OQ==",
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-sign-functions/-/postcss-sign-functions-1.1.4.tgz",
+ "integrity": "sha512-P97h1XqRPcfcJndFdG95Gv/6ZzxUBBISem0IDqPZ7WMvc/wlO+yU0c5D/OCpZ5TJoTt63Ok3knGk64N+o6L2Pg==",
"funding": [
{
"type": "github",
@@ -2923,9 +3107,9 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-calc": "^2.1.3",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-calc": "^2.1.4",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
},
"engines": {
"node": ">=18"
@@ -2935,9 +3119,9 @@
}
},
"node_modules/@csstools/postcss-stepped-value-functions": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-4.0.8.tgz",
- "integrity": "sha512-6Y4yhL4fNhgzbZ/wUMQ4EjFUfoNNMpEXZnDw1JrlcEBHUT15gplchtFsZGk7FNi8PhLHJfCUwVKrEHzhfhKK+g==",
+ "version": "4.0.9",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-4.0.9.tgz",
+ "integrity": "sha512-h9btycWrsex4dNLeQfyU3y3w40LMQooJWFMm/SK9lrKguHDcFl4VMkncKKoXi2z5rM9YGWbUQABI8BT2UydIcA==",
"funding": [
{
"type": "github",
@@ -2950,9 +3134,9 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-calc": "^2.1.3",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-calc": "^2.1.4",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
},
"engines": {
"node": ">=18"
@@ -2962,9 +3146,9 @@
}
},
"node_modules/@csstools/postcss-text-decoration-shorthand": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-4.0.2.tgz",
- "integrity": "sha512-8XvCRrFNseBSAGxeaVTaNijAu+FzUvjwFXtcrynmazGb/9WUdsPCpBX+mHEHShVRq47Gy4peYAoxYs8ltUnmzA==",
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-4.0.3.tgz",
+ "integrity": "sha512-KSkGgZfx0kQjRIYnpsD7X2Om9BUXX/Kii77VBifQW9Ih929hK0KNjVngHDH0bFB9GmfWcR9vJYJJRvw/NQjkrA==",
"funding": [
{
"type": "github",
@@ -2977,7 +3161,7 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/color-helpers": "^5.0.2",
+ "@csstools/color-helpers": "^5.1.0",
"postcss-value-parser": "^4.2.0"
},
"engines": {
@@ -2988,9 +3172,9 @@
}
},
"node_modules/@csstools/postcss-trigonometric-functions": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-4.0.8.tgz",
- "integrity": "sha512-YcDvYTRu7f78/91B6bX+mE1WoAO91Su7/8KSRpuWbIGUB8hmaNSRu9wziaWSLJ1lOB1aQe+bvo9BIaLKqPOo/g==",
+ "version": "4.0.9",
+ "resolved": "https://registry.npmjs.org/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-4.0.9.tgz",
+ "integrity": "sha512-Hnh5zJUdpNrJqK9v1/E3BbrQhaDTj5YiX7P61TOvUhoDHnUmsNNxcDAgkQ32RrcWx9GVUvfUNPcUkn8R3vIX6A==",
"funding": [
{
"type": "github",
@@ -3003,9 +3187,9 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-calc": "^2.1.3",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3"
+ "@csstools/css-calc": "^2.1.4",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
},
"engines": {
"node": ">=18"
@@ -3647,6 +3831,28 @@
"react-dom": "^18.0.0 || ^19.0.0"
}
},
+ "node_modules/@docusaurus/theme-mermaid": {
+ "version": "3.7.0",
+ "resolved": "https://registry.npmjs.org/@docusaurus/theme-mermaid/-/theme-mermaid-3.7.0.tgz",
+ "integrity": "sha512-7kNDvL7hm+tshjxSxIqYMtsLUPsEBYnkevej/ext6ru9xyLgCed+zkvTfGzTWNeq8rJIEe2YSS8/OV5gCVaPCw==",
+ "license": "MIT",
+ "dependencies": {
+ "@docusaurus/core": "3.7.0",
+ "@docusaurus/module-type-aliases": "3.7.0",
+ "@docusaurus/theme-common": "3.7.0",
+ "@docusaurus/types": "3.7.0",
+ "@docusaurus/utils-validation": "3.7.0",
+ "mermaid": ">=10.4",
+ "tslib": "^2.6.0"
+ },
+ "engines": {
+ "node": ">=18.0"
+ },
+ "peerDependencies": {
+ "react": "^18.0.0 || ^19.0.0",
+ "react-dom": "^18.0.0 || ^19.0.0"
+ }
+ },
"node_modules/@docusaurus/theme-search-algolia": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.7.0.tgz",
@@ -3955,6 +4161,40 @@
"@hapi/hoek": "^9.0.0"
}
},
+ "node_modules/@iconify/types": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz",
+ "integrity": "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==",
+ "license": "MIT"
+ },
+ "node_modules/@iconify/utils": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@iconify/utils/-/utils-3.0.2.tgz",
+ "integrity": "sha512-EfJS0rLfVuRuJRn4psJHtK2A9TqVnkxPpHY6lYHiB9+8eSuudsxbwMiavocG45ujOo6FJ+CIRlRnlOGinzkaGQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@antfu/install-pkg": "^1.1.0",
+ "@antfu/utils": "^9.2.0",
+ "@iconify/types": "^2.0.0",
+ "debug": "^4.4.1",
+ "globals": "^15.15.0",
+ "kolorist": "^1.8.0",
+ "local-pkg": "^1.1.1",
+ "mlly": "^1.7.4"
+ }
+ },
+ "node_modules/@iconify/utils/node_modules/globals": {
+ "version": "15.15.0",
+ "resolved": "https://registry.npmjs.org/globals/-/globals-15.15.0.tgz",
+ "integrity": "sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/@jest/schemas": {
"version": "29.6.3",
"resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz",
@@ -4101,6 +4341,15 @@
"react": ">=16"
}
},
+ "node_modules/@mermaid-js/parser": {
+ "version": "0.6.2",
+ "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.6.2.tgz",
+ "integrity": "sha512-+PO02uGF6L6Cs0Bw8RpGhikVvMWEysfAyl27qTlroUB8jSWr1lL0Sf6zi78ZxlSnmgSY2AMMKVgghnN9jTtwkQ==",
+ "license": "MIT",
+ "dependencies": {
+ "langium": "3.3.1"
+ }
+ },
"node_modules/@napi-rs/wasm-runtime": {
"version": "0.2.12",
"resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz",
@@ -4824,60 +5073,313 @@
"@types/node": "*"
}
},
- "node_modules/@types/debug": {
- "version": "4.1.12",
- "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
- "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
- "license": "MIT",
- "dependencies": {
- "@types/ms": "*"
- }
+ "node_modules/@types/d3": {
+ "version": "7.4.3",
+ "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.3.tgz",
+ "integrity": "sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-array": "*",
+ "@types/d3-axis": "*",
+ "@types/d3-brush": "*",
+ "@types/d3-chord": "*",
+ "@types/d3-color": "*",
+ "@types/d3-contour": "*",
+ "@types/d3-delaunay": "*",
+ "@types/d3-dispatch": "*",
+ "@types/d3-drag": "*",
+ "@types/d3-dsv": "*",
+ "@types/d3-ease": "*",
+ "@types/d3-fetch": "*",
+ "@types/d3-force": "*",
+ "@types/d3-format": "*",
+ "@types/d3-geo": "*",
+ "@types/d3-hierarchy": "*",
+ "@types/d3-interpolate": "*",
+ "@types/d3-path": "*",
+ "@types/d3-polygon": "*",
+ "@types/d3-quadtree": "*",
+ "@types/d3-random": "*",
+ "@types/d3-scale": "*",
+ "@types/d3-scale-chromatic": "*",
+ "@types/d3-selection": "*",
+ "@types/d3-shape": "*",
+ "@types/d3-time": "*",
+ "@types/d3-time-format": "*",
+ "@types/d3-timer": "*",
+ "@types/d3-transition": "*",
+ "@types/d3-zoom": "*"
+ }
+ },
+ "node_modules/@types/d3-array": {
+ "version": "3.2.2",
+ "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz",
+ "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==",
+ "license": "MIT"
},
- "node_modules/@types/eslint": {
- "version": "9.6.1",
- "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-9.6.1.tgz",
- "integrity": "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==",
+ "node_modules/@types/d3-axis": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.6.tgz",
+ "integrity": "sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==",
"license": "MIT",
"dependencies": {
- "@types/estree": "*",
- "@types/json-schema": "*"
+ "@types/d3-selection": "*"
}
},
- "node_modules/@types/eslint-scope": {
- "version": "3.7.7",
- "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz",
- "integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==",
+ "node_modules/@types/d3-brush": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.6.tgz",
+ "integrity": "sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==",
"license": "MIT",
"dependencies": {
- "@types/eslint": "*",
- "@types/estree": "*"
+ "@types/d3-selection": "*"
}
},
- "node_modules/@types/estree": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz",
- "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==",
+ "node_modules/@types/d3-chord": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.6.tgz",
+ "integrity": "sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==",
"license": "MIT"
},
- "node_modules/@types/estree-jsx": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz",
- "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==",
+ "node_modules/@types/d3-color": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz",
+ "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-contour": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.6.tgz",
+ "integrity": "sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==",
"license": "MIT",
"dependencies": {
- "@types/estree": "*"
+ "@types/d3-array": "*",
+ "@types/geojson": "*"
}
},
- "node_modules/@types/express": {
- "version": "4.17.21",
- "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz",
- "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==",
+ "node_modules/@types/d3-delaunay": {
+ "version": "6.0.4",
+ "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.4.tgz",
+ "integrity": "sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-dispatch": {
+ "version": "3.0.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.7.tgz",
+ "integrity": "sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-drag": {
+ "version": "3.0.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.7.tgz",
+ "integrity": "sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==",
"license": "MIT",
"dependencies": {
- "@types/body-parser": "*",
- "@types/express-serve-static-core": "^4.17.33",
- "@types/qs": "*",
- "@types/serve-static": "*"
+ "@types/d3-selection": "*"
+ }
+ },
+ "node_modules/@types/d3-dsv": {
+ "version": "3.0.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.7.tgz",
+ "integrity": "sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-ease": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz",
+ "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-fetch": {
+ "version": "3.0.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.7.tgz",
+ "integrity": "sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-dsv": "*"
+ }
+ },
+ "node_modules/@types/d3-force": {
+ "version": "3.0.10",
+ "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.10.tgz",
+ "integrity": "sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-format": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.4.tgz",
+ "integrity": "sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-geo": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.1.0.tgz",
+ "integrity": "sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/geojson": "*"
+ }
+ },
+ "node_modules/@types/d3-hierarchy": {
+ "version": "3.1.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz",
+ "integrity": "sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-interpolate": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz",
+ "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-color": "*"
+ }
+ },
+ "node_modules/@types/d3-path": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz",
+ "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-polygon": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-3.0.2.tgz",
+ "integrity": "sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-quadtree": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.6.tgz",
+ "integrity": "sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-random": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-3.0.3.tgz",
+ "integrity": "sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-scale": {
+ "version": "4.0.9",
+ "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz",
+ "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-time": "*"
+ }
+ },
+ "node_modules/@types/d3-scale-chromatic": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz",
+ "integrity": "sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-selection": {
+ "version": "3.0.11",
+ "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz",
+ "integrity": "sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-shape": {
+ "version": "3.1.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.7.tgz",
+ "integrity": "sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-path": "*"
+ }
+ },
+ "node_modules/@types/d3-time": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz",
+ "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-time-format": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.3.tgz",
+ "integrity": "sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-timer": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz",
+ "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-transition": {
+ "version": "3.0.9",
+ "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz",
+ "integrity": "sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-selection": "*"
+ }
+ },
+ "node_modules/@types/d3-zoom": {
+ "version": "3.0.8",
+ "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.8.tgz",
+ "integrity": "sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-interpolate": "*",
+ "@types/d3-selection": "*"
+ }
+ },
+ "node_modules/@types/debug": {
+ "version": "4.1.12",
+ "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
+ "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/ms": "*"
+ }
+ },
+ "node_modules/@types/eslint": {
+ "version": "9.6.1",
+ "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-9.6.1.tgz",
+ "integrity": "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "*",
+ "@types/json-schema": "*"
+ }
+ },
+ "node_modules/@types/eslint-scope": {
+ "version": "3.7.7",
+ "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz",
+ "integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/eslint": "*",
+ "@types/estree": "*"
+ }
+ },
+ "node_modules/@types/estree": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz",
+ "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==",
+ "license": "MIT"
+ },
+ "node_modules/@types/estree-jsx": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz",
+ "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "*"
+ }
+ },
+ "node_modules/@types/express": {
+ "version": "4.17.21",
+ "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz",
+ "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/body-parser": "*",
+ "@types/express-serve-static-core": "^4.17.33",
+ "@types/qs": "*",
+ "@types/serve-static": "*"
}
},
"node_modules/@types/express-serve-static-core": {
@@ -4904,6 +5406,12 @@
"@types/send": "*"
}
},
+ "node_modules/@types/geojson": {
+ "version": "7946.0.16",
+ "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz",
+ "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==",
+ "license": "MIT"
+ },
"node_modules/@types/gtag.js": {
"version": "0.0.12",
"resolved": "https://registry.npmjs.org/@types/gtag.js/-/gtag.js-0.0.12.tgz",
@@ -5146,6 +5654,13 @@
"@types/node": "*"
}
},
+ "node_modules/@types/trusted-types": {
+ "version": "2.0.7",
+ "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz",
+ "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==",
+ "license": "MIT",
+ "optional": true
+ },
"node_modules/@types/unist": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
@@ -5384,9 +5899,9 @@
}
},
"node_modules/acorn": {
- "version": "8.14.1",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz",
- "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==",
+ "version": "8.15.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
+ "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
"license": "MIT",
"bin": {
"acorn": "bin/acorn"
@@ -5796,6 +6311,15 @@
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
"license": "MIT"
},
+ "node_modules/baseline-browser-mapping": {
+ "version": "2.8.12",
+ "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.12.tgz",
+ "integrity": "sha512-vAPMQdnyKCBtkmQA6FMCBvU9qFIppS3nzyXnEM+Lo2IAhG4Mpjv9cCxMudhgV3YdNNJv6TNqXy97dfRVL2LmaQ==",
+ "license": "Apache-2.0",
+ "bin": {
+ "baseline-browser-mapping": "dist/cli.js"
+ }
+ },
"node_modules/batch": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz",
@@ -5932,9 +6456,9 @@
}
},
"node_modules/browserslist": {
- "version": "4.24.4",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz",
- "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==",
+ "version": "4.26.3",
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.26.3.tgz",
+ "integrity": "sha512-lAUU+02RFBuCKQPj/P6NgjlbCnLBMp4UtgTx7vNHd3XSIJF87s9a5rA3aH2yw3GS9DqZAUbOtZdCCiZeVRqt0w==",
"funding": [
{
"type": "opencollective",
@@ -5951,10 +6475,11 @@
],
"license": "MIT",
"dependencies": {
- "caniuse-lite": "^1.0.30001688",
- "electron-to-chromium": "^1.5.73",
- "node-releases": "^2.0.19",
- "update-browserslist-db": "^1.1.1"
+ "baseline-browser-mapping": "^2.8.9",
+ "caniuse-lite": "^1.0.30001746",
+ "electron-to-chromium": "^1.5.227",
+ "node-releases": "^2.0.21",
+ "update-browserslist-db": "^1.1.3"
},
"bin": {
"browserslist": "cli.js"
@@ -6096,9 +6621,9 @@
}
},
"node_modules/caniuse-lite": {
- "version": "1.0.30001715",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001715.tgz",
- "integrity": "sha512-7ptkFGMm2OAOgvZpwgA4yjQ5SQbrNVGdRjzH0pBdy1Fasvcr+KAeECmbCAECzTuDuoX0FCY8KzUxjf9+9kfZEw==",
+ "version": "1.0.30001748",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001748.tgz",
+ "integrity": "sha512-5P5UgAr0+aBmNiplks08JLw+AW/XG/SurlgZLgB1dDLfAw7EfRGxIwzPHxdSCGY/BTKDqIVyJL87cCN6s0ZR0w==",
"funding": [
{
"type": "opencollective",
@@ -6228,6 +6753,32 @@
"url": "https://github.com/sponsors/fb55"
}
},
+ "node_modules/chevrotain": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.0.3.tgz",
+ "integrity": "sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@chevrotain/cst-dts-gen": "11.0.3",
+ "@chevrotain/gast": "11.0.3",
+ "@chevrotain/regexp-to-ast": "11.0.3",
+ "@chevrotain/types": "11.0.3",
+ "@chevrotain/utils": "11.0.3",
+ "lodash-es": "4.17.21"
+ }
+ },
+ "node_modules/chevrotain-allstar": {
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/chevrotain-allstar/-/chevrotain-allstar-0.3.1.tgz",
+ "integrity": "sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==",
+ "license": "MIT",
+ "dependencies": {
+ "lodash-es": "^4.17.21"
+ },
+ "peerDependencies": {
+ "chevrotain": "^11.0.0"
+ }
+ },
"node_modules/chokidar": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
@@ -6525,6 +7076,12 @@
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
"license": "MIT"
},
+ "node_modules/confbox": {
+ "version": "0.2.2",
+ "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.2.2.tgz",
+ "integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==",
+ "license": "MIT"
+ },
"node_modules/config-chain": {
"version": "1.1.13",
"resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz",
@@ -6731,6 +7288,15 @@
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==",
"license": "MIT"
},
+ "node_modules/cose-base": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz",
+ "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==",
+ "license": "MIT",
+ "dependencies": {
+ "layout-base": "^1.0.0"
+ }
+ },
"node_modules/cosmiconfig": {
"version": "8.3.6",
"resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz",
@@ -6849,9 +7415,9 @@
}
},
"node_modules/css-has-pseudo": {
- "version": "7.0.2",
- "resolved": "https://registry.npmjs.org/css-has-pseudo/-/css-has-pseudo-7.0.2.tgz",
- "integrity": "sha512-nzol/h+E0bId46Kn2dQH5VElaknX2Sr0hFuB/1EomdC7j+OISt2ZzK7EHX9DZDY53WbIVAR7FYKSO2XnSf07MQ==",
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/css-has-pseudo/-/css-has-pseudo-7.0.3.tgz",
+ "integrity": "sha512-oG+vKuGyqe/xvEMoxAQrhi7uY16deJR3i7wwhBerVrGQKSqUC5GiOVxTpM9F9B9hw0J+eKeOWLH7E9gZ1Dr5rA==",
"funding": [
{
"type": "github",
@@ -7053,9 +7619,9 @@
}
},
"node_modules/cssdb": {
- "version": "8.2.5",
- "resolved": "https://registry.npmjs.org/cssdb/-/cssdb-8.2.5.tgz",
- "integrity": "sha512-leAt8/hdTCtzql9ZZi86uYAmCLzVKpJMMdjbvOGVnXFXz/BWFpBmM1MHEHU/RqtPyRYmabVmEW1DtX3YGLuuLA==",
+ "version": "8.4.2",
+ "resolved": "https://registry.npmjs.org/cssdb/-/cssdb-8.4.2.tgz",
+ "integrity": "sha512-PzjkRkRUS+IHDJohtxkIczlxPPZqRo0nXplsYXOMBRPjcVRjj1W4DfvRgshUYTVuUigU7ptVYkFJQ7abUB0nyg==",
"funding": [
{
"type": "opencollective",
@@ -7159,61 +7725,587 @@
"postcss-unique-selectors": "^6.0.4"
},
"engines": {
- "node": "^14 || ^16 || >=18.0"
+ "node": "^14 || ^16 || >=18.0"
+ },
+ "peerDependencies": {
+ "postcss": "^8.4.31"
+ }
+ },
+ "node_modules/cssnano-utils": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-4.0.2.tgz",
+ "integrity": "sha512-ZR1jHg+wZ8o4c3zqf1SIUSTIvm/9mU343FMR6Obe/unskbvpGhZOo1J6d/r8D1pzkRQYuwbcH3hToOuoA2G7oQ==",
+ "license": "MIT",
+ "engines": {
+ "node": "^14 || ^16 || >=18.0"
+ },
+ "peerDependencies": {
+ "postcss": "^8.4.31"
+ }
+ },
+ "node_modules/csso": {
+ "version": "5.0.5",
+ "resolved": "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz",
+ "integrity": "sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==",
+ "license": "MIT",
+ "dependencies": {
+ "css-tree": "~2.2.0"
+ },
+ "engines": {
+ "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0",
+ "npm": ">=7.0.0"
+ }
+ },
+ "node_modules/csso/node_modules/css-tree": {
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz",
+ "integrity": "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==",
+ "license": "MIT",
+ "dependencies": {
+ "mdn-data": "2.0.28",
+ "source-map-js": "^1.0.1"
+ },
+ "engines": {
+ "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0",
+ "npm": ">=7.0.0"
+ }
+ },
+ "node_modules/csso/node_modules/mdn-data": {
+ "version": "2.0.28",
+ "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz",
+ "integrity": "sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==",
+ "license": "CC0-1.0"
+ },
+ "node_modules/csstype": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz",
+ "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==",
+ "license": "MIT"
+ },
+ "node_modules/cytoscape": {
+ "version": "3.33.1",
+ "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.33.1.tgz",
+ "integrity": "sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10"
+ }
+ },
+ "node_modules/cytoscape-cose-bilkent": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz",
+ "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==",
+ "license": "MIT",
+ "dependencies": {
+ "cose-base": "^1.0.0"
+ },
+ "peerDependencies": {
+ "cytoscape": "^3.2.0"
+ }
+ },
+ "node_modules/cytoscape-fcose": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz",
+ "integrity": "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==",
+ "license": "MIT",
+ "dependencies": {
+ "cose-base": "^2.2.0"
+ },
+ "peerDependencies": {
+ "cytoscape": "^3.2.0"
+ }
+ },
+ "node_modules/cytoscape-fcose/node_modules/cose-base": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz",
+ "integrity": "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==",
+ "license": "MIT",
+ "dependencies": {
+ "layout-base": "^2.0.0"
+ }
+ },
+ "node_modules/cytoscape-fcose/node_modules/layout-base": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz",
+ "integrity": "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==",
+ "license": "MIT"
+ },
+ "node_modules/d3": {
+ "version": "7.9.0",
+ "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz",
+ "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-array": "3",
+ "d3-axis": "3",
+ "d3-brush": "3",
+ "d3-chord": "3",
+ "d3-color": "3",
+ "d3-contour": "4",
+ "d3-delaunay": "6",
+ "d3-dispatch": "3",
+ "d3-drag": "3",
+ "d3-dsv": "3",
+ "d3-ease": "3",
+ "d3-fetch": "3",
+ "d3-force": "3",
+ "d3-format": "3",
+ "d3-geo": "3",
+ "d3-hierarchy": "3",
+ "d3-interpolate": "3",
+ "d3-path": "3",
+ "d3-polygon": "3",
+ "d3-quadtree": "3",
+ "d3-random": "3",
+ "d3-scale": "4",
+ "d3-scale-chromatic": "3",
+ "d3-selection": "3",
+ "d3-shape": "3",
+ "d3-time": "3",
+ "d3-time-format": "4",
+ "d3-timer": "3",
+ "d3-transition": "3",
+ "d3-zoom": "3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-array": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz",
+ "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==",
+ "license": "ISC",
+ "dependencies": {
+ "internmap": "1 - 2"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-axis": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz",
+ "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-brush": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz",
+ "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-dispatch": "1 - 3",
+ "d3-drag": "2 - 3",
+ "d3-interpolate": "1 - 3",
+ "d3-selection": "3",
+ "d3-transition": "3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-chord": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz",
+ "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-path": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-color": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz",
+ "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-contour": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz",
+ "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-array": "^3.2.0"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-delaunay": {
+ "version": "6.0.4",
+ "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz",
+ "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==",
+ "license": "ISC",
+ "dependencies": {
+ "delaunator": "5"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-dispatch": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz",
+ "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-drag": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz",
+ "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-dispatch": "1 - 3",
+ "d3-selection": "3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-dsv": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz",
+ "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==",
+ "license": "ISC",
+ "dependencies": {
+ "commander": "7",
+ "iconv-lite": "0.6",
+ "rw": "1"
+ },
+ "bin": {
+ "csv2json": "bin/dsv2json.js",
+ "csv2tsv": "bin/dsv2dsv.js",
+ "dsv2dsv": "bin/dsv2dsv.js",
+ "dsv2json": "bin/dsv2json.js",
+ "json2csv": "bin/json2dsv.js",
+ "json2dsv": "bin/json2dsv.js",
+ "json2tsv": "bin/json2dsv.js",
+ "tsv2csv": "bin/dsv2dsv.js",
+ "tsv2json": "bin/dsv2json.js"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-dsv/node_modules/commander": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz",
+ "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/d3-dsv/node_modules/iconv-lite": {
+ "version": "0.6.3",
+ "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
+ "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
+ "license": "MIT",
+ "dependencies": {
+ "safer-buffer": ">= 2.1.2 < 3.0.0"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/d3-ease": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz",
+ "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==",
+ "license": "BSD-3-Clause",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-fetch": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz",
+ "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-dsv": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-force": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz",
+ "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-dispatch": "1 - 3",
+ "d3-quadtree": "1 - 3",
+ "d3-timer": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-format": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz",
+ "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-geo": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz",
+ "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-array": "2.5.0 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-hierarchy": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz",
+ "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-interpolate": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz",
+ "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-color": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-path": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz",
+ "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-polygon": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz",
+ "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-quadtree": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz",
+ "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-random": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz",
+ "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-sankey": {
+ "version": "0.12.3",
+ "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz",
+ "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "d3-array": "1 - 2",
+ "d3-shape": "^1.2.0"
+ }
+ },
+ "node_modules/d3-sankey/node_modules/d3-array": {
+ "version": "2.12.1",
+ "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz",
+ "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "internmap": "^1.0.0"
+ }
+ },
+ "node_modules/d3-sankey/node_modules/d3-path": {
+ "version": "1.0.9",
+ "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz",
+ "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==",
+ "license": "BSD-3-Clause"
+ },
+ "node_modules/d3-sankey/node_modules/d3-shape": {
+ "version": "1.3.7",
+ "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz",
+ "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "d3-path": "1"
+ }
+ },
+ "node_modules/d3-sankey/node_modules/internmap": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz",
+ "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==",
+ "license": "ISC"
+ },
+ "node_modules/d3-scale": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz",
+ "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-array": "2.10.0 - 3",
+ "d3-format": "1 - 3",
+ "d3-interpolate": "1.2.0 - 3",
+ "d3-time": "2.1.1 - 3",
+ "d3-time-format": "2 - 4"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-scale-chromatic": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz",
+ "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-color": "1 - 3",
+ "d3-interpolate": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-selection": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz",
+ "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-shape": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz",
+ "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-path": "^3.1.0"
},
- "peerDependencies": {
- "postcss": "^8.4.31"
+ "engines": {
+ "node": ">=12"
}
},
- "node_modules/cssnano-utils": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-4.0.2.tgz",
- "integrity": "sha512-ZR1jHg+wZ8o4c3zqf1SIUSTIvm/9mU343FMR6Obe/unskbvpGhZOo1J6d/r8D1pzkRQYuwbcH3hToOuoA2G7oQ==",
- "license": "MIT",
+ "node_modules/d3-time": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz",
+ "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-array": "2 - 3"
+ },
"engines": {
- "node": "^14 || ^16 || >=18.0"
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-time-format": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz",
+ "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-time": "1 - 3"
},
- "peerDependencies": {
- "postcss": "^8.4.31"
+ "engines": {
+ "node": ">=12"
}
},
- "node_modules/csso": {
- "version": "5.0.5",
- "resolved": "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz",
- "integrity": "sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==",
- "license": "MIT",
+ "node_modules/d3-timer": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz",
+ "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-transition": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz",
+ "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==",
+ "license": "ISC",
"dependencies": {
- "css-tree": "~2.2.0"
+ "d3-color": "1 - 3",
+ "d3-dispatch": "1 - 3",
+ "d3-ease": "1 - 3",
+ "d3-interpolate": "1 - 3",
+ "d3-timer": "1 - 3"
},
"engines": {
- "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0",
- "npm": ">=7.0.0"
+ "node": ">=12"
+ },
+ "peerDependencies": {
+ "d3-selection": "2 - 3"
}
},
- "node_modules/csso/node_modules/css-tree": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz",
- "integrity": "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==",
- "license": "MIT",
+ "node_modules/d3-zoom": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz",
+ "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==",
+ "license": "ISC",
"dependencies": {
- "mdn-data": "2.0.28",
- "source-map-js": "^1.0.1"
+ "d3-dispatch": "1 - 3",
+ "d3-drag": "2 - 3",
+ "d3-interpolate": "1 - 3",
+ "d3-selection": "2 - 3",
+ "d3-transition": "2 - 3"
},
"engines": {
- "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0",
- "npm": ">=7.0.0"
+ "node": ">=12"
}
},
- "node_modules/csso/node_modules/mdn-data": {
- "version": "2.0.28",
- "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz",
- "integrity": "sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==",
- "license": "CC0-1.0"
+ "node_modules/dagre-d3-es": {
+ "version": "7.0.11",
+ "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.11.tgz",
+ "integrity": "sha512-tvlJLyQf834SylNKax8Wkzco/1ias1OPw8DcUMDE7oUIoSEW25riQVuiu/0OWEFqT0cxHT3Pa9/D82Jr47IONw==",
+ "license": "MIT",
+ "dependencies": {
+ "d3": "^7.9.0",
+ "lodash-es": "^4.17.21"
+ }
},
- "node_modules/csstype": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz",
- "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==",
+ "node_modules/dayjs": {
+ "version": "1.11.18",
+ "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.18.tgz",
+ "integrity": "sha512-zFBQ7WFRvVRhKcWoUh+ZA1g2HVgUbsZm9sbddh8EC5iv93sui8DVVz1Npvz+r6meo9VKfa8NyLWBsQK1VvIKPA==",
"license": "MIT"
},
"node_modules/debounce": {
@@ -7223,9 +8315,9 @@
"license": "MIT"
},
"node_modules/debug": {
- "version": "4.4.0",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
- "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
+ "version": "4.4.3",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
+ "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
@@ -7383,6 +8475,15 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/delaunator": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz",
+ "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==",
+ "license": "ISC",
+ "dependencies": {
+ "robust-predicates": "^3.0.2"
+ }
+ },
"node_modules/depd": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
@@ -7553,6 +8654,15 @@
"url": "https://github.com/fb55/domhandler?sponsor=1"
}
},
+ "node_modules/dompurify": {
+ "version": "3.2.7",
+ "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.7.tgz",
+ "integrity": "sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==",
+ "license": "(MPL-2.0 OR Apache-2.0)",
+ "optionalDependencies": {
+ "@types/trusted-types": "^2.0.7"
+ }
+ },
"node_modules/domutils": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz",
@@ -7634,9 +8744,9 @@
"license": "MIT"
},
"node_modules/electron-to-chromium": {
- "version": "1.5.142",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.142.tgz",
- "integrity": "sha512-Ah2HgkTu/9RhTDNThBtzu2Wirdy4DC9b0sMT1pUhbkZQ5U/iwmE+PHZX1MpjD5IkJCc2wSghgGG/B04szAx07w==",
+ "version": "1.5.230",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.230.tgz",
+ "integrity": "sha512-A6A6Fd3+gMdaed9wX83CvHYJb4UuapPD5X5SLq72VZJzxHSY0/LUweGXRWmQlh2ln7KV7iw7jnwXK7dlPoOnHQ==",
"license": "ISC"
},
"node_modules/emoji-regex": {
@@ -8172,6 +9282,12 @@
"node": ">= 0.6"
}
},
+ "node_modules/exsolve": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.7.tgz",
+ "integrity": "sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==",
+ "license": "MIT"
+ },
"node_modules/extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
@@ -9020,6 +10136,12 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/hachure-fill": {
+ "version": "0.5.2",
+ "resolved": "https://registry.npmjs.org/hachure-fill/-/hachure-fill-0.5.2.tgz",
+ "integrity": "sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==",
+ "license": "MIT"
+ },
"node_modules/handle-thing": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz",
@@ -9741,6 +10863,15 @@
"integrity": "sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q==",
"license": "MIT"
},
+ "node_modules/internmap": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz",
+ "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
"node_modules/interpret": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz",
@@ -10218,6 +11349,31 @@
"graceful-fs": "^4.1.6"
}
},
+ "node_modules/katex": {
+ "version": "0.16.23",
+ "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.23.tgz",
+ "integrity": "sha512-7VlC1hsEEolL9xNO05v9VjrvWZePkCVBJqj8ruICxYjZfHaHbaU53AlP+PODyFIXEnaEIEWi3wJy7FPZ95JAVg==",
+ "funding": [
+ "https://opencollective.com/katex",
+ "https://github.com/sponsors/katex"
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "commander": "^8.3.0"
+ },
+ "bin": {
+ "katex": "cli.js"
+ }
+ },
+ "node_modules/katex/node_modules/commander": {
+ "version": "8.3.0",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
+ "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 12"
+ }
+ },
"node_modules/keyv": {
"version": "4.5.4",
"resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
@@ -10227,6 +11383,11 @@
"json-buffer": "3.0.1"
}
},
+ "node_modules/khroma": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz",
+ "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw=="
+ },
"node_modules/kind-of": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
@@ -10254,6 +11415,28 @@
"node": ">=6"
}
},
+ "node_modules/kolorist": {
+ "version": "1.8.0",
+ "resolved": "https://registry.npmjs.org/kolorist/-/kolorist-1.8.0.tgz",
+ "integrity": "sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==",
+ "license": "MIT"
+ },
+ "node_modules/langium": {
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/langium/-/langium-3.3.1.tgz",
+ "integrity": "sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==",
+ "license": "MIT",
+ "dependencies": {
+ "chevrotain": "~11.0.3",
+ "chevrotain-allstar": "~0.3.0",
+ "vscode-languageserver": "~9.0.1",
+ "vscode-languageserver-textdocument": "~1.0.11",
+ "vscode-uri": "~3.0.8"
+ },
+ "engines": {
+ "node": ">=16.0.0"
+ }
+ },
"node_modules/latest-version": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/latest-version/-/latest-version-7.0.0.tgz",
@@ -10279,6 +11462,12 @@
"shell-quote": "^1.8.1"
}
},
+ "node_modules/layout-base": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz",
+ "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==",
+ "license": "MIT"
+ },
"node_modules/leven": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz",
@@ -10329,6 +11518,23 @@
"node": ">=8.9.0"
}
},
+ "node_modules/local-pkg": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-1.1.2.tgz",
+ "integrity": "sha512-arhlxbFRmoQHl33a0Zkle/YWlmNwoyt6QNZEIJcqNbdrsix5Lvc4HyyI3EnwxTYlZYc32EbYrQ8SzEZ7dqgg9A==",
+ "license": "MIT",
+ "dependencies": {
+ "mlly": "^1.7.4",
+ "pkg-types": "^2.3.0",
+ "quansync": "^0.2.11"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/antfu"
+ }
+ },
"node_modules/locate-path": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz",
@@ -10350,6 +11556,12 @@
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
"license": "MIT"
},
+ "node_modules/lodash-es": {
+ "version": "4.17.21",
+ "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz",
+ "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==",
+ "license": "MIT"
+ },
"node_modules/lodash.debounce": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
@@ -10460,6 +11672,18 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/marked": {
+ "version": "16.3.0",
+ "resolved": "https://registry.npmjs.org/marked/-/marked-16.3.0.tgz",
+ "integrity": "sha512-K3UxuKu6l6bmA5FUwYho8CfJBlsUWAooKtdGgMcERSpF7gcBUrCGsLH7wDaaNOzwq18JzSUDyoEb/YsrqMac3w==",
+ "license": "MIT",
+ "bin": {
+ "marked": "bin/marked.js"
+ },
+ "engines": {
+ "node": ">= 20"
+ }
+ },
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
@@ -10922,6 +12146,47 @@
"node": ">= 8"
}
},
+ "node_modules/mermaid": {
+ "version": "11.12.0",
+ "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.0.tgz",
+ "integrity": "sha512-ZudVx73BwrMJfCFmSSJT84y6u5brEoV8DOItdHomNLz32uBjNrelm7mg95X7g+C6UoQH/W6mBLGDEDv73JdxBg==",
+ "license": "MIT",
+ "dependencies": {
+ "@braintree/sanitize-url": "^7.1.1",
+ "@iconify/utils": "^3.0.1",
+ "@mermaid-js/parser": "^0.6.2",
+ "@types/d3": "^7.4.3",
+ "cytoscape": "^3.29.3",
+ "cytoscape-cose-bilkent": "^4.1.0",
+ "cytoscape-fcose": "^2.2.0",
+ "d3": "^7.9.0",
+ "d3-sankey": "^0.12.3",
+ "dagre-d3-es": "7.0.11",
+ "dayjs": "^1.11.18",
+ "dompurify": "^3.2.5",
+ "katex": "^0.16.22",
+ "khroma": "^2.1.0",
+ "lodash-es": "^4.17.21",
+ "marked": "^16.2.1",
+ "roughjs": "^4.6.6",
+ "stylis": "^4.3.6",
+ "ts-dedent": "^2.2.0",
+ "uuid": "^11.1.0"
+ }
+ },
+ "node_modules/mermaid/node_modules/uuid": {
+ "version": "11.1.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz",
+ "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==",
+ "funding": [
+ "https://github.com/sponsors/broofa",
+ "https://github.com/sponsors/ctavan"
+ ],
+ "license": "MIT",
+ "bin": {
+ "uuid": "dist/esm/bin/uuid"
+ }
+ },
"node_modules/methods": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
@@ -12829,6 +14094,35 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/mlly": {
+ "version": "1.8.0",
+ "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.8.0.tgz",
+ "integrity": "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==",
+ "license": "MIT",
+ "dependencies": {
+ "acorn": "^8.15.0",
+ "pathe": "^2.0.3",
+ "pkg-types": "^1.3.1",
+ "ufo": "^1.6.1"
+ }
+ },
+ "node_modules/mlly/node_modules/confbox": {
+ "version": "0.1.8",
+ "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz",
+ "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==",
+ "license": "MIT"
+ },
+ "node_modules/mlly/node_modules/pkg-types": {
+ "version": "1.3.1",
+ "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz",
+ "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==",
+ "license": "MIT",
+ "dependencies": {
+ "confbox": "^0.1.8",
+ "mlly": "^1.7.4",
+ "pathe": "^2.0.1"
+ }
+ },
"node_modules/mrmime": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz",
@@ -12925,9 +14219,9 @@
}
},
"node_modules/node-releases": {
- "version": "2.0.19",
- "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz",
- "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==",
+ "version": "2.0.23",
+ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.23.tgz",
+ "integrity": "sha512-cCmFDMSm26S6tQSDpBCg/NR8NENrVPhAJSf+XbxBG4rPFaaonlEoE9wHQmun+cls499TQGSb7ZyPBRlzgKfpeg==",
"license": "MIT"
},
"node_modules/normalize-path": {
@@ -13280,6 +14574,12 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/package-manager-detector": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-1.4.0.tgz",
+ "integrity": "sha512-rRZ+pR1Usc+ND9M2NkmCvE/LYJS+8ORVV9X0KuNSY/gFsp7RBHJM/ADh9LYq4Vvfq6QkKrW6/weuh8SMEtN5gw==",
+ "license": "MIT"
+ },
"node_modules/param-case": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz",
@@ -13419,6 +14719,12 @@
"tslib": "^2.0.3"
}
},
+ "node_modules/path-data-parser": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/path-data-parser/-/path-data-parser-0.1.0.tgz",
+ "integrity": "sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==",
+ "license": "MIT"
+ },
"node_modules/path-exists": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz",
@@ -13476,6 +14782,12 @@
"node": ">=8"
}
},
+ "node_modules/pathe": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz",
+ "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==",
+ "license": "MIT"
+ },
"node_modules/picocolors": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
@@ -13509,6 +14821,17 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/pkg-types": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-2.3.0.tgz",
+ "integrity": "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==",
+ "license": "MIT",
+ "dependencies": {
+ "confbox": "^0.2.2",
+ "exsolve": "^1.0.7",
+ "pathe": "^2.0.3"
+ }
+ },
"node_modules/pkg-up": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz",
@@ -13582,10 +14905,26 @@
"node": ">=4"
}
},
+ "node_modules/points-on-curve": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/points-on-curve/-/points-on-curve-0.2.0.tgz",
+ "integrity": "sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==",
+ "license": "MIT"
+ },
+ "node_modules/points-on-path": {
+ "version": "0.2.1",
+ "resolved": "https://registry.npmjs.org/points-on-path/-/points-on-path-0.2.1.tgz",
+ "integrity": "sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==",
+ "license": "MIT",
+ "dependencies": {
+ "path-data-parser": "0.1.0",
+ "points-on-curve": "0.2.0"
+ }
+ },
"node_modules/postcss": {
- "version": "8.5.3",
- "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz",
- "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==",
+ "version": "8.5.6",
+ "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
+ "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==",
"funding": [
{
"type": "opencollective",
@@ -13602,7 +14941,7 @@
],
"license": "MIT",
"dependencies": {
- "nanoid": "^3.3.8",
+ "nanoid": "^3.3.11",
"picocolors": "^1.1.1",
"source-map-js": "^1.2.1"
},
@@ -13680,9 +15019,9 @@
}
},
"node_modules/postcss-color-functional-notation": {
- "version": "7.0.9",
- "resolved": "https://registry.npmjs.org/postcss-color-functional-notation/-/postcss-color-functional-notation-7.0.9.tgz",
- "integrity": "sha512-WScwD3pSsIz+QP97sPkGCeJm7xUH0J18k6zV5o8O2a4cQJyv15vLUx/WFQajuJVgZhmJL5awDu8zHnqzAzm4lw==",
+ "version": "7.0.12",
+ "resolved": "https://registry.npmjs.org/postcss-color-functional-notation/-/postcss-color-functional-notation-7.0.12.tgz",
+ "integrity": "sha512-TLCW9fN5kvO/u38/uesdpbx3e8AkTYhMvDZYa9JpmImWuTE99bDQ7GU7hdOADIZsiI9/zuxfAJxny/khknp1Zw==",
"funding": [
{
"type": "github",
@@ -13695,10 +15034,10 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-color-parser": "^3.0.9",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -13795,9 +15134,9 @@
}
},
"node_modules/postcss-custom-media": {
- "version": "11.0.5",
- "resolved": "https://registry.npmjs.org/postcss-custom-media/-/postcss-custom-media-11.0.5.tgz",
- "integrity": "sha512-SQHhayVNgDvSAdX9NQ/ygcDQGEY+aSF4b/96z7QUX6mqL5yl/JgG/DywcF6fW9XbnCRE+aVYk+9/nqGuzOPWeQ==",
+ "version": "11.0.6",
+ "resolved": "https://registry.npmjs.org/postcss-custom-media/-/postcss-custom-media-11.0.6.tgz",
+ "integrity": "sha512-C4lD4b7mUIw+RZhtY7qUbf4eADmb7Ey8BFA2px9jUbwg7pjTZDl4KY4bvlUV+/vXQvzQRfiGEVJyAbtOsCMInw==",
"funding": [
{
"type": "github",
@@ -13810,10 +15149,10 @@
],
"license": "MIT",
"dependencies": {
- "@csstools/cascade-layer-name-parser": "^2.0.4",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/media-query-list-parser": "^4.0.2"
+ "@csstools/cascade-layer-name-parser": "^2.0.5",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/media-query-list-parser": "^4.0.3"
},
"engines": {
"node": ">=18"
@@ -13823,9 +15162,9 @@
}
},
"node_modules/postcss-custom-properties": {
- "version": "14.0.4",
- "resolved": "https://registry.npmjs.org/postcss-custom-properties/-/postcss-custom-properties-14.0.4.tgz",
- "integrity": "sha512-QnW8FCCK6q+4ierwjnmXF9Y9KF8q0JkbgVfvQEMa93x1GT8FvOiUevWCN2YLaOWyByeDX8S6VFbZEeWoAoXs2A==",
+ "version": "14.0.6",
+ "resolved": "https://registry.npmjs.org/postcss-custom-properties/-/postcss-custom-properties-14.0.6.tgz",
+ "integrity": "sha512-fTYSp3xuk4BUeVhxCSJdIPhDLpJfNakZKoiTDx7yRGCdlZrSJR7mWKVOBS4sBF+5poPQFMj2YdXx1VHItBGihQ==",
"funding": [
{
"type": "github",
@@ -13838,9 +15177,9 @@
],
"license": "MIT",
"dependencies": {
- "@csstools/cascade-layer-name-parser": "^2.0.4",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
+ "@csstools/cascade-layer-name-parser": "^2.0.5",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
"@csstools/utilities": "^2.0.0",
"postcss-value-parser": "^4.2.0"
},
@@ -13852,9 +15191,9 @@
}
},
"node_modules/postcss-custom-selectors": {
- "version": "8.0.4",
- "resolved": "https://registry.npmjs.org/postcss-custom-selectors/-/postcss-custom-selectors-8.0.4.tgz",
- "integrity": "sha512-ASOXqNvDCE0dAJ/5qixxPeL1aOVGHGW2JwSy7HyjWNbnWTQCl+fDc968HY1jCmZI0+BaYT5CxsOiUhavpG/7eg==",
+ "version": "8.0.5",
+ "resolved": "https://registry.npmjs.org/postcss-custom-selectors/-/postcss-custom-selectors-8.0.5.tgz",
+ "integrity": "sha512-9PGmckHQswiB2usSO6XMSswO2yFWVoCAuih1yl9FVcwkscLjRKjwsjM3t+NIWpSU2Jx3eOiK2+t4vVTQaoCHHg==",
"funding": [
{
"type": "github",
@@ -13867,9 +15206,9 @@
],
"license": "MIT",
"dependencies": {
- "@csstools/cascade-layer-name-parser": "^2.0.4",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
+ "@csstools/cascade-layer-name-parser": "^2.0.5",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
"postcss-selector-parser": "^7.0.0"
},
"engines": {
@@ -13994,9 +15333,9 @@
}
},
"node_modules/postcss-double-position-gradients": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/postcss-double-position-gradients/-/postcss-double-position-gradients-6.0.1.tgz",
- "integrity": "sha512-ZitCwmvOR4JzXmKw6sZblTgwV1dcfLvClcyjADuqZ5hU0Uk4SVNpvSN9w8NcJ7XuxhRYxVA8m8AB3gy+HNBQOA==",
+ "version": "6.0.4",
+ "resolved": "https://registry.npmjs.org/postcss-double-position-gradients/-/postcss-double-position-gradients-6.0.4.tgz",
+ "integrity": "sha512-m6IKmxo7FxSP5nF2l63QbCC3r+bWpFUWmZXZf096WxG0m7Vl1Q1+ruFOhpdDRmKrRS+S3Jtk+TVk/7z0+BVK6g==",
"funding": [
{
"type": "github",
@@ -14009,7 +15348,7 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0",
"postcss-value-parser": "^4.2.0"
},
@@ -14154,9 +15493,9 @@
}
},
"node_modules/postcss-lab-function": {
- "version": "7.0.9",
- "resolved": "https://registry.npmjs.org/postcss-lab-function/-/postcss-lab-function-7.0.9.tgz",
- "integrity": "sha512-IGbsIXbqMDusymJAKYX+f9oakPo89wL9Pzd/qRBQOVf3EIQWT9hgvqC4Me6Dkzxp3KPuIBf6LPkjrLHe/6ZMIQ==",
+ "version": "7.0.12",
+ "resolved": "https://registry.npmjs.org/postcss-lab-function/-/postcss-lab-function-7.0.12.tgz",
+ "integrity": "sha512-tUcyRk1ZTPec3OuKFsqtRzW2Go5lehW29XA21lZ65XmzQkz43VY2tyWEC202F7W3mILOjw0voOiuxRGTsN+J9w==",
"funding": [
{
"type": "github",
@@ -14169,10 +15508,10 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/css-color-parser": "^3.0.9",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
+ "@csstools/css-color-parser": "^3.1.0",
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
"@csstools/utilities": "^2.0.0"
},
"engines": {
@@ -14429,9 +15768,9 @@
}
},
"node_modules/postcss-nesting": {
- "version": "13.0.1",
- "resolved": "https://registry.npmjs.org/postcss-nesting/-/postcss-nesting-13.0.1.tgz",
- "integrity": "sha512-VbqqHkOBOt4Uu3G8Dm8n6lU5+9cJFxiuty9+4rcoyRPO9zZS1JIs6td49VIoix3qYqELHlJIn46Oih9SAKo+yQ==",
+ "version": "13.0.2",
+ "resolved": "https://registry.npmjs.org/postcss-nesting/-/postcss-nesting-13.0.2.tgz",
+ "integrity": "sha512-1YCI290TX+VP0U/K/aFxzHzQWHWURL+CtHMSbex1lCdpXD1SoR2sYuxDu5aNI9lPoXpKTCggFZiDJbwylU0LEQ==",
"funding": [
{
"type": "github",
@@ -14444,7 +15783,7 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/selector-resolve-nested": "^3.0.0",
+ "@csstools/selector-resolve-nested": "^3.1.0",
"@csstools/selector-specificity": "^5.0.0",
"postcss-selector-parser": "^7.0.0"
},
@@ -14456,9 +15795,9 @@
}
},
"node_modules/postcss-nesting/node_modules/@csstools/selector-resolve-nested": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/@csstools/selector-resolve-nested/-/selector-resolve-nested-3.0.0.tgz",
- "integrity": "sha512-ZoK24Yku6VJU1gS79a5PFmC8yn3wIapiKmPgun0hZgEI5AOqgH2kiPRsPz1qkGv4HL+wuDLH83yQyk6inMYrJQ==",
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/@csstools/selector-resolve-nested/-/selector-resolve-nested-3.1.0.tgz",
+ "integrity": "sha512-mf1LEW0tJLKfWyvn5KdDrhpxHyuxpbNwTIwOYLIvsTffeyOf85j5oIzfG0yosxDgx/sswlqBnESYUcQH0vgZ0g==",
"funding": [
{
"type": "github",
@@ -14743,9 +16082,9 @@
}
},
"node_modules/postcss-preset-env": {
- "version": "10.1.6",
- "resolved": "https://registry.npmjs.org/postcss-preset-env/-/postcss-preset-env-10.1.6.tgz",
- "integrity": "sha512-1jRD7vttKLJ7o0mcmmYWKRLm7W14rI8K1I7Y41OeXUPEVc/CAzfTssNUeJ0zKbR+zMk4boqct/gwS/poIFF5Lg==",
+ "version": "10.4.0",
+ "resolved": "https://registry.npmjs.org/postcss-preset-env/-/postcss-preset-env-10.4.0.tgz",
+ "integrity": "sha512-2kqpOthQ6JhxqQq1FSAAZGe9COQv75Aw8WbsOvQVNJ2nSevc9Yx/IKZGuZ7XJ+iOTtVon7LfO7ELRzg8AZ+sdw==",
"funding": [
{
"type": "github",
@@ -14758,62 +16097,66 @@
],
"license": "MIT-0",
"dependencies": {
- "@csstools/postcss-cascade-layers": "^5.0.1",
- "@csstools/postcss-color-function": "^4.0.9",
- "@csstools/postcss-color-mix-function": "^3.0.9",
- "@csstools/postcss-content-alt-text": "^2.0.5",
- "@csstools/postcss-exponential-functions": "^2.0.8",
+ "@csstools/postcss-alpha-function": "^1.0.1",
+ "@csstools/postcss-cascade-layers": "^5.0.2",
+ "@csstools/postcss-color-function": "^4.0.12",
+ "@csstools/postcss-color-function-display-p3-linear": "^1.0.1",
+ "@csstools/postcss-color-mix-function": "^3.0.12",
+ "@csstools/postcss-color-mix-variadic-function-arguments": "^1.0.2",
+ "@csstools/postcss-content-alt-text": "^2.0.8",
+ "@csstools/postcss-contrast-color-function": "^2.0.12",
+ "@csstools/postcss-exponential-functions": "^2.0.9",
"@csstools/postcss-font-format-keywords": "^4.0.0",
- "@csstools/postcss-gamut-mapping": "^2.0.9",
- "@csstools/postcss-gradients-interpolation-method": "^5.0.9",
- "@csstools/postcss-hwb-function": "^4.0.9",
- "@csstools/postcss-ic-unit": "^4.0.1",
+ "@csstools/postcss-gamut-mapping": "^2.0.11",
+ "@csstools/postcss-gradients-interpolation-method": "^5.0.12",
+ "@csstools/postcss-hwb-function": "^4.0.12",
+ "@csstools/postcss-ic-unit": "^4.0.4",
"@csstools/postcss-initial": "^2.0.1",
- "@csstools/postcss-is-pseudo-class": "^5.0.1",
- "@csstools/postcss-light-dark-function": "^2.0.8",
+ "@csstools/postcss-is-pseudo-class": "^5.0.3",
+ "@csstools/postcss-light-dark-function": "^2.0.11",
"@csstools/postcss-logical-float-and-clear": "^3.0.0",
"@csstools/postcss-logical-overflow": "^2.0.0",
"@csstools/postcss-logical-overscroll-behavior": "^2.0.0",
"@csstools/postcss-logical-resize": "^3.0.0",
- "@csstools/postcss-logical-viewport-units": "^3.0.3",
- "@csstools/postcss-media-minmax": "^2.0.8",
- "@csstools/postcss-media-queries-aspect-ratio-number-values": "^3.0.4",
+ "@csstools/postcss-logical-viewport-units": "^3.0.4",
+ "@csstools/postcss-media-minmax": "^2.0.9",
+ "@csstools/postcss-media-queries-aspect-ratio-number-values": "^3.0.5",
"@csstools/postcss-nested-calc": "^4.0.0",
"@csstools/postcss-normalize-display-values": "^4.0.0",
- "@csstools/postcss-oklab-function": "^4.0.9",
- "@csstools/postcss-progressive-custom-properties": "^4.0.1",
- "@csstools/postcss-random-function": "^2.0.0",
- "@csstools/postcss-relative-color-syntax": "^3.0.9",
+ "@csstools/postcss-oklab-function": "^4.0.12",
+ "@csstools/postcss-progressive-custom-properties": "^4.2.1",
+ "@csstools/postcss-random-function": "^2.0.1",
+ "@csstools/postcss-relative-color-syntax": "^3.0.12",
"@csstools/postcss-scope-pseudo-class": "^4.0.1",
- "@csstools/postcss-sign-functions": "^1.1.3",
- "@csstools/postcss-stepped-value-functions": "^4.0.8",
- "@csstools/postcss-text-decoration-shorthand": "^4.0.2",
- "@csstools/postcss-trigonometric-functions": "^4.0.8",
+ "@csstools/postcss-sign-functions": "^1.1.4",
+ "@csstools/postcss-stepped-value-functions": "^4.0.9",
+ "@csstools/postcss-text-decoration-shorthand": "^4.0.3",
+ "@csstools/postcss-trigonometric-functions": "^4.0.9",
"@csstools/postcss-unset-value": "^4.0.0",
"autoprefixer": "^10.4.21",
- "browserslist": "^4.24.4",
+ "browserslist": "^4.26.0",
"css-blank-pseudo": "^7.0.1",
- "css-has-pseudo": "^7.0.2",
+ "css-has-pseudo": "^7.0.3",
"css-prefers-color-scheme": "^10.0.0",
- "cssdb": "^8.2.5",
+ "cssdb": "^8.4.2",
"postcss-attribute-case-insensitive": "^7.0.1",
"postcss-clamp": "^4.1.0",
- "postcss-color-functional-notation": "^7.0.9",
+ "postcss-color-functional-notation": "^7.0.12",
"postcss-color-hex-alpha": "^10.0.0",
"postcss-color-rebeccapurple": "^10.0.0",
- "postcss-custom-media": "^11.0.5",
- "postcss-custom-properties": "^14.0.4",
- "postcss-custom-selectors": "^8.0.4",
+ "postcss-custom-media": "^11.0.6",
+ "postcss-custom-properties": "^14.0.6",
+ "postcss-custom-selectors": "^8.0.5",
"postcss-dir-pseudo-class": "^9.0.1",
- "postcss-double-position-gradients": "^6.0.1",
+ "postcss-double-position-gradients": "^6.0.4",
"postcss-focus-visible": "^10.0.1",
"postcss-focus-within": "^9.0.1",
"postcss-font-variant": "^5.0.0",
"postcss-gap-properties": "^6.0.0",
"postcss-image-set-function": "^7.0.0",
- "postcss-lab-function": "^7.0.9",
+ "postcss-lab-function": "^7.0.12",
"postcss-logical": "^8.1.0",
- "postcss-nesting": "^13.0.1",
+ "postcss-nesting": "^13.0.2",
"postcss-opacity-percentage": "^3.0.0",
"postcss-overflow-shorthand": "^6.0.0",
"postcss-page-break": "^3.0.4",
@@ -15185,6 +16528,22 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/quansync": {
+ "version": "0.2.11",
+ "resolved": "https://registry.npmjs.org/quansync/-/quansync-0.2.11.tgz",
+ "integrity": "sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==",
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://github.com/sponsors/antfu"
+ },
+ {
+ "type": "individual",
+ "url": "https://github.com/sponsors/sxzz"
+ }
+ ],
+ "license": "MIT"
+ },
"node_modules/queue": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/queue/-/queue-6.0.2.tgz",
@@ -16171,6 +17530,24 @@
"url": "https://github.com/sponsors/isaacs"
}
},
+ "node_modules/robust-predicates": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz",
+ "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==",
+ "license": "Unlicense"
+ },
+ "node_modules/roughjs": {
+ "version": "4.6.6",
+ "resolved": "https://registry.npmjs.org/roughjs/-/roughjs-4.6.6.tgz",
+ "integrity": "sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==",
+ "license": "MIT",
+ "dependencies": {
+ "hachure-fill": "^0.5.2",
+ "path-data-parser": "^0.1.0",
+ "points-on-curve": "^0.2.0",
+ "points-on-path": "^0.2.1"
+ }
+ },
"node_modules/rtlcss": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/rtlcss/-/rtlcss-4.3.0.tgz",
@@ -16212,6 +17589,12 @@
"queue-microtask": "^1.2.2"
}
},
+ "node_modules/rw": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz",
+ "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==",
+ "license": "BSD-3-Clause"
+ },
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@@ -17047,6 +18430,12 @@
"postcss": "^8.4.31"
}
},
+ "node_modules/stylis": {
+ "version": "4.3.6",
+ "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz",
+ "integrity": "sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==",
+ "license": "MIT"
+ },
"node_modules/supports-color": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
@@ -17231,6 +18620,12 @@
"integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==",
"license": "MIT"
},
+ "node_modules/tinyexec": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz",
+ "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==",
+ "license": "MIT"
+ },
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
@@ -17281,6 +18676,15 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/ts-dedent": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz",
+ "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.10"
+ }
+ },
"node_modules/tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
@@ -17355,6 +18759,12 @@
"node": ">=14.17"
}
},
+ "node_modules/ufo": {
+ "version": "1.6.1",
+ "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.1.tgz",
+ "integrity": "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==",
+ "license": "MIT"
+ },
"node_modules/undici": {
"version": "7.11.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-7.11.0.tgz",
@@ -17858,6 +19268,55 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/vscode-jsonrpc": {
+ "version": "8.2.0",
+ "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz",
+ "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
+ "node_modules/vscode-languageserver": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz",
+ "integrity": "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==",
+ "license": "MIT",
+ "dependencies": {
+ "vscode-languageserver-protocol": "3.17.5"
+ },
+ "bin": {
+ "installServerIntoExtension": "bin/installServerIntoExtension"
+ }
+ },
+ "node_modules/vscode-languageserver-protocol": {
+ "version": "3.17.5",
+ "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz",
+ "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==",
+ "license": "MIT",
+ "dependencies": {
+ "vscode-jsonrpc": "8.2.0",
+ "vscode-languageserver-types": "3.17.5"
+ }
+ },
+ "node_modules/vscode-languageserver-textdocument": {
+ "version": "1.0.12",
+ "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz",
+ "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==",
+ "license": "MIT"
+ },
+ "node_modules/vscode-languageserver-types": {
+ "version": "3.17.5",
+ "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz",
+ "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==",
+ "license": "MIT"
+ },
+ "node_modules/vscode-uri": {
+ "version": "3.0.8",
+ "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz",
+ "integrity": "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==",
+ "license": "MIT"
+ },
"node_modules/watchpack": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz",
diff --git a/documentation/package.json b/documentation/package.json
index 504cf01e0..91e003fee 100644
--- a/documentation/package.json
+++ b/documentation/package.json
@@ -17,6 +17,7 @@
"dependencies": {
"@docusaurus/core": "3.7.0",
"@docusaurus/preset-classic": "3.7.0",
+ "@docusaurus/theme-mermaid": "^3.7.0",
"@easyops-cn/docusaurus-search-local": "^0.52.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
From f456dd05da3ac5abae09e6b46701cf2ef50509af Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Tue, 7 Oct 2025 17:21:42 -0700
Subject: [PATCH 04/31] update graphiql references and add swagger, mcp
---
documentation/docs/compiler.md | 5 ++++-
documentation/docs/getting-started.md | 4 ++--
2 files changed, 6 insertions(+), 3 deletions(-)
diff --git a/documentation/docs/compiler.md b/documentation/docs/compiler.md
index 5d6d80a40..f00f1d35b 100644
--- a/documentation/docs/compiler.md
+++ b/documentation/docs/compiler.md
@@ -73,7 +73,10 @@ The run command uses the following engines:
* Postgres as the transactional database engine
* Iceberg+DuckDB as the analytic database engine
* RedPanda as the log engine: The RedPanda cluster is accessible on port 9092 (via Kafka command line tooling).
-* Vertx as the server engine: The GraphQL API is accessible at [http://localhost:8888/graphiql/](http://localhost:8888/graphiql/).
+* Vertx as the server engine:
+ * The GraphQL API is accessible at [http://localhost:8888/v1/graphiql/](http://localhost:8888/v1/graphiql/).
+  * The Swagger UI for the REST API is accessible at [http://localhost:8888/v1/swagger-ui](http://localhost:8888/v1/swagger-ui).
+  * The MCP API is accessible at `http://localhost:8888/v1/mcp/`.
### Data Access
diff --git a/documentation/docs/getting-started.md b/documentation/docs/getting-started.md
index 29c4ea33e..e31e4f672 100644
--- a/documentation/docs/getting-started.md
+++ b/documentation/docs/getting-started.md
@@ -79,7 +79,7 @@ Note, that we are mapping the local directory so the compiler has access to the
## Access the API
-The pipeline is exposed through a GraphQL API that you can access at [http://localhost:8888/graphiql/](http://localhost:8888/graphiql/) in your browser.
+The pipeline is exposed through a GraphQL API that you can access at [http://localhost:8888/v1/graphiql/](http://localhost:8888/v1/graphiql/) in your browser.
To add user token requests, we run the following mutation:
```graphql
@@ -110,7 +110,7 @@ subscription {
}
}
```
-*While* the subscription is running, open a new browser tab for [GraphiQL](http://localhost:8888/graphiql/) and execute this mutation:
+*While* the subscription is running, open a new browser tab for [GraphiQL](http://localhost:8888/v1/graphiql/) and execute this mutation:
```graphql
mutation {
UserTokens(event: {userid: 2, tokens:400000}) {
From e7e7d28f424766efd5ec2c521cc45a3d6ee68bc5 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Wed, 8 Oct 2025 14:39:20 -0700
Subject: [PATCH 05/31] updating configuration documentation.
---
documentation/docs/configuration-default.md | 122 +++++++
documentation/docs/configuration.md | 334 +++++++++---------
.../cost/SimpleCostAnalysisModel.java | 15 +-
.../resources/jsonSchema/packageSchema.json | 2 +
4 files changed, 296 insertions(+), 177 deletions(-)
create mode 100644 documentation/docs/configuration-default.md
diff --git a/documentation/docs/configuration-default.md b/documentation/docs/configuration-default.md
new file mode 100644
index 000000000..a6e2e5cb3
--- /dev/null
+++ b/documentation/docs/configuration-default.md
@@ -0,0 +1,122 @@
+# Default DataSQRL `package.json` Configuration
+
+The following is the [default configuration file](https://raw.githubusercontent.com/DataSQRL/sqrl/refs/heads/main/sqrl-planner/src/main/resources/default-package.json) that user-provided configuration files are merged on top of. It provides the default values for all configuration options.
+
+```json
+{
+ "version": "1",
+ "enabled-engines": ["vertx", "postgres", "kafka", "flink"],
+ "compiler": {
+ "logger": "print",
+ "extended-scalar-types": true,
+ "compile-flink-plan": true,
+ "cost-model": "DEFAULT",
+ "explain": {
+ "text": true,
+ "sql": false,
+ "logical": true,
+ "physical": false,
+ "sorted": true,
+ "visual": true
+ },
+ "api": {
+ "protocols": ["GRAPHQL", "REST", "MCP"],
+ "endpoints": "FULL",
+ "add-prefix": true,
+ "max-result-depth": 3,
+ "default-limit": 10
+ }
+ },
+ "engines": {
+ "flink": {
+ "config": {
+ "execution.runtime-mode": "STREAMING",
+ "execution.target": "local",
+ "execution.attached": true,
+ "rest.address": "localhost",
+ "rest.port": 8081,
+ "state.backend.type": "rocksdb",
+ "table.exec.resource.default-parallelism": 1,
+ "taskmanager.memory.network.max": "800m"
+ }
+ },
+ "duckdb": {
+ "url": "jdbc:duckdb:"
+ }
+ },
+ "connectors": {
+ "kafka-mutation": {
+ "connector": "kafka",
+ "format": "flexible-json",
+ "flexible-json.timestamp-format.standard": "ISO-8601",
+ "properties.bootstrap.servers": "${KAFKA_BOOTSTRAP_SERVERS}",
+ "properties.group.id": "${KAFKA_GROUP_ID}",
+ "properties.auto.offset.reset": "earliest",
+ "topic": "${sqrl:table-name}"
+ },
+ "kafka": {
+ "connector": "kafka",
+ "format": "flexible-json",
+ "flexible-json.timestamp-format.standard": "ISO-8601",
+ "properties.bootstrap.servers": "${KAFKA_BOOTSTRAP_SERVERS}",
+ "properties.group.id": "${KAFKA_GROUP_ID}",
+ "topic": "${sqrl:table-name}"
+ },
+ "iceberg": {
+ "connector": "iceberg",
+ "catalog-table": "${sqrl:table-name}"
+ },
+ "postgres": {
+ "connector": "jdbc-sqrl",
+ "username": "${POSTGRES_USERNAME}",
+ "password": "${POSTGRES_PASSWORD}",
+ "url": "jdbc:postgresql://${POSTGRES_AUTHORITY}",
+ "driver": "org.postgresql.Driver",
+ "table-name": "${sqrl:table-name}"
+ },
+ "postgres_log-source": {
+ "connector": "postgres-cdc",
+ "hostname": "${POSTGRES_HOST}",
+ "port": "5432",
+ "username": "${POSTGRES_USERNAME}",
+ "password": "${POSTGRES_PASSWORD}",
+ "database-name": "datasqrl",
+ "schema-name": "public",
+ "table-name": "${sqrl:table-name}",
+ "slot.name": "flink_slot",
+ "decoding.plugin.name": "pgoutput",
+ "debezium.slot.drop_on_stop": "false"
+ },
+ "postgres_log-sink": {
+ "connector": "jdbc-sqrl",
+ "password": "${POSTGRES_PASSWORD}",
+ "driver": "org.postgresql.Driver",
+ "username": "${POSTGRES_USERNAME}",
+ "url": "jdbc:postgresql://${POSTGRES_AUTHORITY}",
+ "table-name": "${sqrl:table-name}"
+ },
+ "print": {
+ "connector": "print",
+ "print-identifier": "${sqrl:table-name}"
+ }
+ },
+ "test-runner": {
+ "snapshot-folder": "./snapshots",
+ "test-folder": "./tests",
+ "delay-sec": 30,
+ "mutation-delay-sec": 0,
+ "required-checkpoints": 0
+ }
+}
+```
+
+## Connector Template Variables
+
+The connector templates configured under `connectors` use SQRL-specific variables like `${sqrl:table-name}`.
+
+SQRL-specific variables use the `sqrl:` prefix and are substituted by the compiler at compile time. They are primarily used for templating inside connector configuration templates and support the following identifiers:
+`table-name`, `original-table-name`, `filename`, `format`, and `kafka-key`.
+
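+For illustration, assuming a script that defines a table called `MyOrders` (a hypothetical name), the `kafka` connector template above would resolve roughly as follows; the exact resolved options depend on the planned pipeline:
+
+```json5
+{
+  "connector": "kafka",
+  "format": "flexible-json",
+  "flexible-json.timestamp-format.standard": "ISO-8601",
+  "properties.bootstrap.servers": "${KAFKA_BOOTSTRAP_SERVERS}",   // environment variables are resolved at runtime
+  "properties.group.id": "${KAFKA_GROUP_ID}",
+  "topic": "MyOrders"                                             // ${sqrl:table-name} substituted at compile time
+}
+```
+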
+:::warning
+Unresolved `${sqrl:*}` placeholders raise a validation error.
+:::
\ No newline at end of file
diff --git a/documentation/docs/configuration.md b/documentation/docs/configuration.md
index 1d1b804bf..3a88cad98 100644
--- a/documentation/docs/configuration.md
+++ b/documentation/docs/configuration.md
@@ -1,10 +1,171 @@
# DataSQRL Configuration (`package.json` file)
-DataSQRL projects are configured with one or more **JSON** files.
-Unless a file is passed explicitly to `datasqrl compile -c ...`, the compiler looks for a `package.json`
-in the working directory; if none is found the **built-in default** (shown [here](#default-configuration)) is applied.
+DataSQRL projects are configured with one or more `*package.json` files, which are merged in the order they are provided to the [DataSQRL command](compiler): latter files override fields in earlier ones, objects are *deep-merged*, and array values are replaced wholesale. User-provided configuration files are merged on top of the [default `package.json`](configuration-default).
+
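+As a sketch of the merge semantics, assume the hypothetical override file `prod.package.json` below is passed as the last configuration file:
+
+```json5
+// prod.package.json (hypothetical override file)
+{
+  "enabled-engines": ["flink", "postgres", "vertx"],   // arrays are replaced wholesale (no kafka)
+  "engines": {
+    "flink": {
+      "config": {
+        "table.exec.resource.default-parallelism": 4   // deep-merged into the default flink config
+      }
+    }
+  }
+}
+```
+
+All other fields keep the values from the earlier configuration files or the defaults.
+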
+The `version` field specifies the configuration schema version, which is currently `1`.
+
+---
+
+## Engines (`enabled-engines` and `engines`)
+
+The engines that the pipeline compiles to.
+
+```json5
+{
+ "enabled-engines": ["flink", "postgres", "kafka", "vertx"]
+}
+```
+
+DataSQRL supports the following engines:
+* **flink**: Apache Flink is a streaming and batch data processor
+* **postgres**: PostgreSQL is a realtime database
+* **kafka**: Apache Kafka is a streaming data platform (i.e. log engine)
+* **iceberg**: Apache Iceberg is an analytic database format. Iceberg must be paired with a query engine for data access
+* **duckdb**: DuckDB is a vectorized database query engine that can read Iceberg tables.
+* **snowflake**: Snowflake is an analytic database query engine that can read Iceberg tables.
+* **vertx**: Eclipse Vert.x is a reactive server framework
+
+Guidelines for choosing the enabled engines in a pipeline:
+* Always choose one data processor (i.e. "flink")
+* Choose a log engine (i.e. "kafka") to produce data streams
+* Choose a database engine (realtime or analytic) to produce data that can be queried
+* Choose a server engine (i.e. "vertx") to produce data APIs (e.g. GraphQL, REST, MCP)
+* Choose a log engine (i.e. "kafka") to support data ingestion or subscriptions in the API
+* If picking an analytic table format as the database, also choose one or more compatible query engines.
+* Choose at most one log or server engine, but choosing multiple database engines is supported.
+* When choosing a query engine that operates in the cloud (e.g. "snowflake"), substitute a locally executable query engine (e.g. "duckdb") for testing and running the pipeline locally; see the example below.
+
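+For example, a locally runnable pipeline that serves analytic queries might enable the following engines (a sketch following the guidelines above):
+
+```json5
+{
+  "enabled-engines": ["flink", "iceberg", "duckdb", "vertx"]   // data processor, table format, local query engine, API server
+}
+```
+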
+The individual engines are configured under the `engines` field. The following example configures a Flink-specific setting:
+
+```json5
+{
+ "engines": {
+ "flink": {
+ "config": {
+ "table.exec.source.idle-timeout": "10 sec"
+ }
+ }
+ }
+}
+```
+
+Refer to the engine configuration documentation for more information on how to configure individual engines.
+
+## Source Files (`script`)
+
+Configures the main SQRL script to compile, the (optional) GraphQL schema for the exposed API, and (optional) list of operations defined as GraphQL queries.
+
+The `config` JSON object is passed to the Mustache templating engine to substitute template variable occurrences (e.g. `{{table}}`) before the script is compiled.
+
+```json5
+{
+ "script": {
+ "main": "my-project.sqrl", // Main SQRL script for pipeline
+ "graphql": "api/schema.v1.graphqls", // GraphQL schema defines the API
+ "operations": ["api/operations-v1/myop1.graphql"], //List of GraphQL queries that define operations which are exposed as API endpoints
+ "config": { //Arbitrary JSON object used by the mustache templating engine to instantiate SQRL files
+ "table": "orders",
+ "filters": [
+{ "field": "total_amount", "isNull": false },
+{ "field": "coupon_code", "isNull": true },
+ ]
+ }
+ }
+}
+```
+
+The example `script.config` above could be used to instantiate the following table definition in SQRL:
+```sql
+MyTable := SELECT
+ o.*
+ FROM {{table}} AS o
+ WHERE o.tenant_id > 0
+ {{#filters}}
+  AND o.{{field}} IS {{^isNull}}NOT{{/isNull}} NULL
+ {{/filters}}
+ ORDER BY o.tenant_id DESC;
+```
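+
+With the `script.config` shown above, the template would render to roughly the following SQL (a sketch of the Mustache expansion):
+
+```sql
+-- illustrative rendering of the template with the config above
+MyTable := SELECT
+    o.*
+  FROM orders AS o
+  WHERE o.tenant_id > 0
+    AND o.total_amount IS NOT NULL
+    AND o.coupon_code IS NULL
+  ORDER BY o.tenant_id DESC;
+```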
+
+## Test-Runner (`test-runner`)
+
+Configures how the DataSQRL test runner executes tests.
+For streaming pipelines, use `required-checkpoints` to take snapshots only after a reliable number of completed Flink checkpoints. Otherwise, configure a wall-clock delay via `delay-sec`.
+
+```json5
+{
+ "test-runner": {
+ "snapshot-folder": "snapshots/myproject/", // Snapshots output directory (default: "./snapshots")
+ "test-folder": "api/tests/", // Directory containing test GraphQL queries (default: "./tests")
+ "delay-sec": 30, // Wait between data-load and taking snapshot in sec. Set -1 to disable (default: 30)
+ "mutation-delay-sec": 0, // Pause(s) between mutation queries (default: 0)
+ "required-checkpoints": 0, // Minimum completed Flink checkpoints before taking snapshots (requires delay-sec = -1)
+ "create-topics": ["topic1", "topic2"], // Kafka topics to create before tests start
+ "headers": { // Any HTTP headers to add during the test execution. For example, JWT auth header
+ "Authorization": "Bearer token"
+ }
+ }
+}
+```
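+
+For instance, a streaming test setup that snapshots after completed checkpoints instead of a fixed delay could look like this sketch (the checkpoint count is illustrative):
+
+```json5
+{
+  "test-runner": {
+    "delay-sec": -1,            // disable the wall-clock delay
+    "required-checkpoints": 2   // snapshot only after at least 2 completed Flink checkpoints
+  }
+}
+```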
+
+## Compiler (`compiler`)
+
+Configuration options that control the compiler, such as where logging output is produced, how the pipeline plan is written out, which cost model is used to allocate data processing steps to engines, and which protocols are exposed in the API.
+
+```json5
+{
+ "compiler": {
+ "logger": "print", // "print" | "none"
+ "extended-scalar-types": true, // support extended scalar types in generated GraphQL
+ "compile-flink-plan": true, // produce a Flink physical plans (not supported in batch)
+ "cost-model": "DEFAULT", // cost model to use for DAG optimization ("DEFAULT" | "READ" | "WRITE")
+
+ "explain": { // controls what and how the compiler writes pipeline plans to build/pipeline_*
+ "text": true, // create text version of the plan
+ "sql": false, // include SQL code in the plan
+ "logical": true, // include the logical plan for each table
+ "physical": false, // include the physical plan for each table
+ "sorted": true, // ensure deterministic ordering (mostly for tests)
+ "visual": true // create a visual version of the plan
+ },
+
+ "api": {
+ "protocols": [ // protocols that are being exposed by the server
+ "GRAPHQL",
+ "REST",
+ "MCP"
+ ],
+ "endpoints": "FULL", // endpoint generation strategy ("FULL", "GRAPHQL", "OPS_ONLY")
+ "add-prefix": true, // add an operation-type prefix to function names to ensure uniqueness
+ "max-result-depth": 3, // maximum depth of graph traversal when generating operations from a schema
+ "default-limit": 10 // default query result limit
+ }
+ }
+}
+```
+
+## Connector Templates (`connectors`)
+
+Connector templates are used to configure how the engines in the pipeline exchange data. The connector templates use Flink SQL connector configuration options with variables. Only very advanced use cases require adjustments to the connector templates. Refer to the [default configuration](configuration-default) for documentation of all connector templates.
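+
+Because configuration files are deep-merged, a connector template can be tweaked by overriding only the keys that need to change. As a sketch, the following hypothetical override adjusts a single option of the default `kafka-mutation` template:
+
+```json5
+{
+  "connectors": {
+    "kafka-mutation": {
+      "properties.auto.offset.reset": "latest"   // overrides the default "earliest"; all other template keys are inherited
+    }
+  }
+}
+```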
+
+
+## Environment Variables (`${VAR}`)
+
+Environment variables (e.g. `${POSTGRES_PASSWORD}`) can be referenced inside the configuration files and SQRL scripts. They are dynamically resolved by the DataSQRL runner when the pipeline is launched. If an environment variable is not set, the placeholder is left unchanged.
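+
+For example, a hypothetical override might reference environment variables that are resolved at launch time (the variable names below are illustrative):
+
+```json5
+{
+  "engines": {
+    "snowflake": {
+      // ${SNOWFLAKE_ACCOUNT} and ${SNOWFLAKE_WAREHOUSE} are hypothetical environment variables
+      "url": "jdbc:snowflake://${SNOWFLAKE_ACCOUNT}.snowflakecomputing.com/?warehouse=${SNOWFLAKE_WAREHOUSE}"
+    }
+  }
+}
+```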
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-Multiple files can be provided; they are merged **in order** β latter files override earlier ones, objects are *deep-merged*, and array values are replaced wholesale.
---
@@ -12,15 +173,8 @@ Multiple files can be provided; they are merged **in order** β latter files ov
| Key | Type | Default | Purpose |
|-------------------|--------------|------------------------------------------|----------------------------------------------------------------|
-| `version` | **number** | `1` | Configuration schema version β must be `1`. |
-| `enabled-engines` | **string[]** | `["vertx","postgres","kafka","flink"]` | Ordered list of engines that form the runtime pipeline. |
| `engines` | **object** | β | Engine specific configuration (see below). |
| `connectors` | **object** | [see defaults](#connectors-connectors) | External system connectors configuration (see below). |
-| `compiler` | **object** | [see defaults](#compiler-compiler) | Controls compilation, logging, and generated artifacts. |
-| `discovery` | **object** | `{}` | Rules for automatic table discovery when importing data files. |
-| `script` | **object** | β | Points to the main SQRL script and GraphQL schema. |
-| `package` | **object** | β | Optional metadata (name, description, etc.) for publishing. |
-| `test-runner` | **object** | [see defaults](#test-runner-test-runner) | Integration test execution settings (see below). |
---
@@ -152,101 +306,6 @@ Used as a *table-format* engine together with a query engine such as Flink, Snow
---
-## Compiler (`compiler`)
-
-```json5
-{
- "compiler": {
- "logger": "print", // "print" | any configured log engine | "none"
- "extended-scalar-types": true, // expose extended scalar types in generated GraphQL
- "compile-flink-plan": true, // compile Flink physical plans where supported
- "cost-model": "DEFAULT", // cost model to use for DAG optimization
-
- "explain": { // artifacts in build/pipeline_*.*
- "text": true,
- "sql": false,
- "logical": true,
- "physical": false,
- "sorted": true, // deterministic ordering (mostly for tests)
- "visual": true
- },
-
- "api": {
- "protocols": [ // protocols that are being exposed by the server
- "GRAPHQL",
- "REST",
- "MCP"
- ],
- "endpoints": "FULL", // endpoint generation strategy (FULL, GRAPHQL, OPS_ONLY)
- "add-prefix": true, // add an operation-type prefix before function names
- "max-result-depth": 3, // maximum depth of graph traversal when generating operations from a schema
- "default-limit": 10 // default endpoint result limit
- }
- }
-}
-```
-
----
-
-## Discovery (`discovery`)
-
-| Key | Type | Default | Purpose |
-|-----------|--------------------|---------|-------------------------------------------------------------------------------------------------------------|
-| `pattern` | **string (regex)** | `null` | Filters which external tables are automatically exposed in `IMPORT β¦` statements. Example: `"^public\\..*"` |
-
----
-
-## Script (`script`)
-
-| Key | Type | Description |
-|--------------|--------------|-----------------------------------------------------------------------------------------------------|
-| `main` | **string** | Path to the main `.sqrl` file. |
-| `graphql` | **string** | Optional GraphQL schema file (defaults to `schema.graphqls`). If the file does not exist, inferred. |
-| `operations` | **string[]** | Optional GraphQL operation definitions. |
-| `config` | **object** | Script related configuration options, like schema variant templating. |
-
----
-
-## Package Metadata (`package`)
-
-| Key | Required | Description |
-|-----------------|----------|------------------------------------------------------|
-| `name` | **yes** | Reverse-DNS style identifier (`org.project.module`). |
-| `description` | no | Short summary. |
-| `license` | no | SPDX license id or free-text. |
-| `homepage` | no | Web site. |
-| `documentation` | no | Docs link. |
-| `topics` | no | String array of tags/keywords. |
-
----
-
-## Test-Runner (`test-runner`)
-
-| Key | Type | Default | Meaning |
-|------------------------|--------------|---------------|----------------------------------------------------------------------------------------|
-| `snapshot-folder` | **string** | `./snapshots` | Snapshots output directory. |
-| `test-folder` | **string** | `./tests` | Tests output directory. |
-| `delay-sec` | **number** | `30` | Wait between data-load and snapshot. Set `-1` to disable. |
-| `mutation-delay-sec` | **number** | `0` | Pause(s) between mutation queries. |
-| `required-checkpoints` | **number** | `0` | Minimum completed Flink checkpoints before assertions run (requires `delay-sec = -1`). |
-| `create-topics` | **string[]** | - | Kafka topics to create before tests start. |
-| `headers` | **object** | - | Any HTTP headers to add during the test execution. For example, JWT auth header. |
-
----
-
-## Templating & Variable Resolution
-
-The DataSQRL launcher supports dynamic resolution of variable placeholders at runtime.
-
-* **Environment variables**: use `${VAR_NAME}` as a placeholder. Example: `${POSTGRES_PASSWORD}`.
-* **SQRL variables** use `${sqrl:}` and are filled automatically by the compiler, mostly inside connector templates.
- Common identifiers include `table-name`, `original-table-name`, `filename`, `format`, and `kafka-key`.
-
-:::warning
-Unresolved `${sqrl:*}` placeholders raise a validation error.
-:::
-
----
## Internal Environment Variables
@@ -268,67 +327,4 @@ we use the following environment variables internally:
---
-## Default Configuration
-
-The built-in fallback (excerpt - full version [here](https://raw.githubusercontent.com/DataSQRL/sqrl/refs/heads/main/sqrl-planner/src/main/resources/default-package.json)):
-```json5
-{
- "version": 1,
- "enabled-engines": ["vertx", "postgres", "kafka", "flink"],
- "engines": {
- "flink": {
- "config": {
- "execution.runtime-mode": "STREAMING",
- "execution.target": "local",
- "execution.attached": true,
- "rest.address": "localhost",
- "rest.port": 8081,
- "state.backend.type": "rocksdb",
- "table.exec.resource.default-parallelism": 1,
- "taskmanager.memory.network.max": "800m"
- }
- },
- "duckdb": {
- "url": "jdbc:duckdb:"
- }
- },
- "compiler": {
- "logger": "print",
- "extended-scalar-types": true,
- "compile-flink-plan": true,
- "cost-model": "DEFAULT",
- "explain": {
- "text": true,
- "sql": false,
- "logical": true,
- "physical": false,
- "sorted": true,
- "visual": true
- },
- "api": {
- "protocols": ["GRAPHQL", "REST", "MCP"],
- "endpoints": "FULL",
- "add-prefix": true,
- "max-result-depth": 3,
- "default-limit": 10
- }
- },
- "connectors": {
- "postgres": { "connector": "jdbc-sqrl", /*...*/ },
- "kafka-mutation": { "connector" : "kafka", /*...*/ },
- "kafka": { "connector" : "kafka", /*...*/ },
- "iceberg": { "connector": "iceberg", /*...*/ },
- "postgres_log-source": { "connector": "postgres-cdc", /*...*/ },
- "postgres_log-sink": { "connector": "jdbc-sqrl", /*...*/ },
- "print": { "connector": "print", /*...*/ }
- },
- "test-runner": {
- "snapshot-folder": "./snapshots",
- "test-folder": "./tests",
- "delay-sec": 30,
- "mutation-delay-sec": 0,
- "required-checkpoints": 0
- }
-}
-```
diff --git a/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java b/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java
index 087c8bf72..913f3603f 100644
--- a/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java
+++ b/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java
@@ -26,9 +26,10 @@
public record SimpleCostAnalysisModel(@NonNull Type type) implements CostModel {
public enum Type {
- DEFAULT,
- READ,
- WRITE
+ DEFAULT, // Favors processing data at ingestion time unless the operation is too expensive (e.g.
+ // inner join)
+ READ, // Favors processing data at query time
+ WRITE // Favors processing data at ingestion time
}
public Simple getSourceSinkCost() {
@@ -51,11 +52,9 @@ public Simple getCost(ExecutionStage executionStage, TableAnalysis tableAnalysis
case STREAMS:
cost =
switch (type) {
- // We assume that pre-computing is generally cheaper (by factor of 10) unless
- // (standard)
- // joins are
- // involved which can lead to combinatorial explosion. So, we primarily cost the
- // joins
+ // We assume that pre-computing is generally cheaper (by factor of 10) unless
+ // (standard) joins are involved which can lead to combinatorial explosion.
+ // So, we primarily cost the joins
case DEFAULT -> joinCost(tableAnalysis.getCosts()) / 10;
case WRITE -> cost / 10; // Make it always cheaper than database
case READ -> cost * 2; // Make it more expensive than database to favor reads
diff --git a/sqrl-planner/src/main/resources/jsonSchema/packageSchema.json b/sqrl-planner/src/main/resources/jsonSchema/packageSchema.json
index 360039721..f798fc050 100644
--- a/sqrl-planner/src/main/resources/jsonSchema/packageSchema.json
+++ b/sqrl-planner/src/main/resources/jsonSchema/packageSchema.json
@@ -3,6 +3,7 @@
"type": "object",
"properties": {
"version": {
+ "description": "The version number for the configuration schema",
"oneOf": [
{
"type": "string"
@@ -13,6 +14,7 @@
]
},
"enabled-engines": {
+ "description": "List of engines to enable for the data pipeline. The compiler builds deployment artifacts for the enabled engines during the build.",
"oneOf": [
{
"type": "array",
From 56a690ced83e84204be8c308e8dba3fe91f5ced8 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Wed, 8 Oct 2025 14:48:52 -0700
Subject: [PATCH 06/31] updating configuration documentation.
---
documentation/docs/configuration.md | 29 -----------------------------
documentation/sidebars.ts | 14 ++++++++++++--
2 files changed, 12 insertions(+), 31 deletions(-)
diff --git a/documentation/docs/configuration.md b/documentation/docs/configuration.md
index 3a88cad98..6f4dcab56 100644
--- a/documentation/docs/configuration.md
+++ b/documentation/docs/configuration.md
@@ -166,16 +166,6 @@ Environment variables (e.g. `${POSTGRES_PASSWORD}`) can be referenced inside the
-
----
-
-## Top-Level Keys
-
-| Key | Type | Default | Purpose |
-|-------------------|--------------|------------------------------------------|----------------------------------------------------------------|
-| `engines` | **object** | β | Engine specific configuration (see below). |
-| `connectors` | **object** | [see defaults](#connectors-connectors) | External system connectors configuration (see below). |
-
---
## Engines (`engines`)
@@ -286,25 +276,6 @@ Used as a *table-format* engine together with a query engine such as Flink, Snow
| `external-volume` | **string** | β | Snowflake external volume name. |
| `url` | **string** | β | Full JDBC URL including auth params. |
----
-
-## Connectors (`connectors`)
-
-```json5
-{
- "connectors": {
- "postgres": { "connector": "jdbc-sqrl", /*...*/ },
- "kafka-mutation": { "connector" : "kafka", /*...*/ },
- "kafka": { "connector" : "kafka", /*...*/ },
- "iceberg": { "connector": "iceberg", /*...*/ },
- "postgres_log-source": { "connector": "postgres-cdc", /*...*/ },
- "postgres_log-sink": { "connector": "jdbc-sqrl", /*...*/ },
- "print": { "connector": "print", /*...*/ }
- }
-}
-```
-
----
## Internal Environment Variables
diff --git a/documentation/sidebars.ts b/documentation/sidebars.ts
index ac643b161..15ccd6d2e 100644
--- a/documentation/sidebars.ts
+++ b/documentation/sidebars.ts
@@ -62,9 +62,19 @@ const sidebars = {
label: 'π Source & Sink Connectors',
},
{
- type: 'doc',
- id: 'configuration',
+ type: 'category',
label: 'βοΈ Configuration',
+ link: {
+ type: 'doc',
+ id: 'configuration',
+ },
+ items: [
+ {
+ type: 'doc',
+ id: 'configuration-default',
+ label: 'Default Configuration',
+ },
+ ],
},
{
type: 'doc',
From 25ba00df9c92622aeb5cc42a9b60b9788e8a276d Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Wed, 8 Oct 2025 15:19:22 -0700
Subject: [PATCH 07/31] break out engine configuration
---
.../docs/configuration-engine/duckdb.md | 30 ++++
.../docs/configuration-engine/flink.md | 42 ++++++
.../docs/configuration-engine/iceberg.md | 31 ++++
.../docs/configuration-engine/kafka.md | 28 ++++
.../docs/configuration-engine/postgres.md | 29 ++++
.../docs/configuration-engine/snowflake.md | 35 +++++
.../docs/configuration-engine/vertx.md | 59 ++++++++
documentation/docs/configuration.md | 142 +-----------------
documentation/sidebars.ts | 35 +++++
9 files changed, 297 insertions(+), 134 deletions(-)
create mode 100644 documentation/docs/configuration-engine/duckdb.md
create mode 100644 documentation/docs/configuration-engine/flink.md
create mode 100644 documentation/docs/configuration-engine/iceberg.md
create mode 100644 documentation/docs/configuration-engine/kafka.md
create mode 100644 documentation/docs/configuration-engine/postgres.md
create mode 100644 documentation/docs/configuration-engine/snowflake.md
create mode 100644 documentation/docs/configuration-engine/vertx.md
diff --git a/documentation/docs/configuration-engine/duckdb.md b/documentation/docs/configuration-engine/duckdb.md
new file mode 100644
index 000000000..1b0166b77
--- /dev/null
+++ b/documentation/docs/configuration-engine/duckdb.md
@@ -0,0 +1,30 @@
+# DuckDB Engine Configuration
+
+DuckDB is a vectorized database query engine that excels at analytical queries and can read Iceberg tables efficiently.
+
+## Configuration Options
+
+| Key | Type | Default | Description |
+|-------|------------|------------------|----------------|
+| `url` | **string** | `"jdbc:duckdb:"` | Full JDBC URL for database connection |
+
+## Example Configuration
+
+```json
+{
+ "engines": {
+ "duckdb": {
+ "url": "jdbc:duckdb:"
+ }
+ }
+}
+```
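+
+For a persistent database rather than the default in-memory mode, the JDBC URL can point at a file (the path below is illustrative):
+
+```json5
+{
+  "engines": {
+    "duckdb": {
+      "url": "jdbc:duckdb:/data/analytics.duckdb"   // hypothetical file path for a persistent database
+    }
+  }
+}
+```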
+
+## Usage Notes
+
+- Ideal for local development and testing of analytical workloads
+- Excellent performance on analytical queries with vectorized execution
+- Can read Iceberg tables directly without additional infrastructure
+- Supports both in-memory and persistent database modes
+- Perfect for prototyping before deploying to cloud query engines like Snowflake
+- Lightweight alternative to larger analytical databases
\ No newline at end of file
diff --git a/documentation/docs/configuration-engine/flink.md b/documentation/docs/configuration-engine/flink.md
new file mode 100644
index 000000000..f01cff8a4
--- /dev/null
+++ b/documentation/docs/configuration-engine/flink.md
@@ -0,0 +1,42 @@
+# Flink Engine Configuration
+
+Apache Flink is a streaming and batch data processor that serves as the core data processing engine in DataSQRL pipelines.
+
+## Configuration Options
+
+| Key | Type | Default | Notes |
+|--------------|------------|-----------|----------------------------------------------------------------------------------------------------|
+| `config` | **object** | see below | Copied verbatim into the generated Flink SQL job (e.g. `"table.exec.source.idle-timeout": "5 s"`). |
+
+## Example Configuration
+
+```json
+{
+ "engines": {
+ "flink": {
+ "config": {
+ "execution.runtime-mode": "STREAMING",
+ "execution.target": "local",
+ "execution.attached": true,
+ "rest.address": "localhost",
+ "rest.port": 8081,
+ "state.backend.type": "rocksdb",
+ "table.exec.resource.default-parallelism": 1,
+ "taskmanager.memory.network.max": "800m"
+ }
+ }
+ }
+}
+```
+
+## Built-in Connector Templates
+
+The following connector templates are available when using Flink:
+- `postgres` - JDBC connector for PostgreSQL
+- `postgres_log-source` - PostgreSQL CDC source connector
+- `postgres_log-sink` - PostgreSQL sink connector
+- `kafka` - Kafka connector for streaming data
+- `kafka-keyed` - Keyed Kafka connector
+- `kafka-upsert` - Kafka upsert connector
+- `iceberg` - Apache Iceberg connector
+- `print` - Print connector for debugging
\ No newline at end of file
diff --git a/documentation/docs/configuration-engine/iceberg.md b/documentation/docs/configuration-engine/iceberg.md
new file mode 100644
index 000000000..48b5a9a54
--- /dev/null
+++ b/documentation/docs/configuration-engine/iceberg.md
@@ -0,0 +1,31 @@
+# Iceberg Engine Configuration
+
+Apache Iceberg is an analytic database format that provides ACID transactions, schema evolution, and time travel capabilities for large analytic datasets.
+
+## Configuration Options
+
+Iceberg is used as a *table-format* engine and must be paired with a query engine such as Snowflake or DuckDB for query access.
+
+## Basic Configuration
+
+```json
+{
+ "engines": {
+ "iceberg": {
+ "config": {
+ // Iceberg-specific configuration options
+ }
+ }
+ }
+}
+```
+
+## Usage Notes
+
+- Iceberg serves as a storage format, not a query engine
+- Must be combined with compatible query engines:
+ - **DuckDB**: For local analytics and testing
+ - **Snowflake**: For cloud-scale analytics
+- Provides schema evolution capabilities for long-running pipelines
+- Supports time travel queries for historical data analysis
+- Optimized for large-scale analytical workloads
\ No newline at end of file
diff --git a/documentation/docs/configuration-engine/kafka.md b/documentation/docs/configuration-engine/kafka.md
new file mode 100644
index 000000000..3972b4056
--- /dev/null
+++ b/documentation/docs/configuration-engine/kafka.md
@@ -0,0 +1,28 @@
+# Kafka Engine Configuration
+
+Apache Kafka is a streaming data platform that serves as the log engine in DataSQRL pipelines for handling data streams and event logs.
+
+## Configuration Options
+
+The default configuration only declares the engine; topic definitions are injected at **plan** time. Additional keys (e.g. `bootstrap.servers`) may be added under `config`.
+
+## Example Configuration
+
+```json
+{
+ "engines": {
+ "kafka": {
+ "config": {
+ "bootstrap.servers": "localhost:9092"
+ }
+ }
+ }
+}
+```
+
+## Usage Notes
+
+- Kafka topics are automatically created based on your SQRL table definitions
+- Topic configurations are generated during the compilation process
+- For custom Kafka settings, add them under the `config` section
+- Kafka serves as the messaging backbone between different engines in the pipeline
\ No newline at end of file
diff --git a/documentation/docs/configuration-engine/postgres.md b/documentation/docs/configuration-engine/postgres.md
new file mode 100644
index 000000000..6caf80a0e
--- /dev/null
+++ b/documentation/docs/configuration-engine/postgres.md
@@ -0,0 +1,29 @@
+# PostgreSQL Engine Configuration
+
+PostgreSQL is a realtime database that stores the materialized views and tables generated by your DataSQRL pipeline for low-latency querying.
+
+## Configuration Options
+
+No mandatory configuration keys are required. Physical DDL (tables, indexes, views) is produced automatically by the DataSQRL compiler.
+
+## Basic Configuration
+
+```json
+{
+ "engines": {
+ "postgres": {
+ "config": {
+ // Optional PostgreSQL-specific settings
+ }
+ }
+ }
+}
+```
+
+## Usage Notes
+
+- Database schema is automatically generated from your SQRL script
+- Tables, indexes, and views are created based on the compiled data pipeline
+- Connection parameters are typically provided via environment variables
+- The engine handles both real-time data ingestion and query serving
+- Optimized for low-latency reads of materialized data
\ No newline at end of file
diff --git a/documentation/docs/configuration-engine/snowflake.md b/documentation/docs/configuration-engine/snowflake.md
new file mode 100644
index 000000000..1e4181407
--- /dev/null
+++ b/documentation/docs/configuration-engine/snowflake.md
@@ -0,0 +1,35 @@
+# Snowflake Engine Configuration
+
+Snowflake is a cloud-based analytic database query engine that can read Iceberg tables and provide enterprise-scale analytical capabilities.
+
+## Configuration Options
+
+| Key | Type | Default | Description |
+|-------------------|------------|---------|--------------------------------------|
+| `catalog-name`    | **string** | -       | Glue catalog name for metadata        |
+| `external-volume` | **string** | -       | Snowflake external volume name        |
+| `url`             | **string** | -       | Full JDBC URL including auth params   |
+
+## Example Configuration
+
+```json
+{
+ "engines": {
+ "snowflake": {
+ "catalog-name": "my-glue-catalog",
+ "external-volume": "my-external-volume",
+ "url": "jdbc:snowflake://account.snowflakecomputing.com/?user=username&password=password&warehouse=warehouse&db=database&schema=schema"
+ }
+ }
+}
+```
+
+## Usage Notes
+
+- Requires all three configuration parameters
+- Designed for large-scale analytical workloads in the cloud
+- Integrates with AWS Glue for metadata management
+- Uses external volumes for accessing Iceberg data
+- Authentication parameters should be included in the JDBC URL
+- For local development, consider using DuckDB as a substitute
+- Provides enterprise features like data sharing and governance
\ No newline at end of file
diff --git a/documentation/docs/configuration-engine/vertx.md b/documentation/docs/configuration-engine/vertx.md
new file mode 100644
index 000000000..54009e271
--- /dev/null
+++ b/documentation/docs/configuration-engine/vertx.md
@@ -0,0 +1,59 @@
+# Vert.x Engine Configuration
+
+Eclipse Vert.x is a reactive server framework that serves as the GraphQL API server, routing queries to the backing database/log engines.
+
+## Configuration Options
+
+| Key | Type | Default | Notes |
+|--------------|------------|-----------|---------------------------|
+| `authKind` | **string** | `"NONE"` | Authentication type: `"NONE"` or `"JWT"` |
+| `config` | **object** | see below | Vert.x-specific configuration including JWT settings |
+
+## Basic Configuration
+
+```json
+{
+ "engines": {
+ "vertx": {
+ "authKind": "NONE"
+ }
+ }
+}
+```
+
+## JWT Authentication Configuration
+
+For secure APIs with JWT authentication:
+
+```json5
+{
+ "engines": {
+ "vertx": {
+ "authKind": "JWT",
+ "config": {
+ "jwtAuth": {
+ "pubSecKeys": [
+ {
+ "algorithm": "HS256",
+ "buffer": "" // Base64 encoded signer secret string
+ }
+ ],
+ "jwtOptions": {
+ "issuer": "",
+ "audience": [""],
+ "expiresInSeconds": "3600",
+ "leeway": "60"
+ }
+ }
+ }
+ }
+ }
+}
+```
+
+## Usage Notes
+
+- No mandatory keys required for basic operation
+- Connection pools to databases are generated automatically from the overall plan
+- JWT authentication provides secure access to your GraphQL API
+- The server exposes GraphQL, REST, and MCP endpoints based on compiler configuration
\ No newline at end of file
diff --git a/documentation/docs/configuration.md b/documentation/docs/configuration.md
index 6f4dcab56..0aad3a670 100644
--- a/documentation/docs/configuration.md
+++ b/documentation/docs/configuration.md
@@ -17,13 +17,13 @@ The engines that the pipeline compiles to.
```
DataSQRL supports the following engines:
-* **flink**: Apache Flink is a streaming and batch data processor
-* **postgres**: PostgreSQL is a realtime database
-* **kafka**: Apache Kafka is a streaming data platform (i.e. log engine)
-* **iceberg**: Apache Iceberg is an analytic database format. Iceberg must be paired with a query engine for data access
-* **duckdb**: DuckDB is a vectorized database query engine that can read Iceberg tables.
-* **snowflake**: Snowflake is an analytic database query engine that can read Iceberg tables.
-* **vertx**: Eclipse Vert.x is a reactive server framework
+* **[flink](configuration-engine/flink)**: Apache Flink is a streaming and batch data processor
+* **[postgres](configuration-engine/postgres)**: PostgreSQL is a realtime database
+* **[kafka](configuration-engine/kafka)**: Apache Kafka is a streaming data platform (i.e. log engine)
+* **[iceberg](configuration-engine/iceberg)**: Apache Iceberg is an analytic database format. Iceberg must be paired with a query engine for data access
+* **[duckdb](configuration-engine/duckdb)**: DuckDB is a vectorized database query engine that can read Iceberg tables.
+* **[snowflake](configuration-engine/snowflake)**: Snowflake is an analytic database query engine that can read Iceberg tables.
+* **[vertx](configuration-engine/vertx)**: Eclipse Vert.x is a reactive server framework
Guidelines for choosing the enabled engines in a pipeline:
* Always choose one data processor (i.e. "flink")
@@ -152,133 +152,7 @@ Connector templates are used to configure how the engines in the pipeline exchan
Environment variables (e.g. `${POSTGRES_PASSWORD}`) can be referenced inside the configuration files and SQRL scripts. Those are dynamically resolved by the DataSQRL runner when the pipeline is launched. If an environment variable is not configured, it is not replaced.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
----
-
-## Engines (`engines`)
-
-Each sub-key below `engines` must match one of the IDs in **`enabled-engines`**.
-
-```json5
-{
- "engines": {
- "": {
- "type": "", // optional; inferred from key if omitted
- "config": { /*...*/ } // engine-specific knobs (Flink SQL options, etc.)
- }
- }
-}
-```
-
-### Flink (`flink`)
-
-| Key | Type | Default | Notes |
-|--------------|------------|-----------|----------------------------------------------------------------------------------------------------|
-| `config` | **object** | see below | Copied verbatim into the generated Flink SQL job (e.g. `"table.exec.source.idle-timeout": "5 s"`). |
-
-```json
-{
- "engines": {
- "flink": {
- "config": {
- "execution.runtime-mode": "STREAMING",
- "execution.target": "local",
- "execution.attached": true,
- "rest.address": "localhost",
- "rest.port": 8081,
- "state.backend.type": "rocksdb",
- "table.exec.resource.default-parallelism": 1,
- "taskmanager.memory.network.max": "800m"
- }
- }
- }
-}
-```
-
-> **Built-in connector templates**
-> `postgres`, `postgres_log-source`, `postgres_log-sink`,
-> `kafka`, `kafka-keyed`, `kafka-upsert`,
-> `iceberg`, `print`.
-
-### Kafka (`kafka`)
-
-The default configuration only declares the engine; topic definitions are injected at **plan** time.
-Additional keys (e.g. `bootstrap.servers`) may be added under `config`.
-
-### Vert.x (`vertx`)
-
-A GraphQL server that routes queries to the backing database/log engines.
-No mandatory keys; connection pools are generated from the overall plan.
-In terms of security, we support JWT auth, that can be specified under the `config` section.
-
-| Key | Type | Default | Notes |
-|--------------|------------|-----------|---------------------------|
-| `config` | **object** | see below | Vert.x JWT configuration. |
-
-```json5
-{
- "engines": {
- "vertx" : {
- "authKind": "JWT",
- "config": {
- "jwtAuth": {
- "pubSecKeys": [
- {
- "algorithm": "HS256",
- "buffer": "" // Base64 encoded signer secret string
- }
- ],
- "jwtOptions": {
- "issuer": "",
- "audience": [""],
- "expiresInSeconds": "3600",
- "leeway": "60"
- }
- }
- }
- }
- }
-}
-```
-
-### Postgres (`postgres`)
-
-No mandatory keys. Physical DDL (tables, indexes, views) is produced automatically.
-
-### Iceberg (`iceberg`)
-
-Used as a *table-format* engine together with a query engine such as Flink, Snowflake, or DuckDB.
-
-### DuckDB (`duckdb`)
-
-| Key | Type | Default | Description |
-|-------|------------|------------------|----------------|
-| `url` | **string** | `"jdbc:duckdb:"` | Full JDBC URL. |
-
-### Snowflake (`snowflake`)
-
-| Key | Type | Default | Description |
-|-------------------|------------|---------|--------------------------------------|
-| `catalog-name` | **string** | β | Glue catalog. |
-| `external-volume` | **string** | β | Snowflake external volume name. |
-| `url` | **string** | β | Full JDBC URL including auth params. |
-
-
-
-## Internal Environment Variables
+### Internal Environment Variables
For engines that may be running as standalone services inside the DataSQRL Docker container,
we use the following environment variables internally:
diff --git a/documentation/sidebars.ts b/documentation/sidebars.ts
index 15ccd6d2e..310b86dea 100644
--- a/documentation/sidebars.ts
+++ b/documentation/sidebars.ts
@@ -69,6 +69,41 @@ const sidebars = {
id: 'configuration',
},
items: [
+ {
+ type: 'doc',
+ id: 'configuration-engine/flink',
+ label: 'Flink Engine',
+ },
+ {
+ type: 'doc',
+ id: 'configuration-engine/kafka',
+ label: 'Kafka Engine',
+ },
+ {
+ type: 'doc',
+ id: 'configuration-engine/vertx',
+ label: 'Vert.x Engine',
+ },
+ {
+ type: 'doc',
+ id: 'configuration-engine/postgres',
+ label: 'PostgreSQL Engine',
+ },
+ {
+ type: 'doc',
+ id: 'configuration-engine/iceberg',
+ label: 'Iceberg Engine',
+ },
+ {
+ type: 'doc',
+ id: 'configuration-engine/duckdb',
+ label: 'DuckDB Engine',
+ },
+ {
+ type: 'doc',
+ id: 'configuration-engine/snowflake',
+ label: 'Snowflake Engine',
+ },
{
type: 'doc',
id: 'configuration-default',
From 1d8af7b0bd6e095785b02dee94980493ce9a7a7a Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Wed, 8 Oct 2025 15:33:25 -0700
Subject: [PATCH 08/31] finalize engine config
---
.../docs/configuration-engine/flink.md | 49 +++++++++++++++++--
.../docs/configuration-engine/kafka.md | 16 ++++--
.../docs/configuration-engine/postgres.md | 36 +++++++++++++-
.../docs/configuration-engine/vertx.md | 35 ++++++++++++-
documentation/docs/connectors.md | 2 +
5 files changed, 130 insertions(+), 8 deletions(-)
diff --git a/documentation/docs/configuration-engine/flink.md b/documentation/docs/configuration-engine/flink.md
index f01cff8a4..bd5eba31d 100644
--- a/documentation/docs/configuration-engine/flink.md
+++ b/documentation/docs/configuration-engine/flink.md
@@ -8,6 +8,16 @@ Apache Flink is a streaming and batch data processor that serves as the core dat
|--------------|------------|-----------|----------------------------------------------------------------------------------------------------|
| `config` | **object** | see below | Copied verbatim into the generated Flink SQL job (e.g. `"table.exec.source.idle-timeout": "5 s"`). |
+Frequently configured options include:
+
+* `execution.runtime-mode`: `BATCH` or `STREAMING`
+* `table.exec.source.idle-timeout`: Timeout for idle sources so that watermarks can advance.
+* `table.exec.mini-batch.*`: Enables more efficient execution in STREAMING mode by processing records in small batches.
+
+Refer to the [Flink Documentation](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/config/) for all Flink configuration options.
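+
+As a sketch, enabling mini-batch execution could look like this (the latency and size values are illustrative):
+
+```json5
+{
+  "engines": {
+    "flink": {
+      "config": {
+        "table.exec.mini-batch.enabled": true,
+        "table.exec.mini-batch.allow-latency": "1 s",   // illustrative value
+        "table.exec.mini-batch.size": 1000              // illustrative value
+      }
+    }
+  }
+}
+```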
+
+
+
## Example Configuration
```json
@@ -16,9 +26,6 @@ Apache Flink is a streaming and batch data processor that serves as the core dat
"flink": {
"config": {
"execution.runtime-mode": "STREAMING",
- "execution.target": "local",
- "execution.attached": true,
- "rest.address": "localhost",
"rest.port": 8081,
"state.backend.type": "rocksdb",
"table.exec.resource.default-parallelism": 1,
@@ -29,6 +36,42 @@ Apache Flink is a streaming and batch data processor that serves as the core dat
}
```
+## Deployment Configuration
+
+Flink supports deployment-specific configuration options for managing cluster resources:
+
+| Key | Type | Default | Description |
+|----------------------|-------------|---------|----------------------------------------------------------------------|
+| `jobmanager-size` | **string** | - | Job manager instance size: `dev`, `small`, `medium`, `large` |
+| `taskmanager-size` | **string** | - | Task manager instance size with resource variants |
+| `taskmanager-count` | **integer** | - | Number of task manager instances (minimum: 1) |
+| `secrets` | **array** | `null` | Array of secret names to inject, or `null` if no secrets needed |
+
+### Task Manager Size Options
+
+Available `taskmanager-size` options with resource variants:
+- `dev` - Development/testing size
+- `small`, `small.mem`, `small.cpu` - Small instances with memory or CPU optimization
+- `medium`, `medium.mem`, `medium.cpu` - Medium instances with resource variants
+- `large`, `large.mem`, `large.cpu` - Large instances with resource variants
+
+### Deployment Example
+
+```json
+{
+ "engines": {
+ "flink": {
+ "deployment": {
+ "jobmanager-size": "small",
+ "taskmanager-size": "medium.mem",
+ "taskmanager-count": 2,
+ "secrets": ["flink-secrets", "db-credentials"]
+ }
+ }
+ }
+}
+```
+
## Built-in Connector Templates
The following connector templates are available when using Flink:
diff --git a/documentation/docs/configuration-engine/kafka.md b/documentation/docs/configuration-engine/kafka.md
index 3972b4056..9f697a9e1 100644
--- a/documentation/docs/configuration-engine/kafka.md
+++ b/documentation/docs/configuration-engine/kafka.md
@@ -4,7 +4,13 @@ Apache Kafka is a streaming data platform that serves as the log engine in DataS
## Configuration Options
-The default configuration only declares the engine; topic definitions are injected at **plan** time. Additional keys (e.g. `bootstrap.servers`) may be added under `config`.
+| Key | Type | Default | Description |
+|------------------------|-------------|-----------|-------------------------------------------------------------------------|
+| `retention` | **string** | `null` | Topic retention time (e.g., "7d", "24h") or indefinite when `null` |
+| `watermark` | **string** | `"0 ms"` | Watermark delay for event time processing |
+| `transaction-watermark`| **string** | `"0 ms"` | Watermark delay for event time processing when transactions are enabled |
+
+Additional custom Kafka settings can be added under the `config` section.
## Example Configuration
@@ -12,8 +18,11 @@ The default configuration only declares the engine; topic definitions are inject
{
"engines": {
"kafka": {
+ "retention": "14d",
+ "watermark": "2 sec",
+ "transaction-watermark": "10 sec",
"config": {
- "bootstrap.servers": "localhost:9092"
+ "auto.offset.reset": "earliest"
}
}
}
@@ -24,5 +33,6 @@ The default configuration only declares the engine; topic definitions are inject
- Kafka topics are automatically created based on your SQRL table definitions
- Topic configurations are generated during the compilation process
-- For custom Kafka settings, add them under the `config` section
+- Retention settings control how long data is stored in Kafka topics
+- Watermarks are used for handling late-arriving events in stream processing
- Kafka serves as the messaging backbone between different engines in the pipeline
\ No newline at end of file
diff --git a/documentation/docs/configuration-engine/postgres.md b/documentation/docs/configuration-engine/postgres.md
index 6caf80a0e..988cd52c8 100644
--- a/documentation/docs/configuration-engine/postgres.md
+++ b/documentation/docs/configuration-engine/postgres.md
@@ -20,10 +20,44 @@ No mandatory configuration keys are required. Physical DDL (tables, indexes, vie
}
```
+## Deployment Configuration
+
+PostgreSQL supports deployment-specific configuration for database scaling and high availability:
+
+| Key | Type | Default | Description |
+|------------------|-------------|---------|---------------------------------------------------------|
+| `instance-size` | **string** | - | Database instance size for compute and memory |
+| `replica-count` | **integer** | - | Number of read replicas (minimum: 0, maximum varies) |
+
+### Instance Size Options
+
+Available `instance-size` options:
+- `dev` - Development/testing size with minimal resources
+- `small` - Small production workloads
+- `medium` - Medium production workloads
+- `large` - Large production workloads
+- `xlarge` - Extra large production workloads
+
+### Deployment Example
+
+```json
+{
+ "engines": {
+ "postgres": {
+ "deployment": {
+ "instance-size": "large",
+ "replica-count": 2
+ }
+ }
+ }
+}
+```
+
## Usage Notes
- Database schema is automatically generated from your SQRL script
- Tables, indexes, and views are created based on the compiled data pipeline
- Connection parameters are typically provided via environment variables
- The engine handles both real-time data ingestion and query serving
-- Optimized for low-latency reads of materialized data
\ No newline at end of file
+- Optimized for low-latency reads of materialized data
+- Read replicas improve query performance and provide redundancy
\ No newline at end of file
diff --git a/documentation/docs/configuration-engine/vertx.md b/documentation/docs/configuration-engine/vertx.md
index 54009e271..9819cc40b 100644
--- a/documentation/docs/configuration-engine/vertx.md
+++ b/documentation/docs/configuration-engine/vertx.md
@@ -51,9 +51,42 @@ For secure APIs with JWT authentication:
}
```
+## Deployment Configuration
+
+Vert.x supports deployment-specific configuration options for scaling the API server:
+
+| Key | Type | Default | Description |
+|-------------------|-------------|---------|----------------------------------------------------------------|
+| `instance-size` | **string** | - | Server instance size with storage variants |
+| `instance-count` | **integer** | - | Number of server instances to run (minimum: 1) |
+
+### Instance Size Options
+
+Available `instance-size` options with storage variants:
+- `dev` - Development/testing size
+- `small`, `small.disk` - Small instances with optional additional disk
+- `medium`, `medium.disk` - Medium instances with optional additional disk
+- `large`, `large.disk` - Large instances with optional additional disk
+
+### Deployment Example
+
+```json
+{
+ "engines": {
+ "vertx": {
+ "deployment": {
+ "instance-size": "medium.disk",
+ "instance-count": 3
+ }
+ }
+ }
+}
+```
+
## Usage Notes
- No mandatory keys required for basic operation
- Connection pools to databases are generated automatically from the overall plan
- JWT authentication provides secure access to your GraphQL API
-- The server exposes GraphQL, REST, and MCP endpoints based on compiler configuration
\ No newline at end of file
+- The server exposes GraphQL, REST, and MCP endpoints based on compiler configuration
+- Deployment configuration allows horizontal scaling for high-availability setups
\ No newline at end of file
diff --git a/documentation/docs/connectors.md b/documentation/docs/connectors.md
index a71f6ff1c..da936a13e 100644
--- a/documentation/docs/connectors.md
+++ b/documentation/docs/connectors.md
@@ -1,5 +1,7 @@
# Connecting External Data Sources and Sinks
+Always use event time when connecting external data sources.
+
Use `CREATE TABLE` statements to connect external data sources and sinks with your SQRL script using the `WITH` clause to provide connector configuration.
DataSQRL uses Apache Flink connectors and formats. To find a connector for your data system, use:
From a6fd88c664a2f263cd47ae881776b1bb61a36051 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Thu, 9 Oct 2025 12:12:18 -0700
Subject: [PATCH 09/31] update default iceberg configuration
---
.../docs/configuration-engine/iceberg.md | 15 ++++++-----
documentation/docs/configuration.md | 4 ++-
documentation/docs/stdlib-docs | 2 +-
.../src/main/resources/default-package.json | 26 +++----------------
4 files changed, 16 insertions(+), 31 deletions(-)
diff --git a/documentation/docs/configuration-engine/iceberg.md b/documentation/docs/configuration-engine/iceberg.md
index 48b5a9a54..a115b0381 100644
--- a/documentation/docs/configuration-engine/iceberg.md
+++ b/documentation/docs/configuration-engine/iceberg.md
@@ -8,15 +8,16 @@ Iceberg is used as a *table-format* engine and must be paired with a query engin
## Basic Configuration
-```json
+Since Iceberg is not a standalone data system but a data format, the configuration for Iceberg is managed through the shared `iceberg` connector:
+
+```json5
{
- "engines": {
+ "connectors": {
"iceberg": {
- "config": {
- // Iceberg-specific configuration options
- }
- }
- }
+ "warehouse": "iceberg-data-dir", // path the Iceberg table data is written to
+ "catalog-type": "hadoop", // the catalog to use for Iceberg metadata
+ "catalog-name": "mycatalog" // the name of the catalog
+    }
+  }
}
```
diff --git a/documentation/docs/configuration.md b/documentation/docs/configuration.md
index 0aad3a670..fdcde9855 100644
--- a/documentation/docs/configuration.md
+++ b/documentation/docs/configuration.md
@@ -145,8 +145,10 @@ Configuration options that control the compiler, such as where logging output is
## Connector Templates (`connectors`)
-Connector templates are used to configure how the engines in the pipeline exchange data. The connector templates use Flink SQL connector configuration options with variables. Only very advanced use cases require adjustments to the connector templates. Refer to the [default configuration](configuration-default) for documentation of all connector templates.
+Connector templates are used to configure how the engines in the pipeline connect to each other for data exchange. The connector templates use Flink SQL connector configuration options which are mapped to the configuration for each engine.
+The [default connector configuration](configuration-default) works for most local use cases without adjustments.
+Refer to the individual engine configuration for connector configuration options related to that engine.
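+
+For example, the default `print` connector template maps a table to Flink's print connector, with the `${sqrl:table-name}` variable substituted per table (taken from the [default configuration](configuration-default)):
+
+```json
+{
+  "connectors": {
+    "print": {
+      "connector": "print",
+      "print-identifier": "${sqrl:table-name}"
+    }
+  }
+}
+```
+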
## Environment Variables (`${VAR}`)
diff --git a/documentation/docs/stdlib-docs b/documentation/docs/stdlib-docs
index 0ff2d109e..655e7a9dc 160000
--- a/documentation/docs/stdlib-docs
+++ b/documentation/docs/stdlib-docs
@@ -1 +1 @@
-Subproject commit 0ff2d109e1e0e182e2aee9801fcdcf9d0821d529
+Subproject commit 655e7a9dcb9939c7375e402fac3d6e5e5866d625
diff --git a/sqrl-planner/src/main/resources/default-package.json b/sqrl-planner/src/main/resources/default-package.json
index 8074b5a9f..6802d70f0 100644
--- a/sqrl-planner/src/main/resources/default-package.json
+++ b/sqrl-planner/src/main/resources/default-package.json
@@ -64,7 +64,10 @@
},
"iceberg": {
"connector": "iceberg",
- "catalog-table": "${sqrl:table-name}"
+ "catalog-table": "${sqrl:table-name}",
+ "warehouse": "iceberg-data",
+ "catalog-type": "hadoop",
+ "catalog-name": "mycatalog"
},
"postgres": {
"connector": "jdbc-sqrl",
@@ -74,27 +77,6 @@
"driver": "org.postgresql.Driver",
"table-name": "${sqrl:table-name}"
},
- "postgres_log-source": {
- "connector": "postgres-cdc",
- "hostname": "${POSTGRES_HOST}",
- "port": "5432",
- "username": "${POSTGRES_USERNAME}",
- "password": "${POSTGRES_PASSWORD}",
- "database-name": "datasqrl",
- "schema-name": "public",
- "table-name": "${sqrl:table-name}",
- "slot.name": "flink_slot",
- "decoding.plugin.name": "pgoutput",
- "debezium.slot.drop_on_stop": "false"
- },
- "postgres_log-sink": {
- "connector": "jdbc-sqrl",
- "password": "${POSTGRES_PASSWORD}",
- "driver": "org.postgresql.Driver",
- "username": "${POSTGRES_USERNAME}",
- "url": "jdbc:postgresql://${POSTGRES_AUTHORITY}",
- "table-name": "${sqrl:table-name}"
- },
"print": {
"connector": "print",
"print-identifier": "${sqrl:table-name}"
From b9fe01bc67099421106325222a212296289cd9f2 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Thu, 9 Oct 2025 12:16:25 -0700
Subject: [PATCH 10/31] add auto-update script for default configuration.
---
.../docs/configuration-default-update.sh | 48 +++++++++++++++++++
documentation/docs/configuration-default.md | 26 ++--------
2 files changed, 52 insertions(+), 22 deletions(-)
create mode 100755 documentation/docs/configuration-default-update.sh
diff --git a/documentation/docs/configuration-default-update.sh b/documentation/docs/configuration-default-update.sh
new file mode 100755
index 000000000..209b5ee86
--- /dev/null
+++ b/documentation/docs/configuration-default-update.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+# Script to update configuration-default.md with the latest default-package.json content
+# This script replaces everything between ```json and ``` with the contents of default-package.json
+
+set -e
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+MD_FILE="$SCRIPT_DIR/configuration-default.md"
+JSON_FILE="$SCRIPT_DIR/../../sqrl-planner/src/main/resources/default-package.json"
+
+# Check if files exist
+if [ ! -f "$MD_FILE" ]; then
+ echo "Error: $MD_FILE not found"
+ exit 1
+fi
+
+if [ ! -f "$JSON_FILE" ]; then
+ echo "Error: $JSON_FILE not found"
+ exit 1
+fi
+
+echo "Updating $MD_FILE with contents from $JSON_FILE..."
+
+# Create temporary files
+TEMP_FILE=$(mktemp)
+BEFORE_JSON=$(mktemp)
+AFTER_JSON=$(mktemp)
+
+# Extract the part before ```json
+sed -n '1,/^```json$/p' "$MD_FILE" > "$BEFORE_JSON"
+
+# Extract the part after the closing ```
+sed -n '/^```$/,$p' "$MD_FILE" | tail -n +2 > "$AFTER_JSON"
+
+# Combine: before + json content + after
+cat "$BEFORE_JSON" > "$TEMP_FILE"
+cat "$JSON_FILE" >> "$TEMP_FILE"
+echo '```' >> "$TEMP_FILE"
+cat "$AFTER_JSON" >> "$TEMP_FILE"
+
+# Replace the original file
+mv "$TEMP_FILE" "$MD_FILE"
+
+# Clean up temporary files
+rm -f "$BEFORE_JSON" "$AFTER_JSON"
+
+echo "Successfully updated $MD_FILE with the latest default configuration"
\ No newline at end of file
diff --git a/documentation/docs/configuration-default.md b/documentation/docs/configuration-default.md
index a6e2e5cb3..74cd754af 100644
--- a/documentation/docs/configuration-default.md
+++ b/documentation/docs/configuration-default.md
@@ -64,7 +64,10 @@ The following is the [default configuration file](https://raw.githubusercontent.
},
"iceberg": {
"connector": "iceberg",
- "catalog-table": "${sqrl:table-name}"
+ "catalog-table": "${sqrl:table-name}",
+ "warehouse": "iceberg-data",
+ "catalog-type": "hadoop",
+ "catalog-name": "mycatalog"
},
"postgres": {
"connector": "jdbc-sqrl",
@@ -74,27 +77,6 @@ The following is the [default configuration file](https://raw.githubusercontent.
"driver": "org.postgresql.Driver",
"table-name": "${sqrl:table-name}"
},
- "postgres_log-source": {
- "connector": "postgres-cdc",
- "hostname": "${POSTGRES_HOST}",
- "port": "5432",
- "username": "${POSTGRES_USERNAME}",
- "password": "${POSTGRES_PASSWORD}",
- "database-name": "datasqrl",
- "schema-name": "public",
- "table-name": "${sqrl:table-name}",
- "slot.name": "flink_slot",
- "decoding.plugin.name": "pgoutput",
- "debezium.slot.drop_on_stop": "false"
- },
- "postgres_log-sink": {
- "connector": "jdbc-sqrl",
- "password": "${POSTGRES_PASSWORD}",
- "driver": "org.postgresql.Driver",
- "username": "${POSTGRES_USERNAME}",
- "url": "jdbc:postgresql://${POSTGRES_AUTHORITY}",
- "table-name": "${sqrl:table-name}"
- },
"print": {
"connector": "print",
"print-identifier": "${sqrl:table-name}"
From 37e8594e45030408660f7d222ff1a0d66edb13bc Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Thu, 9 Oct 2025 18:02:49 -0700
Subject: [PATCH 11/31] update documentation, add howtos.
---
.../docs/configuration-engine/flink.md | 16 +-
.../docs/configuration-engine/kafka.md | 12 +-
.../docs/configuration-engine/postgres.md | 18 +-
documentation/docs/configuration.md | 43 ++---
documentation/docs/connectors.md | 2 +
documentation/docs/howto.md | 81 --------
documentation/docs/howto/project-structure.md | 30 +++
documentation/docs/howto/stream-enrichment.md | 41 +++++
.../docs/howto/subgraph-elimination.md | 18 ++
documentation/docs/howto/templating.md | 69 +++++++
.../docs/howto/testing-authorization.md | 5 +
documentation/docs/howto/testing.md | 88 +++++++++
documentation/docs/interface.md | 114 ++++++++++--
documentation/docs/sqrl-language.md | 173 ++++++++++--------
documentation/sidebars.ts | 37 ++--
.../config/CompilerApiConfigImpl.java | 8 +-
16 files changed, 515 insertions(+), 240 deletions(-)
delete mode 100644 documentation/docs/howto.md
create mode 100644 documentation/docs/howto/project-structure.md
create mode 100644 documentation/docs/howto/stream-enrichment.md
create mode 100644 documentation/docs/howto/subgraph-elimination.md
create mode 100644 documentation/docs/howto/templating.md
create mode 100644 documentation/docs/howto/testing-authorization.md
create mode 100644 documentation/docs/howto/testing.md
diff --git a/documentation/docs/configuration-engine/flink.md b/documentation/docs/configuration-engine/flink.md
index bd5eba31d..b406bb437 100644
--- a/documentation/docs/configuration-engine/flink.md
+++ b/documentation/docs/configuration-engine/flink.md
@@ -16,8 +16,6 @@ Frequently configured options include:
 Refer to the [Flink Documentation](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/config/) for all Flink configuration options.
-
-
## Example Configuration
```json
@@ -70,16 +68,4 @@ Available `taskmanager-size` options with resource variants:
}
}
}
-```
-
-## Built-in Connector Templates
-
-The following connector templates are available when using Flink:
-- `postgres` - JDBC connector for PostgreSQL
-- `postgres_log-source` - PostgreSQL CDC source connector
-- `postgres_log-sink` - PostgreSQL sink connector
-- `kafka` - Kafka connector for streaming data
-- `kafka-keyed` - Keyed Kafka connector
-- `kafka-upsert` - Kafka upsert connector
-- `iceberg` - Apache Iceberg connector
-- `print` - Print connector for debugging
\ No newline at end of file
+```
\ No newline at end of file
diff --git a/documentation/docs/configuration-engine/kafka.md b/documentation/docs/configuration-engine/kafka.md
index 9f697a9e1..8d80a4b53 100644
--- a/documentation/docs/configuration-engine/kafka.md
+++ b/documentation/docs/configuration-engine/kafka.md
@@ -35,4 +35,14 @@ Additional custom Kafka settings can be added under the `config` section.
- Topic configurations are generated during the compilation process
- Retention settings control how long data is stored in Kafka topics
- Watermarks are used for handling late-arriving events in stream processing
-- Kafka serves as the messaging backbone between different engines in the pipeline
\ No newline at end of file
+- Kafka serves as the messaging backbone between different engines in the pipeline
+
+
+
+### Internal Environment Variables
+
+When running pipelines with the DataSQRL `run` command, the following environment variables are used
+in the configuration:
+
+* `KAFKA_BOOTSTRAP_SERVERS`
+* `KAFKA_GROUP_ID`
\ No newline at end of file
diff --git a/documentation/docs/configuration-engine/postgres.md b/documentation/docs/configuration-engine/postgres.md
index 988cd52c8..644233a32 100644
--- a/documentation/docs/configuration-engine/postgres.md
+++ b/documentation/docs/configuration-engine/postgres.md
@@ -60,4 +60,20 @@ Available `instance-size` options:
- Connection parameters are typically provided via environment variables
- The engine handles both real-time data ingestion and query serving
- Optimized for low-latency reads of materialized data
-- Read replicas improve query performance and provide redundancy
\ No newline at end of file
+- Read replicas improve query performance and provide redundancy
+
+
+
+### Internal Environment Variables
+
+When running pipelines with the DataSQRL `run` command, the following environment variables are used
+in the configuration (see the example below):
+
+* `POSTGRES_VERSION`
+* `POSTGRES_HOST`
+* `POSTGRES_PORT`
+* `POSTGRES_DATABASE`
+* `POSTGRES_AUTHORITY`
+* `POSTGRES_JDBC_URL`
+* `POSTGRES_USERNAME`
+* `POSTGRES_PASSWORD`
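+
+For example, the default `postgres` connector template references them roughly as follows (abridged sketch):
+
+```json
+{
+  "connectors": {
+    "postgres": {
+      "connector": "jdbc-sqrl",
+      "url": "jdbc:postgresql://${POSTGRES_AUTHORITY}",
+      "username": "${POSTGRES_USERNAME}",
+      "password": "${POSTGRES_PASSWORD}"
+    }
+  }
+}
+```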
diff --git a/documentation/docs/configuration.md b/documentation/docs/configuration.md
index fdcde9855..20a6a35a0 100644
--- a/documentation/docs/configuration.md
+++ b/documentation/docs/configuration.md
@@ -6,11 +6,11 @@ The `version` field specifies the version of the configuration file which is cur
---
-## Engines (`enabled-engines` and `engines`)
+## Engines (`enabled-engines`)
The engines that the pipeline compiles to.
-```json5
+```json
{
"enabled-engines": ["flink", "postgres", "kafka", "vertx"]
}
@@ -35,9 +35,9 @@ Guidelines for choosing the enabled engines in a pipeline:
* Choose at most one log or server engine, but choosing multiple database engines is supported.
* When choosing a query engine that operates in the cloud (e.g. snowflake), substitute for a locally executable query engine (i.e. "duckdb") for testing and running the pipeline locally.
-The individual engines are configured under the `engines` field. The following example configures a Flink-specific setting:
+The individual engines are configured under the **`engines`** field. The following example configures a Flink-specific setting:
-```json5
+```json
{
"engines": {
"flink": {
@@ -57,17 +57,17 @@ Configures the main SQRL script to compile, the (optional) GraphQL schema for th
The `config` JSON object is passed to the Mustache templating engine to substitute template variable occurrences (e.g. `{{table}}`) before the script is compiled.
-```json5
+```json
{
"script": {
- "main": "my-project.sqrl", // Main SQRL script for pipeline
- "graphql": "api/schema.v1.graphqls", // GraphQL schema defines the API
+ "main": "my-project.sqrl", // Main SQRL script for pipeline
+ "graphql": "api/schema.v1.graphqls", // GraphQL schema defines the API
"operations": ["api/operations-v1/myop1.graphql"], //List of GraphQL queries that define operations which are exposed as API endpoints
- "config": { //Arbitrary JSON object used by the mustache templating engine to instantiate SQRL files
+ "config": { //Arbitrary JSON object used by the mustache templating engine to instantiate SQRL files
"table": "orders",
"filters": [
-{ "field": "total_amount", "isNull": false },
-{ "field": "coupon_code", "isNull": true },
+ { "field": "total_amount", "isNull": false },
+ { "field": "coupon_code", "isNull": true }
]
}
}
@@ -91,7 +91,7 @@ MyTable := SELECT
Configures how the DataSQRL test runner executes tests.
For streaming pipelines, use `required-checkpoints` to set a reliable time-interval for creating snapshots. Otherwise, configure a wall-clock delay via `delay-sec`.
-```json5
+```json
{
"test-runner": {
"snapshot-folder": "snapshots/myproject/", // Snapshots output directory (default: "./snapshots")
@@ -111,7 +111,7 @@ For streaming pipelines, use `required-checkpoints` to set a reliable time-inter
 Configuration options that control the compiler, such as where logging output is produced, how the pipeline plan is written out, what cost model to use for determining the allocation of data processing steps to engines, and what protocols are exposed in the API.
-```json5
+```json
{
"compiler": {
"logger": "print", // "print" | "none"
@@ -154,24 +154,5 @@ Refer to the individual engine configuration for connector configuration options
Environment variables (e.g. `${POSTGRES_PASSWORD}`) can be referenced inside the configuration files and SQRL scripts. Those are dynamically resolved by the DataSQRL runner when the pipeline is launched. If an environment variable is not configured, it is not replaced.
-### Internal Environment Variables
-
-For engines that may be running as standalone services inside the DataSQRL Docker container,
-we use the following environment variables internally:
-
-* **Kafka**
- * `KAFKA_BOOTSTRAP_SERVERS`
- * `KAFKA_GROUP_ID`
-* **PostgreSQL**
- * `POSTGRES_VERSION`
- * `POSTGRES_HOST`
- * `POSTGRES_PORT`
- * `POSTGRES_DATABASE`
- * `POSTGRES_AUTHORITY`
- * `POSTGRES_JDBC_URL`
- * `POSTGRES_USERNAME`
- * `POSTGRES_PASSWORD`
-
----
diff --git a/documentation/docs/connectors.md b/documentation/docs/connectors.md
index da936a13e..0320b2b68 100644
--- a/documentation/docs/connectors.md
+++ b/documentation/docs/connectors.md
@@ -2,6 +2,8 @@
ALWAYS USE EVENT TIME
+STATE vs STREAM
+
Use `CREATE TABLE` statements to connect external data sources and sinks with your SQRL script using the `WITH` clause to provide connector configuration.
DataSQRL uses Apache Flink connectors and formats. To find a connector for your data system, use:
diff --git a/documentation/docs/howto.md b/documentation/docs/howto.md
deleted file mode 100644
index 5c79448c7..000000000
--- a/documentation/docs/howto.md
+++ /dev/null
@@ -1,81 +0,0 @@
-# How-To Guides
-
-Practical guides for developing data pipelines with DataSQRL
-
-## Project Structure
-
-We recommend the following project structure to support testing and deploying to multiple environments:
-
-```text
-project-root/
-├── sources-prod/ <-- contains source connectors for prod
-├── sources-testdata/ <-- contains test data
-├── snapshots/ <-- snapshots for test cases, generated by DataSQRL
-├── tests/ <-- (optional) GraphQL test queries
-├── components.sqrl <-- table definitions imported into main script
-├── mainscript.sqrl
-├── mainscript_package_prod.json <-- configuration for prod
-└── mainscript_package_test.json <-- configuration for testing
-```
-
-* Create one folder for each collection of data sources.
- Sources that represent the same type of data but different environments (test vs prod) have the same prefix.
-* Create one separate `package.json` [configuration file](configuration.md) for each environment that references the same main script,
- but maps the data sources differently in the `dependencies` section of the configuration, and (optionally) uses different engines and configurations.
-* By default, DataSQRL uses `tests` and `snapshots` directories. If you have multiple test suites or run the same tests with different sources,
- append a distinguishing suffix (e.g. `-api` or `-regression`) to both directory names and setup specific `package.json` files
- (e.g. `api-package.json` or `regression-package.json`) that refers those directories in their [`test-runner`](configuration.md#test-runner-test-runner) section.
-
-## Testing
-
-DataSQRL supports [running automated tests](compiler#test-command) for your SQRL pipeline by annotating test cases with the `/*+test */` hint
-or placing test queries in the `tests` folder (or any other folder that's set in the `test-runner` configuration).
-
-The best practice for writing test cases is to [modularize](connectors#connector-management) your sources
-so that you dynamically link different sources for local development, testing, and production.
-In many cases, you can use the same sources for testing and local development in a single folder.
-
-That data should contain explicit event timestamps for all records. That enables completely deterministic test cases.
-It also supports reproducing failure scenarios that you experienced in production as local test cases by using the data that caused the failure with the original timestamps.
-That way, you don't have to externally simulate certain sequences of events that caused the failure in the first place.
-
-In addition, it allows you to build up a repository of failures and edge cases that gets executed automatically to spot regressions.
-
-## Script Imports
-
-If your main script gets too big, or you want to reuse table definitions across multiple scripts,
-move the definitions to a separate SQRL script and import it into the main script.
-
-### Inline Script Imports
-
-Inline imports place table and function definitions from another script into the current scope and requires
-that table and function names do not clash with those in the importing script.
-
-```sql
-IMPORT myscript.*;
-```
-This statement imports all tables and functions from a SQRL script called `myscript.sqrl` in the local folder.
-
-## Data Discovery
-
-DataSQRL automatically generates table definitions with connector configuration and schemas for json-line files (with extension `.jsonl`)
-and csv files (with extension `.csv`) within the project directory.
-This makes it easy to import data from such files into a SQRL project.
-
-For example, to import data from a file `orders.jsonl` in the folder `mydata` you write:
-```sql
-IMPORT mydata.orders;
-```
-
-When you run the compiler, it will create the table configuration file `orders.table.sql` which you can then import like any other source.
-The compiler reads the file and auto-discovers the schema.
-
-To disable automatic discovery of data for a directory, place a file called `.nodiscovery` into that directory.
-
-## Manual Subgraph Elimination with Noop Function
-
-Sometimes the Flink optimizer is too smart for its own good and will push down predicates that make common subgraph identification impossible.
-That can result in much larger job graphs and poor performance or high state maintenance.
-
-To inhibit predicate pushdown, SQRL uses the `noop` function that takes an arbitrary list of argument and always returns true.
-As such, the function serves no purpose other than making it impossible for the optimizer to push down predicates.
diff --git a/documentation/docs/howto/project-structure.md b/documentation/docs/howto/project-structure.md
new file mode 100644
index 000000000..ceebb62ed
--- /dev/null
+++ b/documentation/docs/howto/project-structure.md
@@ -0,0 +1,30 @@
+# Project Structure
+
+A DataSQRL project is structured as follows where `{name}` is the project name.
+
+```
+├── {name}.sqrl                          # Contains the main data processing logic
+├── {name}-[run/test/prod]-package.json  # Configuration files for running locally, testing, and deploying the project
+├── {name}-connectors/                   # Contains source and sink table definitions, shared connector logic, schemas, and data files
+│   └── sources-[run/test/prod].sqrl     # Contains the source table definitions, split by variant or environment
+├── snapshots/                           # Contains the snapshot data for tests
+│   └── {name}/                          # One directory per project
+├── {name}-api/                          # Contains the API schema and operation definitions for the project
+│   ├── schema.v1.graphqls               # GraphQL schema definition
+│   ├── tests/                           # Contains GraphQL test queries as .graphql files
+│   └── operations-v1/                   # Contains any operation definitions as .graphql files
+└── README.md                            # Explain the project(s) and structure
+```
+
+A project has one or more `package.json` configuration files to configure the compiled pipeline for different environments: running locally, testing, and one or more deployment environments. The targeted environment is used in the name, e.g. `run`, `test`, `qa`, `prod`, etc.
+
+The `package.json` file is the authoritative source that defines the main SQRL script and (optional) GraphQL schema and operations. It also configures snapshot and test directories. Always consult the `package.json` files for the relative file paths to the project source files.
+
+For advanced projects or when multiple projects share one directory, the structure may include:
+```
+├── [shared/authentication]-package.json  # Config file that is shared across projects
+├── tests/                                # Folder that contains test code to separate it from the main logic
+│   └── {test-name}.sqrl                  # This file is included inline in the main script
+├── functions/                            # User defined functions
+└── shared.sqrl                           # SQRL script that's shared across projects
+```
\ No newline at end of file
diff --git a/documentation/docs/howto/stream-enrichment.md b/documentation/docs/howto/stream-enrichment.md
new file mode 100644
index 000000000..54dcaad6b
--- /dev/null
+++ b/documentation/docs/howto/stream-enrichment.md
@@ -0,0 +1,41 @@
+# Enriching Data Streams
+
+A common requirement is to enrich a stream of events with dimensional data associated with those events in a time-consistent manner.
+
+For example, suppose we want to enrich transactions with user account balances:
+
+```sql
+CREATE TABLE Transaction (
+ `txid` BIGINT NOT NULL,
+ `accountid` BIGINT NOT NULL,
+ `amount` DECIMAL(10,2) NOT NULL,
+ `timestamp` TIMESTAMP_LTZ(3) NOT NULL,
+ WATERMARK FOR `timestamp` AS `timestamp` - INTERVAL '0.001' SECOND
+) WITH (...);
+
+CREATE TABLE AccountBalanceUpdates (
+ `accountid` BIGINT NOT NULL,
+ `balance` DECIMAL(15,2) NOT NULL,
+ `lastUpdated` TIMESTAMP_LTZ(3) NOT NULL,
+ WATERMARK FOR `lastUpdated` AS `lastUpdated` - INTERVAL '0.001' SECOND
+) WITH (...);
+```
+
+Those can be internal or external table sources with or without connector configuration.
+The important piece is that `Transaction` is a stream of transaction events and `AccountBalanceUpdates` is a changelog stream for the account balance entity, related to the transaction stream by `accountid`.
+
+To join in the account balance to the transaction, we want to ensure that we get the balance **at the time of the transaction** for the join to be consistent in time.
+
+To accomplish this, we first convert the append-only `AccountBalanceUpdates` stream into a versioned state table by deduplicating the stream on its primary key:
+
+```sql
+AccountBalance := DISTINCT AccountBalanceUpdates ON accountid ORDER BY lastUpdated DESC;
+```
+
+We can then join the versioned state table to the stream with a temporal join:
+
+```sql
+EnrichedTransaction := SELECT t.*, a.* FROM Transaction t
+  JOIN AccountBalance FOR SYSTEM_TIME AS OF t.`timestamp` a ON a.accountid = t.accountid;
+```
\ No newline at end of file
diff --git a/documentation/docs/howto/subgraph-elimination.md b/documentation/docs/howto/subgraph-elimination.md
new file mode 100644
index 000000000..dcb7094f8
--- /dev/null
+++ b/documentation/docs/howto/subgraph-elimination.md
@@ -0,0 +1,18 @@
+# Subgraph Elimination
+
+Sometimes the Flink optimizer is too smart for its own good and will push down predicates that make common subgraph identification impossible, resulting in duplicate computation.
+That can result in much larger job graphs and poor performance or high state maintenance.
+
+To inhibit predicate pushdown, SQRL provides the `noop` function that takes an arbitrary list of arguments and always returns true.
+As such, the function serves no purpose other than making it impossible for the optimizer to push down predicates.
+
+Consider the following schematic example:
+
+```sql
+MyComputedTable := SELECT a, b, expensive_function(data) AS c FROM InputData;
+
+ResultA := SELECT a, c FROM MyComputedTable WHERE noop(a,b,c);
+ResultB := SELECT b, c FROM MyComputedTable WHERE noop(a,b,c);
+```
+
+Because `ResultA` and `ResultB` select different subsets of columns, those selections can get pushed down to the source `InputData` table, resulting in `expensive_function` being executed twice because the relational trees differ slightly. Adding the `noop` function to both filters inhibits that push-down, so the common subgraph is identified and `expensive_function` is computed only once.
\ No newline at end of file
diff --git a/documentation/docs/howto/templating.md b/documentation/docs/howto/templating.md
new file mode 100644
index 000000000..3a8ddfb6d
--- /dev/null
+++ b/documentation/docs/howto/templating.md
@@ -0,0 +1,69 @@
+# Templating
+
+DataSQRL uses the Mustache templating engine to substitute configuration variables in SQRL scripts, making them reusable and configurable without modifying the code.
+
+## How It Works
+
+Variables in your SQRL script are wrapped in double curly braces `{{variableName}}`. When DataSQRL compiles the script, it replaces these placeholders with values defined in the `script.config` section of your [`package.json`](../configuration) configuration file.
+
+## Example
+
+### Configuration File (package.json)
+
+```json
+{
+ "version": "1",
+ "script": {
+ "main": "query.sqrl",
+ "config": {
+ "tableName": "Users",
+ "idColumn": "user_id",
+ "timestampColumn": "created_at",
+ "minAge": 18
+ }
+ }
+}
+```
+
+### SQRL Script (query.sqrl)
+
+```sql
+IMPORT tables.{{tableName}};
+
+FilteredUsers :=
+SELECT {{idColumn}},
+ name,
+ {{timestampColumn}}
+FROM {{tableName}}
+WHERE age >= {{minAge}};
+```
+
+### After Substitution
+
+When DataSQRL processes this script, it replaces all `{{variableName}}` placeholders:
+
+```sql
+IMPORT tables.Users;
+
+FilteredUsers :=
+SELECT user_id,
+ name,
+ created_at
+FROM Users
+WHERE age >= 18;
+```
+
+## Benefits
+
+1. **Reusability**: The same SQRL script can work with different tables and columns by changing the config
+2. **Maintainability**: Configuration is centralized in one place (package.json)
+3. **Type Safety**: You can specify types like `partitionColType: "bigint"` and use them in the script
+4. **Environment-Specific**: Easy to have different configs for dev, test, and production
+
+## Special Variables
+
+DataSQRL also provides built-in variables:
+- `${DEPLOYMENT_ID}`: Unique identifier for each deployment
+- `${DEPLOYMENT_TIMESTAMP}`: Timestamp when the job was deployed
+
+These are substituted at deployment time and are useful for tracking and versioning.
\ No newline at end of file
diff --git a/documentation/docs/howto/testing-authorization.md b/documentation/docs/howto/testing-authorization.md
new file mode 100644
index 000000000..4e4de83ae
--- /dev/null
+++ b/documentation/docs/howto/testing-authorization.md
@@ -0,0 +1,5 @@
+# Testing Authorization
+
+You can test record filtering, data masking, and other types of authorization-based data access control with DataSQRL's automated test runner via the [`test` command](../compiler#test-command).
+
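+As a starting point, the test runner's per-query header mechanism described in the [interface documentation](../interface) can be used to supply an access token for a test query, e.g. a `myquery.properties` file next to `myquery.graphql` (the token value is a placeholder):
+
+```text
+Authorization: Bearer <access-token-with-the-claims-under-test>
+```
+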
+TODO: Please describe how to generate claims, tokens, and configure those in the package.json and via .properties files.
\ No newline at end of file
diff --git a/documentation/docs/howto/testing.md b/documentation/docs/howto/testing.md
new file mode 100644
index 000000000..9d8d2157c
--- /dev/null
+++ b/documentation/docs/howto/testing.md
@@ -0,0 +1,88 @@
+# Testing
+
+DataSQRL's automated test runner can execute two types of snapshot tests via the [`test` command](../compiler#test-command):
+
+1. Test tables annotated with the `/*+test */` hint
+2. GraphQL operations in the `test-folder` configured under the `test-runner` in the [`package.json`](../configuration).
+
+You can combine both types of tests. All snapshots are written to the same `snapshot-folder`.
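+
+Both folders are configured in the `test-runner` section of the `package.json`, for example (folder names are illustrative):
+
+```json
+{
+  "test-runner": {
+    "test-folder": "tests/",
+    "snapshot-folder": "snapshots/"
+  }
+}
+```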
+
+## Modularizing Sources
+
+In order to separate test data from the real-world data sources of the pipeline, move all `CREATE TABLE` statements into a `connectors/source-prod.sqrl` file which gets imported into the main SQRL script with:
+
+```sql
+IMPORT connectors.source-{{variant}}.*; -- or you can import into a separate namespace and adjust references
+```
+
+Note that `{{variant}}` is a template variable used to switch out the source definitions; it needs to be defined in the [`package.json`](../configuration):
+```json
+{
+ "script": {
+ "main": "my-project.sqrl",
+ "config": {
+ "variant": "prod"
+ }
+ }
+}
+```
+
+## Creating Test Configuration
+
+With the sources modularized out, create a separate test configuration (e.g. `my_project-test-package.json`) for the project:
+```json
+{
+ "script": {
+ "main": "my-project.sqrl",
+ "config": {
+ "variant": "test"
+ }
+ }
+}
+```
+
+## Test Data Sources
+
+Now, create a new SQRL script for the test data sources, `connectors/source-test.sqrl`, which defines the same tables as the production sources but uses static test data in `.jsonl` files. Generate a JSONL file for each table and place it in the `connectors/test-data` folder. Then define the tables based on the originals:
+
+```sql
+CREATE TABLE MyTable (
+  ...same columns as the original source, but replacing METADATA and non-deterministic columns with regular columns and data...
+ WATERMARK FOR `my_timestamp` AS `my_timestamp` - INTERVAL '0.001' SECOND
+)
+ WITH (
+ 'format' = 'flexible-json',
+ 'path' = '${DATA_PATH}/my_table.jsonl',
+ 'source.monitor-interval' = '10000', -- remove for batch sources
+ 'connector' = 'filesystem'
+ );
+```
+
+Refer to the [connectors documentation](../connectors) for more information on how to configure source table connectors.
+
+Guidelines for generating test data:
+* Generate realistic test data that covers the relationship/joins that you want to test
+* Make sure the test data covers common failure scenarios and edge cases
+* Set a static event timestamp on test data so that test execution is deterministic
+* If the original `CREATE TABLE` definition used `METADATA` columns, convert those to regular columns and add static test data
+* If the original `CREATE TABLE` definition used non-deterministic computed columns (e.g. `NOW()`), convert those to regular columns and add static test data.
+* To advance the watermark during test execution, add a dummy record at the end with an event timestamp that is much larger than all other test records' timestamps so the watermark advances enough to close windows and flush out all events (see the example below).
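+
+For example, a `my_table.jsonl` file matching the connector sketch above might end with a dummy record dated far in the future (the `id` and `value` columns are illustrative):
+
+```json
+{"id": 1, "value": "a", "my_timestamp": "2024-01-01T10:00:00.000Z"}
+{"id": 2, "value": "b", "my_timestamp": "2024-01-01T10:00:05.000Z"}
+{"id": 0, "value": "dummy", "my_timestamp": "2099-01-01T00:00:00.000Z"}
+```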
+
+## API Tests
+
+Optionally, define GraphQL mutations, subscriptions, and queries in `.graphql` files inside the configured `test-folder` for `test-runner`. Those queries must be valid queries against the GraphQL schema for the compiled pipeline.
+
+## Run Tests Locally
+
+Use the [`test` command](../compiler#test-command) to execute the tests.
+The first time you run the tests, snapshots are created in the configured `snapshot-folder`. Validate that those snapshots are correct.
+Subsequent executions of the test will compare snapshots and fail if they are unequal.
+
+## Automate Testing
+
+Check the snapshots into the version control system alongside the code and configure your build or CI/CD tool to run the tests automatically.
+
+## Update Tests
+
+As you encounter production issues or discover failures during manual testing, convert those scenarios to test data and execute them as part of your test suite to avoid regressions.
+By setting the event timestamp explicitly, you can precisely recreate a failure scenario and replay it deterministically.
\ No newline at end of file
diff --git a/documentation/docs/interface.md b/documentation/docs/interface.md
index 1c59a310f..c0f4a0dd5 100644
--- a/documentation/docs/interface.md
+++ b/documentation/docs/interface.md
@@ -2,28 +2,58 @@
Based on the SQRL script, DataSQRL generates the interface for the compiled data pipeline. DataSQRL supports the following interfaces:
+* Data Product (Data Lake Views and Database Views)
* GraphQL (Mutations, Queries, and Subscriptions)
* MCP (Tooling and Resources)
* REST (GET and POST)
-* Data Product (Data Lake and Database Views)
-
-The first three are APIs that can be invoked programmatically. These generated API interfaces are configured by the `protocols` [compiler configuration](configuration.md#compiler-compiler).
For data products, DataSQRL generates view definitions as deployment assets in `build/deploy/plan` which can be queried directly.
+The last three are APIs that can be invoked programmatically. The `protocols` [compiler configuration](configuration.md#compiler-compiler) controls which API protocols are exposed by the server.
+
## Data Products
For data products, each visible table defined in the SQRL script is exposed as a view or physical table depending on the pipeline optimization. The mapping between visible tables in the SQRL script and exposed tables in the interface is 1-to-1.
-We recommend generating unique table names for the physical tables by configuring a table-name suffix in the [connector configuration](configuration#connectors-connectors), e.g. by configuring the `table-name` for `postgres` or the `catalog-name` for `iceberg` to `${sqrl:table-name}_MY_SUFFIX` . This separates views from physical tables to provide modularity and support updates without impacting downstream consumers.
+We recommend generating unique table names for the physical tables by configuring a table-name suffix in the [connector configuration](configuration-default), e.g. by configuring the `table-name` for `postgres` or the `catalog-table` for `iceberg` to `${sqrl:table-name}_MY_SUFFIX`. This separates views from physical tables to provide modularity and support updates without impacting downstream consumers.
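+
+For example, such a suffix can be set by overriding the corresponding key of the connector template in the `connectors` section (abridged sketch; the remaining template options are omitted here):
+
+```json
+{
+  "connectors": {
+    "postgres": {
+      "connector": "jdbc-sqrl",
+      "table-name": "${sqrl:table-name}_v1"
+    }
+  }
+}
+```
+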
## APIs
+When a server engine is configured, the tables, relationships, and functions defined in a SQRL script map to API endpoints exposed by the server.
+DataSQRL builds an object-relationship model from the tables and relationships between them. Tables are mapped to objects with each scalar column as a field. Fields that are nested rows are mapped to child objects with field name as the parent-to-child relationship. Relationships defined between tables are mapped to relationships between the corresponding objects.
+
### GraphQL
-DataSQRL uses the GraphQL data model as the base model for all API access to data. The [SQRL language specification](sqrl-language.md#api-mapping) defines how the tables and relationships in the SQRL script map to GraphQL types and fields, as well as how tables map to queries, mutations, and subscriptions.
+DataSQRL uses the GraphQL data model as the base model for all API access to data because there is a natural 1-to-1 mapping between the object-relationship model of a SQRL project and GraphQL schema:
+Each object maps to a type or input and each relationship maps to a relationship field on the respective types.
+
+#### Model-to-Schema Mapping
+
+Specifically, tables and functions are exposed as query endpoints of the same name and argument signature (i.e. the argument names and types match).
+Tables/functions defined with the `SUBSCRIBE` keyword are exposed as subscriptions.
+Internal table sources are exposed as mutations with the input type identical to the columns in the table excluding computed columns.
+
+In addition, the result type of the endpoint matches the schema of the table or function. That means, each field of the result type matches a column or relationship on the table/function by name and the field type is compatible.
+The field type is compatible with the column/relationship type iff:
+* For scalar or collection types there is a native mapping from one type system to the other
+* For structured types (i.e. nested or relationship), the mapping applies recursively.
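+
+For illustration, a function `CustomersByName(name STRING)` returning rows of a `Customer` table with `name` and `email` columns would map to a schema fragment roughly like the following (names, nullability, and scalar mappings are illustrative):
+
+```graphql
+type Query {
+  CustomersByName(name: String!): [Customer!]
+}
+
+type Customer {
+  name: String!
+  email: String
+}
+```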
+
+#### Base Tables
+
+To avoid generating multiple redundant result types in the API interface, the compiler infers the base table for each defined table and function.
+
+The base table for a defined table or function is the right-most table in the relational tree of the SELECT query from the definition body if and only if that table type is equal to the defined table type. If no such table exists, the base table is the table itself.
+
+The result type for a table or function is the result type generated for that table's base table.
+Hidden columns, i.e. columns where the name starts with an underscore `_`, are not included in the generated result type.
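+
+For example, in the following sketch (the `total_spend` column is illustrative) the right-most table of the definition is `Customer` and the filter preserves its table type, so `Customer` is the base table of `HighValueCustomers` and both share a single `Customer` result type in the API:
+
+```sql
+HighValueCustomers := SELECT * FROM Customer WHERE total_spend > 1000;
+```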
+
+#### Schema Generation
-To generate the GraphQL schema from a SQRL script, add the `--api graphql` option to the [compile command](compiler.md#compile-command).
+The compiler generates the GraphQL schema automatically from the SQRL script if no API schema is defined in the [`package.json`](configuration). Add the `--api graphql` flag to the [`compile` command](compiler.md#compile-command) to write the schema to the `schema.v1.graphqls` file in the same directory for inspection or fine-tuning.
+
+#### Schema Customization
+
+If a GraphQL schema is defined, the compiler maps the object-relationship model onto the provided schema. You can write your own GraphQL schema or modify the generated GraphQL schema to control the exposed interface. Any modifications must preserve the mapping to the object-relationship model described above.
You can customize the GraphQL schema by:
* Changing field cardinalities (e.g. `[Person]!` to `Person!`)
@@ -34,26 +64,53 @@ You can customize the GraphQL schema by:
* Adding interfaces and structuring types with interfaces
:::warning
-Changes must be compatible with the underlying data as defined in the SQRL script and the [API mapping](sqrl-language.md#api-mapping).
+The compiler raises errors when the provided GraphQL schema is not compatible with the object-relationship model.
:::
-To use a customized GraphQL schema, configure the schema explicitly in the [script configuration](configuration.md#script-script) or by providing it as a [command argument](compiler.md).
+#### Authoritative Model
+
+DataSQRL uses the GraphQL schema as the authoritative model for all API protocols. It serves as the foundational model on which operations, endpoints, and access patterns are defined. This simplifies the conceptual model and server execution since any API operation maps to a GraphQL query which is executed by a centralized and optimized GraphQL engine.
+
+The GraphQL query execution engine sits at the core of the DataSQRL server engine and executes all requests even if the GraphQL API is not exposed. This ensures uniform execution of all requests and a shared authentication and authorization mechanism for security.
+
+```mermaid
+flowchart TD
+ A[Incoming Request] --> B[HTTP + Authentication]
+ B --> C[Router]
+
+ C --> D[GraphQL]
+ C --> E[REST]
+ C --> F[MCP]
+
+ F --> G[Operations]
+ E --> G
+ D --> H[GraphQL Query Engine]
+ G --> H
+
+ subgraph Server
+ B
+ C
+ D
+ E
+ F
+ G
+ H
+ end
+```
### MCP and REST
+DataSQRL exposes an endpoint in MCP or REST for each GraphQL operation.
DataSQRL generates a list of operations from the GraphQL schema: one for each query and mutation endpoint.
-Queries are mapped to MCP tools with a `Get` prefix and REST endpoints under `rest/queries`. If the arguments are simple scalars, the REST endpoint is GET with URL parameters, otherwise POST with the arguments as payload.
-Mutations are mapped to MCP tools with an `Add` prefix and REST POST endpoints under `rest/mutations`.
-For the result set, DataSQRL follows relationship fields up to a configured depth (and without loops).
+* Queries are mapped to MCP tools with a `Get` prefix and REST endpoints under `rest/queries`. If the arguments are simple scalars, the REST endpoint is GET with URL parameters, otherwise POST with the arguments as payload. For the result set, DataSQRL follows relationship fields up to a configured depth `max-result-depth` (and without loops).
+* Mutations are mapped to MCP tools with an `Add` prefix and REST POST endpoints under `rest/mutations`.
-You can control the default operation generation with the [compiler configuration](configuration.md#compiler-compiler).
+For complete control over the exposed MCP tools and resources as well as REST endpoints, you can define the GraphQL operations explicitly in one or multiple `.graphql` files which are configured under `operations` in the [`package.json`](configuration).
-For complete control over the exposed MCP tools and resources as well as REST endpoints, you can define the operations explicitly in a separate GraphQL file (or multiple files).
+The GraphQL file defining the operations contains named queries or mutations.
+The name of the operation is the name of the MCP tool and REST endpoint and must be unique.
-The GraphQL file defining the operations contains one or multiple query or mutation queries.
-The name of the operation is the name of the MCP tool and REST endpoint.
-
-The `@api` directive controls how the operation is exposed:
+The `@api` directive is applied to the operation to control how it is exposed:
* `rest`: `NONE`, `GET`, or `POST` to configure the HTTP method or not expose as REST endpoint.
* `mcp`: `NONE`, `TOOL`, or `RESOURCE` to configure how the query is exposed in MCP.
+* `uri`: An RFC 6570 template to configure the REST path and MCP resource path. Any operation arguments that are not defined in the URI template are considered part of the payload for REST (and the method must be POST).
@@ -67,4 +124,25 @@ query GetPersonByAge($age: Int!) @api(rest: GET, mcp: TOOL, uri: "/queries/perso
}
```
-This defines an operation `GetPersonByAge` which is the name of the MCP tool and REST endpoint with the path `/queries/personByAge/{age}` using GET method.
\ No newline at end of file
+This defines an operation `GetPersonByAge` which is the name of the MCP tool and REST endpoint with the path `/queries/personByAge/{age}` using GET method.
+
+By default, DataSQRL adds the custom operations to the generated ones. To expose only explicitly defined operations, set the `endpoints` option to `OPS_ONLY` in the [`package.json`](configuration).
+
+### Testing
+
+DataSQRL's automated testing via the [`test` command](compiler#test-command) executes all GraphQL queries inside the [configured](configuration) `test-folder` and snapshots the returned results. Queries are executed in this order:
+
+1. All subscription queries are registered
+2. Mutations are executed sequentially in alphabetical order of filename. The test runner waits the configured `mutation-delay-sec` between mutations. Results are written as snapshots to the snapshot folder.
+3. The test runner waits until the configured timeout.
+4. Queries are executed and results written as snapshots.
+5. All subscription results are sorted and written as snapshots.
+
+If a snapshot already exists, results are compared and the test fails if they are unequal.
+
+The test runner uses the configured `headers` for accessing the API. To test authentication and authorization with different access tokens, create a properties file with the same name as the GraphQL file to configure header properties per query.
+
+For example, if your test folder contains `myquery.graphql` you can configure custom headers for this query in `myquery.properties`:
+```text
+Authorization: Bearer XYZ
+```
\ No newline at end of file
diff --git a/documentation/docs/sqrl-language.md b/documentation/docs/sqrl-language.md
index 025a4dd91..76df3c14a 100644
--- a/documentation/docs/sqrl-language.md
+++ b/documentation/docs/sqrl-language.md
@@ -1,10 +1,9 @@
# SQRL Language Specification
-SQRL is an extension of ANSI SQL βspecifically FlinkSQL β that adds language features for **reactive data processing and serving**: table / function / relationship definitions, built-in source & sink management, and an opinionated DAG planner.
+SQRL is an extension of FlinkSQL that adds support for table functions and convenience syntax to build reactive data processing and serving applications.
 The “R” in **SQRL** stands for *Reactive* and *Relationships*.
-Readers are expected to know basic SQL and the [FlinkSQL syntax](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/sql/overview/).
-This document focuses only on features **unique to SQRL**; when SQRL accepts FlinkSQL verbatim we simply refer to the upstream spec.
+This document focuses only on features **unique to SQRL**; when SQRL accepts FlinkSQL verbatim we simply refer to the [upstream spec](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/sql/overview/).
## Script Structure
@@ -14,14 +13,14 @@ Only one statement is allowed per line, but a statement may span multiple lines.
Typical order of statements:
```text
-IMPORT ... -- import sources from external definitions
-CREATE TABLE ... -- define internal & external sources
-other statements (definitions, hints, inserts, exports...)
-EXPORT ... -- explicit sinks
+IMPORT ... -- import other SQRL scripts
+CREATE TABLE ... -- define internal & external sources
+MyTable := SELECT ... -- define tables or functions (with hints)
+EXPORT MyTable TO ... -- write table data to sinks
```
At compile time the statements form a *directed-acyclic graph* (DAG).
-Each node is then assigned to an execution engine according to the optimizer and the compiler generates the data processing code for that engine.
+Each node is then assigned to an enabled execution engine according to the optimizer and the compiler generates the data processing code for that engine.
## FlinkSQL
@@ -30,6 +29,7 @@ SQRL inherits full FlinkSQL grammar for
* `CREATE {TABLE | VIEW | FUNCTION | CATALOG | DATABASE}`
* `SELECT` queries inside any of the above
* `USE ...`
+* `INSERT INTO`
...with the caveat that SQRL currently tracks **Flink 1.19**; later features may not parse.
@@ -42,42 +42,36 @@ SQRL assigns one of the following types to tables based on the definition:
- **STREAM**: Represents a stream of immutable records with an assigned timestamp (often referred to as the "event time"). Streams are append-only. Stream tables represent events or actions over time.
- **VERSIONED_STATE**: Contains records with a natural primary key and a timestamp, tracking changes over time to each record, thereby creating a change-stream.
- **STATE**: Similar to VERSIONED_STATE but without tracking the history of changes. Each record is uniquely identified by its natural primary key.
-- **LOOKUP**: Supports lookup operations using a primary key but does not allow further processing of the data.
-- **STATIC**: Consists of data that does not change over time, such as constants.
+- **LOOKUP**: Supports lookup operations using a primary key against external data systems but does not allow further processing of the data.
+- **STATIC**: For data that does not change over time, such as constants.
+
+The table type determines what operators a table supports and how those operators are applied.
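+
+For example, deduplicating an append-only STREAM on its key yields a VERSIONED_STATE table (a sketch; the `sensorid` and `event_time` columns are assumed):
+
+```sql
+-- SensorReading is a STREAM of readings; SensorState tracks the latest reading per sensor
+SensorState := DISTINCT SensorReading ON sensorid ORDER BY event_time DESC;
+```
+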
## IMPORT Statement
```
IMPORT qualifiedPath (AS identifier)?;
IMPORT qualifiedPath.*; -- wildcard
-IMPORT qualifiedPath.* AS _; -- hidden wildcard
```
-* **Resolution:** the dotted path maps to a relative directory; the final element is the filename stem (e.g. `IMPORT datasqrl.Customer` β `datasqrl/Customer.table.sql`).
-* **Aliases:** rename the imported object (`AS MyTable`).
-* **Hidden imports:** prefix the alias with `_` *or* alias a wildcard to `_` to import objects without exposing them in the interface.
+Imports another SQRL script into the current script. The `qualifiedPath` is a `.` separated path that maps to the local file system relative to the current script, e.g. `IMPORT my.custom.script` maps to the relative path `./my/custom/script.sqrl`.
-Examples:
-
-```sql
-IMPORT ecommerceTs.Customer; -- visible
-IMPORT ecommerceTs.Customer AS _Hidden; -- hidden
-IMPORT ecommerceTs.* AS _; -- hide entire package
-```
-
-Wild-card imports with aliases *prefix* the alias to all imported table names.
+Imports that end in `.*` are imported inline, which means that the statements from that script are executed verbatim in the current script. Otherwise, imports are available within a namespace that's equal to the name of the script or the optional `AS` identifier.
+Examples:
+* `IMPORT my.custom.script.*`: All table definitions from the script are imported inline and can be referenced directly as `MyTable` in `FROM` clauses.
+* `IMPORT my.custom.script`: Tables are imported into the `script` namespace and can be referenced as `script.MyTable`
+* `IMPORT my.custom.script AS myNamespace`: Tables are imported into the `myNamespace` namespace and can be referenced as `myNamespace.MyTable`
## CREATE TABLE (internal vs external)
-SQRL understands the complete FlinkSQL `CREATE TABLE` syntax, but distinguishes between **internal** and **external** source tables. External source tables are standard FlinkSQL tables that connect to an internal data source. Internal tables connect to a data source that is managed by SQRL (depending on the configured `log` engine, e.g. a Kafka topic) and exposed for inserts in the interface.
+SQRL understands the complete FlinkSQL `CREATE TABLE` syntax, but distinguishes between **internal** and **external** source tables. External source tables are standard FlinkSQL tables that connect to an external data source (e.g. database or Kafka cluster). Internal tables connect to a data source that is managed by SQRL (depending on the configured `log` engine, e.g. a Kafka topic) and exposed for data ingestion in the interface.
| Feature | Internal source (managed by SQRL) | External Source (connector) |
|-------------------------------|-----------------------------------------------------------------|-----------------------------|
| Connector clause `WITH (...)` | **omitted** | **required** |
-| Computed columns | Evaluated **on insert** | Delegated to connector |
| Metadata columns | `METADATA FROM 'uuid'`, `'timestamp'` are recognised by planner | Passed through |
-| Watermark spec | Optional | Passed through |
+| Watermark spec | **generated** | **required** |
| Primary key | *Unenforced* upsert semantics | Same as Flink |
Example (internal):
@@ -182,7 +176,7 @@ Customer.highValueOrders(minAmount BIGINT) := SELECT * FROM Orders o WHERE o.cus
TableName.new_col := expression;
```
-Must appear **immediately after** the table it extends, and may reference previously added columns of the same table.
+Must appear **immediately after** the table definition it extends, and may reference previously added columns of the same table. Cannot be applied after `CREATE TABLE` statements.
### Passthrough definitions
@@ -202,6 +196,7 @@ TableName RETURNS (column TYPE [NOT NULL], ...) :=
- SQRL will not validate, parse, or optimize the query
- Only use when SQRL lacks native support for the required functionality
- Return type must be explicitly declared using the `RETURNS` clause
+- Passthrough queries must be assigned to a database engine for execution
The following defines a relationship definition with a recursive CTE that is passed through to the
database engine for execution.
@@ -238,7 +233,7 @@ How a table or function is exposed in the interface depends on the access type.
|---------------------|-------------------------------------------------------|---------------------------------------------|
| **Query** (default) | no modifier | GraphQL query / SQL view / log topic (pull) |
| **Subscription** | prefix body with `SUBSCRIBE` | GraphQL subscription / push topic |
-| **None** | object name starts with `_` *or* `/*+no_query*/` hint | hidden |
+| **None** | object name starts with `_` *or* `/*+no_query*/` hint | not exposed |
Example:
@@ -246,6 +241,22 @@ Example:
HighTempAlert := SUBSCRIBE
SELECT * FROM SensorReading WHERE temperature > 50;
```
+Defines a subscription endpoint that is exposed as a GraphQL subscription or Kafka topic depending on engine configuration.
+
+```sql
+HighTemperatures := SELECT * FROM SensorReading WHERE temperature > 50;
+```
+Defines a query table that is exposed as a query in the API or view in the database.
+
+```sql
+HighTemperatures(temp BIGINT NOT NULL) := SELECT * FROM SensorReading WHERE temperature > :temp;
+```
+Defines a query function that is exposed as a parametrized query in the API.
+
+```sql
+_HighTemps := SELECT * FROM SensorReading WHERE temperature > 50;
+```
+Defines an internal table that is not exposed in the interface.
### CREATE TABLE
@@ -279,8 +290,8 @@ Hints live in a `/*+ ... */` comment placed **immediately before** the definitio
 | **index** | `index(type, col, ...)`<br/>Multiple `index(...)` can be comma-separated | table | override automatic index selection. `type` ∈ `HASH`, `BTREE`, `TEXT`, `VECTOR_COSINE`, `VECTOR_EUCLID`.<br/>`index` *alone* disables all automatic indexes |
| **partition_key** | `partition_key(col, ...)` | table | define partition columns for sinks that support partitioning |
| **vector_dim** | `vector_dim(col, 1536)` | table | declare fixed vector length. This is required when using vector indexes. |
-| **query_by_all** | `query_by_all(col, ...)` | table | generate interface with *required* filter arguments |
-| **query_by_any** | `query_by_any(col, ...)` | table | generate interface with *optional* filter arguments |
+| **query_by_all** | `query_by_all(col, ...)` | table | generate interface with *required* filter arguments for all listed columns |
+| **query_by_any** | `query_by_any(col, ...)` | table | generate interface with *optional* filter arguments for all listed columns |
| **no_query** | `no_query` | table | hide from interface |
 | **insert** | `insert(type)` | table | controls how mutations are written to their target sink. `type` ∈ `SINGLE` (default), `BATCH`, `TRANSACTION` |
| **ttl** | `ttl(duration)` | table | specifies how long the records for this table are retained in the underlying data system before it can be discarded. Expects a duration string like `5 week`. Disabled by default. |
@@ -290,13 +301,55 @@ Hints live in a `/*+ ... */` comment placed **immediately before** the definitio
| **test** | `test` | table | marks test case, only executed with [`test` command](compiler#test-command). |
| **workload** | `workload` | table | retained as sink for DAG optimization but hidden from interface |
-Example:
+This example configures a primary key and vector index for the `SensorTempByHour` table:
```sql
/*+primary_key(sensorid, time_hour), index(VECTOR_COSINE, embedding) */
SensorTempByHour := SELECT ... ;
```
+### Testing
+
+Add test cases to SQRL scripts with the `/*+test */` hint in front of a table definition:
+
+```sql
+/*+test */
+InvalidCustomers := SELECT * FROM Customer WHERE name = '' OR email IS NULL ORDER BY customerid;
+```
+
+Test annotated tables are only executed when running the [`test` command](compiler#test-command) and otherwise ignored.
+DataSQRL queries all test tables at the end of the test and snapshots the results in the [configured](configuration) `snapshot-folder`.
+
+:::warning
+Ensure that test tables have a well-defined order and that only predictable columns are selected so that the results are stable between test runs.
+:::
+
+
+---
+## NEXT_BATCH
+
+```sql
+NEXT_BATCH;
+```
+
+For SQRL pipelines where the data processing is in batch (i.e. `"execution.runtime-mode": "BATCH"` in the [Flink configuration](configuration-engine/flink)), use the `NEXT_BATCH` statement to break the data processing into multiple sub-batches that are executed sequentially, proceeding with the next sub-batch only if the previous one succeeded.
+
+The batch allocation only applies to `EXPORT .. TO ..` statements. All interfaces are computed in the last batch.
+
+In the following example, the `NEXT_BATCH` guarantees that the `PreprocessedData` is written completely to the sink before processing continues in the next sub-batch.
+
+```sql
+PreprocessedData := SELECT ...;
+EXPORT PreprocessedData TO PreprocessorSink;
+NEXT_BATCH;
+...continue processing...
+```
+
+:::warning
+Sub-batches are executed stand-alone, meaning each sub-batch reads the data from the sources and not from the intermediate results of the previous sub-batch. If you wish to start from those intermediate results, you need to explicitly write them out and read them back in.
+:::
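+
+A minimal sketch of that pattern, assuming a `warehouse` connector package with a matching sink and source definition (names are illustrative):
+
+```sql
+IMPORT warehouse.PreprocessedResults;  -- source that reads back what the sink below writes
+
+PreprocessedData := SELECT ...;
+EXPORT PreprocessedData TO warehouse.PreprocessedSink;
+NEXT_BATCH;
+FinalStats := SELECT ... FROM PreprocessedResults;
+```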
+
+
---
## Comments & Doc-strings
@@ -320,51 +373,25 @@ The following produce compile time errors:
---
-## Cheat Sheet
-
-| Construct | Example |
-|------------------|------------------------------------------------------------------------|
-| Import package | `IMPORT ecommerceTs.* ;` |
-| Hidden import | `IMPORT ecommerceTs.* AS _ ;` |
-| Internal table | `CREATE TABLE Orders ( ... );` |
-| External table | `CREATE TABLE kafka_table (...) WITH ('connector'='kafka');` |
-| Table def. | `BigOrders := SELECT * FROM Orders WHERE amount > 100;` |
-| Distinct | `Dedup := DISTINCT Events ON id ORDER BY ts DESC;` |
-| Function | `OrdersById(id BIGINT) := SELECT * FROM Orders WHERE id = :id;` |
-| Relationship | `Customer.orders := SELECT * FROM Orders WHERE this.id = customerid;` |
-| Column add | `Orders.total := quantity * price;` |
-| Passthrough func | `Func(id BIGINT) RETURNS (col INT) := SELECT raw_sql WHERE id = :id;` |
-| Subscription | `Alerts := SUBSCRIBE SELECT * FROM Dedup WHERE level='WARN';` |
-| Export | `EXPORT Alerts TO logger.Warnings;` |
-| Hint | `/*+index(hash,id)*/` |
-
-## API Mapping
-
-When a server engine is configured, the tables, relationships, and functions defined in a SQRL script map to API endpoints exposed by the server.
-
-### GraphQL
+
-Tables and functions are exposed as query endpoints of the same name and argument signature (i.e. the argument names and types match).
-Tables/functions defined with the `SUBSCRIBE` keyword are exposed as subscriptions.
-Internal table sources are exposed as mutations with the input type identical to the columns in the table excluding computed columns.
-
-In addition, the result type of the endpoint matches the schema of the table or function. That means, each field of the result type matches a column or relationship on the table/function by name and the field type is compatible.
-The field type is compatible with the column/relationship type iff:
-* For scalar or collection types there is a native mapping from one type system to the other
-* For structured types (i.e. nested or relationship), the mapping applies recursively.
-
-The compiler generates the GraphQL schema automatically from the SQRL script. Add the `--api graphql` flag to the [compile command](compiler.md#compile-command) to write the schema to the `schema.graphqls` file.
-
-You can modify the GraphQL schema and pass it as an additional argument to the compiler to fully control the interface. Any modifications must preserve the mapping described above.
-
-### Base Tables
-
-To avoid generating multiple redundant result types in the API interface, the compiler infers the base table.
+## Cheat Sheet
-The base table for a defined table or function is the right-most table in the relational tree of the SELECT query from the definition body if and only if that table type is equal to the defined table type. If no such table exists, the base table is the table itself.
+| Construct | Example |
+|------------------|-----------------------------------------------------------------------|
+| Import package | `IMPORT mypackage.sources AS mySources;` |
+| Internal table | `CREATE TABLE Orders ( ... );` |
+| External table | `CREATE TABLE kafka_table (...) WITH ('connector'='kafka');` |
+| Table def. | `BigOrders := SELECT * FROM Orders WHERE amount > 100;` |
+| Distinct | `Dedup := DISTINCT Events ON id ORDER BY ts DESC;` |
+| Function | `OrdersById(id BIGINT) := SELECT * FROM Orders WHERE id = :id;` |
+| Relationship | `Customer.orders := SELECT * FROM Orders WHERE this.id = customerid;` |
+| Column add | `Orders.total := quantity * price;` |
+| Passthrough func | `Func(id BIGINT) RETURNS (col INT) := SELECT raw_sql WHERE id = :id;` |
+| Subscription | `Alerts := SUBSCRIBE SELECT * FROM Dedup WHERE level='WARN';` |
+| Export | `EXPORT Alerts TO logger.Warnings;` |
+| Hint | `/*+index(hash,id)*/` |
-The result type for a table or function is the result type generated for that table's base table.
-Hidden columns, i.e. columns where the name starts with an underscore `_`, are not included in the generated result type.
## More Information
diff --git a/documentation/sidebars.ts b/documentation/sidebars.ts
index 310b86dea..0594ae998 100644
--- a/documentation/sidebars.ts
+++ b/documentation/sidebars.ts
@@ -53,13 +53,13 @@ const sidebars = {
},
{
type: 'doc',
- id: 'compiler',
- label: 'π οΈ Compiler',
+ id: 'connectors',
+ label: 'π Source & Sink Connectors',
},
{
type: 'doc',
- id: 'connectors',
- label: 'π Source & Sink Connectors',
+ id: 'interface',
+ label: 'π Interface',
},
{
type: 'category',
@@ -111,11 +111,6 @@ const sidebars = {
},
],
},
- {
- type: 'doc',
- id: 'interface',
- label: 'π Interface',
- },
{
type: 'category',
label: 'π’ Functions',
@@ -136,6 +131,11 @@ const sidebars = {
},
],
},
+ {
+ type: 'doc',
+ id: 'compiler',
+ label: 'π οΈ Compiler',
+ },
{
type: 'doc',
id: 'concepts',
@@ -143,19 +143,22 @@ const sidebars = {
}
],
},
+ {
+ type: 'doc',
+ id: 'tutorials',
+ label: 'π Tutorials',
+ },
{
type: 'category',
label: 'π How To',
+ link: {
+ type: 'generated-index',
+ description: 'Practical guides for developing data pipelines with DataSQRL',
+ },
items: [
{
- type: 'doc',
- id: 'tutorials',
- label: 'π Tutorials',
- },
- {
- type: 'doc',
- id: 'howto',
- label: 'π§© How To Guides',
+ type: 'autogenerated',
+ dirName: 'howto',
},
],
},
diff --git a/sqrl-planner/src/main/java/com/datasqrl/config/CompilerApiConfigImpl.java b/sqrl-planner/src/main/java/com/datasqrl/config/CompilerApiConfigImpl.java
index 77f5a0d9b..49eb8ab7f 100644
--- a/sqrl-planner/src/main/java/com/datasqrl/config/CompilerApiConfigImpl.java
+++ b/sqrl-planner/src/main/java/com/datasqrl/config/CompilerApiConfigImpl.java
@@ -58,8 +58,10 @@ public int getDefaultLimit() {
}
public enum Endpoints {
- OPS_ONLY,
- GRAPHQL,
- FULL;
+ OPS_ONLY, // only support the pre-defined operations in the GraphQL API, do not support flexible
+ // GraphQL queries TODO: not yet implemented
+ GRAPHQL, // support flexible GraphQL API but only pre-defined operations for other protocols
+ FULL; // support flexible GraphQL API and add generated operations from GraphQL schema to
+ // pre-defined ones for other protocols
}
}
From 317894ab8fdc49a05d50731649bf4f3b41c1b012 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Thu, 9 Oct 2025 18:04:51 -0700
Subject: [PATCH 12/31] update stream enrichment howto
---
documentation/docs/howto/stream-enrichment.md | 62 +++++++++++++++----
1 file changed, 50 insertions(+), 12 deletions(-)
diff --git a/documentation/docs/howto/stream-enrichment.md b/documentation/docs/howto/stream-enrichment.md
index 54dcaad6b..3b9a5ae78 100644
--- a/documentation/docs/howto/stream-enrichment.md
+++ b/documentation/docs/howto/stream-enrichment.md
@@ -1,41 +1,79 @@
# Enriching Data Streams
-A common requirement is to enrich a stream of events with dimensional data associated with those events in a time-consistent manner.
+A common requirement in stream processing is to enrich a **STREAM** of events with dimensional data in a time-consistent manner. This pattern is particularly useful for joining real-time events with slowly changing dimensional data.
-For example, suppose we want to enrich transactions with user account balances:
+## Use Case: Transaction Enrichment
+
+Suppose we want to enrich transaction events with the account balance that was valid **at the time of the transaction**. This ensures we get consistent, point-in-time data for analysis.
+
+## Defining Source Tables
+
+First, define your data sources. These can be internal tables (managed by DataSQRL) or external tables with connector configuration:
```sql
+-- Transaction events stream (STREAM type)
CREATE TABLE Transaction (
`txid` BIGINT NOT NULL,
`accountid` BIGINT NOT NULL,
`amount` DECIMAL(10,2) NOT NULL,
`timestamp` TIMESTAMP_LTZ(3) NOT NULL,
WATERMARK FOR `timestamp` AS `timestamp` - INTERVAL '0.001' SECOND
-) WITH (...);
+);
+-- Account balance updates stream (STREAM type)
CREATE TABLE AccountBalanceUpdates (
`accountid` BIGINT NOT NULL,
`balance` DECIMAL(15,2) NOT NULL,
`lastUpdated` TIMESTAMP_LTZ(3) NOT NULL,
WATERMARK FOR `lastUpdated` AS `lastUpdated` - INTERVAL '0.001' SECOND
-) WITH (...);
+);
```
-Those can be internal or external table sources with or without connector configuration.
-The important piece is that Transaction is a stream of transaction events and AccountBalanceUpdates is a changelog stream for the AccountBalance entity related to the transaction stream by accountid.
+## Creating a Versioned State Table
-To join in the account balance to the transaction, we want to ensure that we get the balance **at the time of the transaction** for the join to be consistent in time.
+The `Transaction` table is a **STREAM** of immutable transaction events. The `AccountBalanceUpdates` table is also a **STREAM** representing changes to account balances over time.
-To accomplish this, we first have to convert the append-only AccountBalanceUpdates stream to a versioned state table by distincting the stream on primary key:
+To perform temporal joins, we need to convert the balance updates stream into a **VERSIONED_STATE** table that tracks the current and historical values for each account:
```sql
AccountBalance := DISTINCT AccountBalanceUpdates ON accountid ORDER BY lastUpdated DESC;
```
-We can then join the versioned state table to the stream with a temporal join:
+This `DISTINCT` operation:
+- Converts the append-only stream into a **VERSIONED_STATE** table
+- Deduplicates on the `accountid` primary key
+- Orders by `lastUpdated DESC` to ensure the most recent balance is kept for each account
+- Maintains the version history for temporal lookups
+
+## Temporal Join for Stream Enrichment
+
+Now we can perform a temporal join to enrich each transaction with the account balance that was valid at the transaction timestamp:
```sql
-EnrichedTransaction := SELECT t.*, a.* FROM Transaction t
- JOIN AccountBalance FOR SYSTEM_TIME AS OF t.`timetamp` a ON a.accountid = t.accountid;
+EnrichedTransaction := SELECT t.*, a.balance, a.lastUpdated as balance_timestamp
+ FROM Transaction t
+ JOIN AccountBalance FOR SYSTEM_TIME AS OF t.`timestamp` AS a
+ ON a.accountid = t.accountid;
+```
+
+The `FOR SYSTEM_TIME AS OF` syntax ensures that:
+- Each transaction gets the account balance that was valid at `t.timestamp`
+- If no balance record exists at that time, the join returns no result for that transaction
+- The join is temporally consistent and deterministic
+
+## Key Benefits
+
+This approach provides:
+
+1. **Temporal Consistency**: Each transaction is enriched with the balance that existed at transaction time
+2. **Late Data Handling**: The watermark configuration allows for slightly out-of-order events
+3. **Efficient Processing**: The VERSIONED_STATE table enables fast temporal lookups
+4. **Scalability**: The pattern works with high-volume streams and frequent balance updates
+
+## Variations
+
+For different requirements, you might adjust the pattern:
-```
\ No newline at end of file
+- **Left Join**: Use `LEFT JOIN` to include transactions even when no balance is available (see the sketch after this list)
+- **Multiple Dimensions**: Join with multiple VERSIONED_STATE tables for comprehensive enrichment
+- **Window-based Enrichment**: Combine with time windows for aggregated enrichment data
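+
+A minimal sketch of the left-join variant, based on the tables defined above (transactions without a matching balance are kept and their balance columns are `NULL`):
+
+```sql
+EnrichedTransactionAll := SELECT t.*, a.balance
+  FROM Transaction t
+  LEFT JOIN AccountBalance FOR SYSTEM_TIME AS OF t.`timestamp` AS a
+  ON a.accountid = t.accountid;
+```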
\ No newline at end of file
From 62e5546d41042777f8c492c0b9cdee2335281765 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Thu, 9 Oct 2025 18:22:28 -0700
Subject: [PATCH 13/31] polishing connectors.md
---
documentation/docs/connectors.md | 46 ++++++--------------------------
1 file changed, 8 insertions(+), 38 deletions(-)
diff --git a/documentation/docs/connectors.md b/documentation/docs/connectors.md
index 0320b2b68..f7abdf2f7 100644
--- a/documentation/docs/connectors.md
+++ b/documentation/docs/connectors.md
@@ -1,9 +1,5 @@
# Connecting External Data Sources and Sinks
-ALWAYS USE EVENT TIME
-
-STATE vs STREAM
-
Use `CREATE TABLE` statements to connect external data sources and sinks with your SQRL script using the `WITH` clause to provide connector configuration.
DataSQRL uses Apache Flink connectors and formats. To find a connector for your data system, use:
@@ -15,36 +11,13 @@ DataSQRL uses Apache Flink connectors and formats. To find a connector for your
## Connector Management
-The best practice for managing connectors in your DataSQRL project is to create a folder for each system that you are
-connecting to and place all source or sink `CREATE TABLE` statements in separate files ending in `.table.sql` in that folder.
-You can then import from and export to those sources and sinks in the SQRL script.
+The best practice for managing connectors in your DataSQRL project is to place all `CREATE TABLE` statements for one data source in a single `.sqrl` file inside the `connectors` folder. This keeps sources and sinks modular, supports reuse, and makes it easy to swap connectors for testing or for different environments. [Import](sqrl-language#import-statement) those connector files into your main script.
+
+We **strongly encourage** event-time processing and configuring a proper watermark on all sources. While processing time is supported, only event time ensures consistent data processing and sane, reproducible results.
-For example, to ingest data from the `User` and `Transaction` topics of a Kafka cluster, you would:
-1. Create a sub-directory `kafka-sources` in your project directory that contains your SQRL script
-2. Create two files `user.table.sql` and `transaction.table.sql`.
-3. Each file contains a `CREATE TABLE` statement that defines columns for each field in the message and a `WITH` clause
- that contains the connector configuration. They will look like this:
- ```sql
- CREATE TABLE User (
- user_id BIGINT,
- user_name STRING,
- last_updated TIMESTAMP_LTZ(3) NOT NULL METADATA FROM 'timestamp',
- WATERMARK FOR last_updated AS last_updated - INTERVAL '1' SECOND
- WATERMARK
- ) WITH (
- 'connector' = 'kafka',
- 'topic' = 'user',
- 'properties.bootstrap.servers' = 'localhost:9092',
- 'properties.group.id' = 'user-consumer-group',
- 'scan.startup.mode' = 'earliest-offset',
- 'format' = 'avro',
- );
- ```
-4. Import those sources into your SQRL script with `IMPORT kafak-sources.User;`
-5. Keep sources and sinks in separate folders (e.g. `kafka-sink`)
+When ingesting data from external data sources, it is important to note the [type of table](sqrl-language#type-system) you are creating: an append-only `STREAM` (e.g. with the `filesystem` connector), a `VERSIONED_STATE` retraction stream (e.g. with the `upsert-kafka` connector), or a `LOOKUP` table (e.g. with the `jdbc` connector). The table type determines what operations a table supports and how it should be processed.
-By following this structure, you modularize your sources and sinks from your processing logic
-which makes it easier to read and maintain.
+Specifically, entity data is often ingested as a stream of updates. To re-create the underlying entity `VERSIONED_STATE` table, use the `DISTINCT` statement with the entity's primary key.
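+
+For example (a sketch, assuming an `AccountBalanceUpdates` changelog stream keyed by `accountid` with a `lastUpdated` timestamp):
+
+```sql
+AccountBalance := DISTINCT AccountBalanceUpdates ON accountid ORDER BY lastUpdated DESC;
+```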
## External Schemas
@@ -53,15 +26,12 @@ For example, Avro is a popular schema language for encoding messages in Kafka to
It can be very cumbersome to convert that schema to SQL and maintain that translation.
With DataSQRL, you can easily create a table that fetches the schema from a given avro schema file.
-Following the example above and assuming that the schema for the `User` topic is `user.avsc`,
-and that file is placed next to the `user.table.sql` file, then a `LIKE ` statement can be added
-to the `CREATE TABLE` statement, so in the `user.table.sql` we only need to define the metadata, the watermark,
-and the connector options:
+Assuming that the schema for the `User` topic is `user.avsc`,
+and that file is placed next to the `sources.sqrl` file in the `connectors` folder, add `LIKE ` to the `CREATE TABLE` statement which populates the table schema from the avro file:
```sql
CREATE TABLE User (
last_updated TIMESTAMP_LTZ(3) NOT NULL METADATA FROM 'timestamp',
WATERMARK FOR last_updated AS last_updated - INTERVAL '1' SECOND
- WATERMARK
) WITH (
'connector' = 'kafka',
'topic' = 'user',
@@ -74,4 +44,4 @@ CREATE TABLE User (
:::info
We can even include files from other folder via relative path, but in most cases it makes sense to put the schema file next to the table sql.
-:::
+:::
\ No newline at end of file
From 07cb6ed4b5f469d922675e236677016eb9eb60aa Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Thu, 9 Oct 2025 18:30:59 -0700
Subject: [PATCH 14/31] polishing connectors.md
---
documentation/docs/connectors.md | 60 +++++++++++++++++++++++---------
1 file changed, 44 insertions(+), 16 deletions(-)
diff --git a/documentation/docs/connectors.md b/documentation/docs/connectors.md
index f7abdf2f7..c00fc6312 100644
--- a/documentation/docs/connectors.md
+++ b/documentation/docs/connectors.md
@@ -19,29 +19,57 @@ When ingesting data from external data sources it is important to note the [type
Specifically, entity data is often ingested as a stream of updates. To re-create the underlying entity `VERSIONED_STATE` table, use the `DISTINCT` statement with the entity's primary key.
-## External Schemas
+## LIKE Clause for Schema Loading
-When ingesting data from external systems, the schema is often defined in or by those systems.
-For example, Avro is a popular schema language for encoding messages in Kafka topics.
-It can be very cumbersome to convert that schema to SQL and maintain that translation.
+DataSQRL supports automatic schema loading from external schema files using the `LIKE` clause. This eliminates the need to write column definitions by hand when the schema already exists in a supported format.
+
+```sql
+CREATE TABLE MyTable (
+ ...
+) WITH (
+ ...
+) LIKE 'mytable.avsc';
+```
+
+The `LIKE` clause:
+- **Automatically populates** column definitions from the referenced schema file
+- **Maintains schema consistency** between your data pipeline and external systems
+- **Reduces maintenance overhead** by eliminating manual schema translations
+- **Supports relative paths** to schema files in your project structure
+
+DataSQRL currently supports loading schemas from:
+- **Avro schema files** (`.avsc`)
+
+Assuming you have an Avro schema file `user.avsc` for a Kafka topic:
-With DataSQRL, you can easily create a table that fetches the schema from a given avro schema file.
-Assuming that the schema for the `User` topic is `user.avsc`,
-and that file is placed next to the `sources.sqrl` file in the `connectors` folder, add `LIKE ` to the `CREATE TABLE` statement which populates the table schema from the avro file:
```sql
CREATE TABLE User (
last_updated TIMESTAMP_LTZ(3) NOT NULL METADATA FROM 'timestamp',
WATERMARK FOR last_updated AS last_updated - INTERVAL '1' SECOND
) WITH (
'connector' = 'kafka',
- 'topic' = 'user',
- 'properties.bootstrap.servers' = 'localhost:9092',
- 'properties.group.id' = 'user-consumer-group',
- 'scan.startup.mode' = 'earliest-offset',
- 'format' = 'avro',
-) LIKE `user.avsc`;
+ ...
+) LIKE 'user.avsc';
+```
+
+In this example:
+- The `LIKE 'user.avsc'` clause loads all column definitions from the Avro schema
+- You add **metadata columns** (like `last_updated`) and **watermark specifications**
+- The **connector configuration** remains in the `WITH` clause as usual
+
+### Inferring Schema from Data Files
+
+To automatically discover the schema of a JSONL or CSV file, specify the file name in the `LIKE` clause.
+In addition to generating the table columns based on the inferred schema of the data, this also configures the `filesystem` connector to access the data.
+
+For example, if you have a `users.jsonl` file in the `connectors` directory, you can define the `User` table simply as:
+
+```sql
+CREATE TABLE User (
+ WATERMARK FOR last_updated AS last_updated - INTERVAL '1' SECOND
+) WITH (
+  'source.monitor-interval' = '10 sec' -- remove this option for batch processing
+) LIKE 'users.jsonl';
```
-:::info
-We can even include files from other folder via relative path, but in most cases it makes sense to put the schema file next to the table sql.
-:::
\ No newline at end of file
+This syntax is useful when building DataSQRL projects from data files since it eliminates manual schema creation.
\ No newline at end of file
From aacf8a51bd993a87c77ac627891f6807b62cbd0d Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Thu, 9 Oct 2025 18:37:49 -0700
Subject: [PATCH 15/31] update formatting
---
.../docs/configuration-default-update.sh | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
diff --git a/documentation/docs/configuration-default-update.sh b/documentation/docs/configuration-default-update.sh
index 209b5ee86..256d8c63f 100755
--- a/documentation/docs/configuration-default-update.sh
+++ b/documentation/docs/configuration-default-update.sh
@@ -1,4 +1,20 @@
#!/bin/bash
+#
+# Copyright Β© 2021 DataSQRL (contact@datasqrl.com)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
# Script to update configuration-default.md with the latest default-package.json content
# This script replaces everything between ```json and ``` with the contents of default-package.json
From 5de49cd8479fe373f9ea7a0b5bab1dec95a94db1 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Thu, 9 Oct 2025 18:52:22 -0700
Subject: [PATCH 16/31] update formatting
---
documentation/docs/interface.md | 3 +++
.../main/java/com/datasqrl/config/CompilerApiConfigImpl.java | 4 ++--
.../planner/analyzer/cost/SimpleCostAnalysisModel.java | 2 +-
3 files changed, 6 insertions(+), 3 deletions(-)
diff --git a/documentation/docs/interface.md b/documentation/docs/interface.md
index c0f4a0dd5..ed405b60d 100644
--- a/documentation/docs/interface.md
+++ b/documentation/docs/interface.md
@@ -116,6 +116,7 @@ The `@api` directive is applied to the directive to control how the operation is
* `uri`: An RFC 6570 template that configures the REST path and MCP resource path. Any operation arguments that are not defined in the uri template are considered part of the payload for REST (and the method must be POST).
```graphql
+""" Returns up to 10 people for a given age """
query GetPersonByAge($age: Int!) @api(rest: GET, mcp: TOOL, uri: "/queries/personByAge/{age}") {
Person(age: $age, limit: 10, offset: 0) {
name
@@ -126,6 +127,8 @@ query GetPersonByAge($age: Int!) @api(rest: GET, mcp: TOOL, uri: "/queries/perso
This defines an operation `GetPersonByAge` which is the name of the MCP tool and REST endpoint with the path `/queries/personByAge/{age}` using GET method.
+The doc-strings on operations are used in the API and tooling documentation.
+
By default, DataSQRL will add the custom operations to the generated ones. To only expose explicitly defined operations set `endpoints` option to `OPS_ONLY` in the [`package.json`](configuration).
### Testing
diff --git a/sqrl-planner/src/main/java/com/datasqrl/config/CompilerApiConfigImpl.java b/sqrl-planner/src/main/java/com/datasqrl/config/CompilerApiConfigImpl.java
index 49eb8ab7f..ad4b022e0 100644
--- a/sqrl-planner/src/main/java/com/datasqrl/config/CompilerApiConfigImpl.java
+++ b/sqrl-planner/src/main/java/com/datasqrl/config/CompilerApiConfigImpl.java
@@ -59,9 +59,9 @@ public int getDefaultLimit() {
public enum Endpoints {
OPS_ONLY, // only support the pre-defined operations in the GraphQL API, do not support flexible
- // GraphQL queries TODO: not yet implemented
+ // GraphQL queries TODO: not yet implemented
GRAPHQL, // support flexible GraphQL API but only pre-defined operations for other protocols
FULL; // support flexible GraphQL API and add generated operations from GraphQL schema to
- // pre-defined ones for other protocols
+ // pre-defined ones for other protocols
}
}
diff --git a/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java b/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java
index 913f3603f..51816a6af 100644
--- a/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java
+++ b/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java
@@ -27,7 +27,7 @@ public record SimpleCostAnalysisModel(@NonNull Type type) implements CostModel {
public enum Type {
DEFAULT, // Favors processing data at ingestion time unless the operation is too expensive (e.g.
- // inner join)
+ // inner join)
READ, // Favors processing data at query time
WRITE // Favors processing data at ingestion time
}
From d501c0bae0b39a99f2efe0aca9a9420175ef04a8 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Thu, 9 Oct 2025 19:32:41 -0700
Subject: [PATCH 17/31] update intro
---
documentation/docs/intro.md | 13 +++++++++++--
1 file changed, 11 insertions(+), 2 deletions(-)
diff --git a/documentation/docs/intro.md b/documentation/docs/intro.md
index c30d9ea2c..9a9b5c281 100644
--- a/documentation/docs/intro.md
+++ b/documentation/docs/intro.md
@@ -31,14 +31,22 @@ SQRL extends Flink SQL with features specifically designed for reactive data pro
- **Subscription syntax** for real-time data streaming
- **Type system** for stream processing semantics
-### 2. [Configuration](configuration)
+### 2. [Interface Design](interface)
+DataSQRL automatically generates interfaces from your SQRL script for multiple protocols:
+- **Data Products** as database/data lake views and tables
+- **GraphQL APIs** with queries, mutations, and subscriptions
+- **REST endpoints** with GET/POST operations
+- **MCP tools/resources** for AI agent integration
+- **Schema customization** and operation control
+
+### 3. [Configuration](configuration)
JSON configuration files that define:
- **Engines**: Data technologies (Flink, Postgres, Kafka, etc.)
- **Connectors**: Templates for data sources and sinks
- **Dependencies**: External data packages and libraries
- **Compiler options**: Optimization and deployment settings
-### 3. [Compiler](compiler)
+### 4. [Compiler](compiler)
The DataSQRL compiler:
- **Transpiles** SQRL scripts into deployment assets
- **Optimizes** data processing DAGs across multiple engines
@@ -53,6 +61,7 @@ The DataSQRL compiler:
### π **Core Documentation**
- [**SQRL Language**](sqrl-language) - Complete language specification and syntax
+- [**Interface Design**](interface) - API generation and data product interfaces
- [**Configuration**](configuration) - Engine setup and project configuration
- [**Compiler**](compiler) - Command-line interface and compilation options
- [**Functions**](functions) - Built-in functions and custom function libraries
From 4c73aa9aa294500f0987ee16f1191740a41f0bd5 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Fri, 10 Oct 2025 10:42:19 -0700
Subject: [PATCH 18/31] remove outdated context generator
---
.../docs/documentation_context_generator.sh | 17 -----------------
1 file changed, 17 deletions(-)
delete mode 100644 documentation/docs/documentation_context_generator.sh
diff --git a/documentation/docs/documentation_context_generator.sh b/documentation/docs/documentation_context_generator.sh
deleted file mode 100644
index 27f01920f..000000000
--- a/documentation/docs/documentation_context_generator.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright Β© 2021 DataSQRL (contact@datasqrl.com)
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-cat sqrl-language.md configuration.md compiler.md concepts.md connectors.md howto.md functions.md functions-docs/function-docs/library-functions.md functions-docs/function-docs/system-functions.md getting-started.md > datasqrl_documentation.md
\ No newline at end of file
From 5441bde3a2caff95ed27e29166478a332b074191 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Fri, 10 Oct 2025 11:22:14 -0700
Subject: [PATCH 19/31] updated stdlib-docs to latest main
---
documentation/docs/stdlib-docs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/documentation/docs/stdlib-docs b/documentation/docs/stdlib-docs
index 655e7a9dc..26e095b58 160000
--- a/documentation/docs/stdlib-docs
+++ b/documentation/docs/stdlib-docs
@@ -1 +1 @@
-Subproject commit 655e7a9dcb9939c7375e402fac3d6e5e5866d625
+Subproject commit 26e095b582be571a28dea1207fb45063754fa530
From a548a9f5236f413cce9c37306758546a5e4345ca Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Fri, 10 Oct 2025 11:45:11 -0700
Subject: [PATCH 20/31] function generation
---
documentation/docs/functions.md | 8 +-
documentation/package-lock.json | 1 +
documentation/package.json | 7 +-
.../scripts/generate-function-docs.js | 137 ++++++++++++++++++
documentation/sidebars.ts | 4 +-
5 files changed, 150 insertions(+), 7 deletions(-)
create mode 100644 documentation/scripts/generate-function-docs.js
diff --git a/documentation/docs/functions.md b/documentation/docs/functions.md
index 12f481a0f..e21a35c4d 100644
--- a/documentation/docs/functions.md
+++ b/documentation/docs/functions.md
@@ -4,16 +4,16 @@
SQRL supports all of [Flink's built-in system functions](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/functions/systemfunctions/).
-SQRL adds [system functions](stdlib-docs/stdlib-docs/system-functions) with support for:
+SQRL adds [system functions](functions-system) with support for:
* a binary JSON type (JSONB) to represent semi-structured data efficiently.
* a vector type to represent embeddings.
* text manipulation and full text search.
-System functions are always available and do not need to be imported. Take a look at the [full list of SQRL system function](stdlib-docs/stdlib-docs/system-functions).
+System functions are always available and do not need to be imported. Take a look at the [full list of SQRL system function](functions-system).
## Function Libraries
-SQRL includes [standard libraries](stdlib-docs/stdlib-docs/library-functions) that can be imported into a SQRL script as follows:
+SQRL includes [standard libraries](functions-library) that can be imported into a SQRL script as follows:
```sql
IMPORT stdlib.math;
@@ -25,7 +25,7 @@ IMPORT stdlib.math.hypot AS hypotenuse;
```
Imports a single function `hypot` from the `math` library under the name `hypotenuse`. The renaming with `AS` is optional and is omitted when you want to use the original name.
-Check out the [full list of function libraries](stdlib-docs/stdlib-docs/library-functions).
+Check out the [full list of function libraries](functions-library).
## User Defined Functions
diff --git a/documentation/package-lock.json b/documentation/package-lock.json
index 123df9f35..2b1b0c48a 100644
--- a/documentation/package-lock.json
+++ b/documentation/package-lock.json
@@ -22,6 +22,7 @@
"@docusaurus/module-type-aliases": "3.7.0",
"@docusaurus/tsconfig": "3.7.0",
"@docusaurus/types": "3.7.0",
+ "js-yaml": "^4.1.0",
"typescript": "~5.6.2"
},
"engines": {
diff --git a/documentation/package.json b/documentation/package.json
index 91e003fee..593a0763a 100644
--- a/documentation/package.json
+++ b/documentation/package.json
@@ -5,14 +5,18 @@
"scripts": {
"docusaurus": "docusaurus",
"start": "docusaurus start",
+ "prestart": "npm run generate-docs",
"build": "docusaurus build",
+ "prebuild": "npm run generate-docs",
"swizzle": "docusaurus swizzle",
"deploy": "docusaurus deploy",
+ "predeploy": "npm run generate-docs",
"clear": "docusaurus clear",
"serve": "docusaurus serve",
"write-translations": "docusaurus write-translations",
"write-heading-ids": "docusaurus write-heading-ids",
- "typecheck": "tsc"
+ "typecheck": "tsc",
+ "generate-docs": "node scripts/generate-function-docs.js"
},
"dependencies": {
"@docusaurus/core": "3.7.0",
@@ -29,6 +33,7 @@
"@docusaurus/module-type-aliases": "3.7.0",
"@docusaurus/tsconfig": "3.7.0",
"@docusaurus/types": "3.7.0",
+ "js-yaml": "^4.1.0",
"typescript": "~5.6.2"
},
"browserslist": {
diff --git a/documentation/scripts/generate-function-docs.js b/documentation/scripts/generate-function-docs.js
new file mode 100644
index 000000000..ddde767a1
--- /dev/null
+++ b/documentation/scripts/generate-function-docs.js
@@ -0,0 +1,137 @@
+const fs = require('fs');
+const path = require('path');
+const yaml = require('js-yaml');
+
+// File paths
+const SYSTEM_FUNCTIONS_YAML = path.join(__dirname, '../docs/stdlib-docs/stdlib-docs/system-functions.yml');
+const LIBRARY_FUNCTIONS_YAML = path.join(__dirname, '../docs/stdlib-docs/stdlib-docs/library-functions.yml');
+const SYSTEM_FUNCTIONS_OUTPUT = path.join(__dirname, '../docs/functions-system.md');
+const LIBRARY_FUNCTIONS_OUTPUT = path.join(__dirname, '../docs/functions-library.md');
+
+/**
+ * Generates a markdown table row for a function
+ */
+function generateSystemFunctionRow(func) {
+ const name = func.name || '';
+ const description = func.description || '';
+ const example = func.example || '';
+
+ return `| \`${name}\` | ${description} | \`${example}\` |`;
+}
+
+function generateLibraryFunctionRow(func, categoryName) {
+ const name = func.name || '';
+ const description = func.description || '';
+ const identifier = func.identifier || '';
+ const requirement = func.requirement || '';
+ const importStatement = `IMPORT ${categoryName}.${identifier};`;
+
+ return `| \`${name}\` | ${description} | \`${importStatement}\` | ${requirement} |`;
+}
+
+/**
+ * Generates a markdown section for a category of functions
+ */
+function generateSystemSection(categoryName, functions) {
+ const header = `## ${categoryName.charAt(0).toUpperCase() + categoryName.slice(1)} Functions
+
+| Function | Description | Example |
+|----------|-------------|---------|
+`;
+
+ const rows = functions.map(generateSystemFunctionRow).join('\n');
+
+ return header + rows + '\n';
+}
+
+function generateLibrarySection(categoryName, functions) {
+ const header = `## ${categoryName.charAt(0).toUpperCase() + categoryName.slice(1)} Functions
+
+| Function | Description | Import | Requirements |
+|----------|-------------|------------|--------------|
+`;
+
+ const rows = functions.map(func => generateLibraryFunctionRow(func, categoryName)).join('\n');
+
+ return header + rows + '\n';
+}
+
+/**
+ * Generates the complete system functions markdown file
+ */
+function generateSystemFunctionsDoc(data) {
+ const frontmatter = `# System Functions
+
+DataSQRL provides built-in system functions that are available in all SQRL scripts. These functions are grouped by functionality and provide essential operations for data processing, JSON manipulation, vector operations, and text processing.
+
+`;
+
+ const sections = Object.entries(data)
+ .map(([category, functions]) => generateSystemSection(category, functions))
+ .join('\n');
+
+ return frontmatter + sections;
+}
+
+/**
+ * Generates the complete library functions markdown file
+ */
+function generateLibraryFunctionsDoc(data) {
+ const frontmatter = `# Library Functions
+
+DataSQRL provides extended library functions that can be imported into your SQRL scripts. These functions offer specialized capabilities for mathematical operations, data processing, AI integration, and external system connectivity.
+
+Library functions must be imported into the SQRL script via one of the following \`IMPORT\` statements:
+
+\`\`\`sql
+IMPORT library-name.*; --imports all functions in the library
+IMPORT library-name.function-name; --imports a single function by name
+IMPORT library-name.function-name AS myName; --imports a single function under a given name
+\`\`\`
+`;
+
+ const sections = Object.entries(data)
+ .map(([category, functions]) => generateLibrarySection(category, functions))
+ .join('\n');
+
+ return frontmatter + sections;
+}
+
+/**
+ * Main generator function
+ */
+function generateFunctionDocs() {
+ try {
+ console.log('π§ Generating function documentation...');
+
+ // Read and parse YAML files
+ console.log('π Reading YAML files...');
+ const systemFunctionsData = yaml.load(fs.readFileSync(SYSTEM_FUNCTIONS_YAML, 'utf8'));
+ const libraryFunctionsData = yaml.load(fs.readFileSync(LIBRARY_FUNCTIONS_YAML, 'utf8'));
+
+ // Generate markdown content
+ console.log('π Generating markdown content...');
+ const systemFunctionsMarkdown = generateSystemFunctionsDoc(systemFunctionsData);
+ const libraryFunctionsMarkdown = generateLibraryFunctionsDoc(libraryFunctionsData);
+
+ // Write output files
+ console.log('πΎ Writing output files...');
+ fs.writeFileSync(SYSTEM_FUNCTIONS_OUTPUT, systemFunctionsMarkdown);
+ fs.writeFileSync(LIBRARY_FUNCTIONS_OUTPUT, libraryFunctionsMarkdown);
+
+    console.log('β Function documentation generated successfully!');
+ console.log(` - ${SYSTEM_FUNCTIONS_OUTPUT}`);
+ console.log(` - ${LIBRARY_FUNCTIONS_OUTPUT}`);
+
+ } catch (error) {
+ console.error('β Error generating function documentation:', error);
+ process.exit(1);
+ }
+}
+
+// Run the generator if called directly
+if (require.main === module) {
+ generateFunctionDocs();
+}
+
+module.exports = { generateFunctionDocs };
\ No newline at end of file
diff --git a/documentation/sidebars.ts b/documentation/sidebars.ts
index 0594ae998..e69492a6d 100644
--- a/documentation/sidebars.ts
+++ b/documentation/sidebars.ts
@@ -121,12 +121,12 @@ const sidebars = {
items: [
{
type: 'doc',
- id: 'stdlib-docs/stdlib-docs/system-functions',
+ id: 'functions-system',
label: 'System Functions',
},
{
type: 'doc',
- id: 'stdlib-docs/stdlib-docs/library-functions',
+ id: 'functions-library',
label: 'Library Functions',
},
],
From 241241c22197d8973ba3d3896f4960ce3a51d777 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Fri, 10 Oct 2025 11:46:58 -0700
Subject: [PATCH 21/31] updated function name generation
---
.gitignore | 5 ++++-
documentation/docs/functions.md | 8 ++++----
documentation/scripts/generate-function-docs.js | 4 ++--
documentation/sidebars.ts | 4 ++--
4 files changed, 12 insertions(+), 9 deletions(-)
diff --git a/.gitignore b/.gitignore
index 4607bb05d..536921baa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -129,4 +129,7 @@ node_modules/
npm-debug.log*
yarn-debug.log*
-yarn-error.log*
\ No newline at end of file
+yarn-error.log*
+
+#Generated markdown files
+*-generated.md
\ No newline at end of file
diff --git a/documentation/docs/functions.md b/documentation/docs/functions.md
index e21a35c4d..36723d410 100644
--- a/documentation/docs/functions.md
+++ b/documentation/docs/functions.md
@@ -4,16 +4,16 @@
SQRL supports all of [Flink's built-in system functions](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/functions/systemfunctions/).
-SQRL adds [system functions](functions-system) with support for:
+SQRL adds [system functions](functions-system-generated) with support for:
* a binary JSON type (JSONB) to represent semi-structured data efficiently.
* a vector type to represent embeddings.
* text manipulation and full text search.
-System functions are always available and do not need to be imported. Take a look at the [full list of SQRL system function](functions-system).
+System functions are always available and do not need to be imported. Take a look at the [full list of SQRL system function](functions-system-generated).
## Function Libraries
-SQRL includes [standard libraries](functions-library) that can be imported into a SQRL script as follows:
+SQRL includes [standard libraries](functions-library-generated) that can be imported into a SQRL script as follows:
```sql
IMPORT stdlib.math;
@@ -25,7 +25,7 @@ IMPORT stdlib.math.hypot AS hypotenuse;
```
Imports a single function `hypot` from the `math` library under the name `hypotenuse`. The renaming with `AS` is optional and is omitted when you want to use the original name.
-Check out the [full list of function libraries](functions-library).
+Check out the [full list of function libraries](functions-library-generated).
## User Defined Functions
diff --git a/documentation/scripts/generate-function-docs.js b/documentation/scripts/generate-function-docs.js
index ddde767a1..30c713fc0 100644
--- a/documentation/scripts/generate-function-docs.js
+++ b/documentation/scripts/generate-function-docs.js
@@ -5,8 +5,8 @@ const yaml = require('js-yaml');
// File paths
const SYSTEM_FUNCTIONS_YAML = path.join(__dirname, '../docs/stdlib-docs/stdlib-docs/system-functions.yml');
const LIBRARY_FUNCTIONS_YAML = path.join(__dirname, '../docs/stdlib-docs/stdlib-docs/library-functions.yml');
-const SYSTEM_FUNCTIONS_OUTPUT = path.join(__dirname, '../docs/functions-system.md');
-const LIBRARY_FUNCTIONS_OUTPUT = path.join(__dirname, '../docs/functions-library.md');
+const SYSTEM_FUNCTIONS_OUTPUT = path.join(__dirname, '../docs/functions-system-generated.md');
+const LIBRARY_FUNCTIONS_OUTPUT = path.join(__dirname, '../docs/functions-library-generated.md');
/**
* Generates a markdown table row for a function
diff --git a/documentation/sidebars.ts b/documentation/sidebars.ts
index e69492a6d..c11e588f7 100644
--- a/documentation/sidebars.ts
+++ b/documentation/sidebars.ts
@@ -121,12 +121,12 @@ const sidebars = {
items: [
{
type: 'doc',
- id: 'functions-system',
+ id: 'functions-system-generated',
label: 'System Functions',
},
{
type: 'doc',
- id: 'functions-library',
+ id: 'functions-library-generated',
label: 'Library Functions',
},
],
From 6451e32091d6642bf2b849418fa1d8e00901fcf5 Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Sat, 11 Oct 2025 09:37:38 -0700
Subject: [PATCH 22/31] moving pages
---
documentation/CLAUDE.md | 3 +-
documentation/docs/{ => intro}/concepts.md | 0
.../docs/{ => intro}/getting-started.md | 6 ++--
documentation/docs/{ => intro}/intro.md | 28 +++++++++----------
documentation/docs/{ => intro}/tutorials.md | 0
documentation/docs/sqrl-language.md | 2 +-
documentation/sidebars.ts | 8 +++---
7 files changed, 24 insertions(+), 23 deletions(-)
rename documentation/docs/{ => intro}/concepts.md (100%)
rename documentation/docs/{ => intro}/getting-started.md (97%)
rename documentation/docs/{ => intro}/intro.md (84%)
rename documentation/docs/{ => intro}/tutorials.md (100%)
diff --git a/documentation/CLAUDE.md b/documentation/CLAUDE.md
index d67dea35f..f2abec75d 100644
--- a/documentation/CLAUDE.md
+++ b/documentation/CLAUDE.md
@@ -77,7 +77,8 @@ npm run swizzle
### Content Structure
- **docs/**: Main documentation content in Markdown format
- - Core documentation files include intro.md, getting-started.md, sqrl-language.md, etc.
+ - **intro/**: Introductory documentation (intro.md, getting-started.md, concepts.md, tutorials.md)
+ - Core documentation files include sqrl-language.md, interface.md, configuration.md, etc.
- **stdlib-docs/**: Embedded function library documentation
- **blog/**: Release notes, updates, and technical blog posts
- **src/**: React components and custom pages
diff --git a/documentation/docs/concepts.md b/documentation/docs/intro/concepts.md
similarity index 100%
rename from documentation/docs/concepts.md
rename to documentation/docs/intro/concepts.md
diff --git a/documentation/docs/getting-started.md b/documentation/docs/intro/getting-started.md
similarity index 97%
rename from documentation/docs/getting-started.md
rename to documentation/docs/intro/getting-started.md
index e31e4f672..eb393c473 100644
--- a/documentation/docs/getting-started.md
+++ b/documentation/docs/intro/getting-started.md
@@ -165,7 +165,7 @@ You can run the script and access the API like we did above.
Note, that when you compile or run the script, the compiler automatically created a connector table from the data in the `local-data/userinfo.table.sql` file. DataSQRL can automatically infer the schema from JSONL or CSV files and generate connectors.
-You can also define connectors manually to ingest data from Kafka, Kinesis, Postgres, Apache Iceberg, and many other sources. Check out the [Connectors Documentation](connectors.md) to learn how to ingest data from and sink data to many external data systems.
+You can also define connectors manually to ingest data from Kafka, Kinesis, Postgres, Apache Iceberg, and many other sources. Check out the [Connectors Documentation](../connectors) to learn how to ingest data from and sink data to many external data systems.
## Testing
@@ -243,8 +243,8 @@ docker run --rm -v $PWD:/build datasqrl/cmd:latest compile usertokens.sqrl usert
Congratulations, you made the first big step toward building production-grade data pipelines the easy way.
Next, check out:
-* **[Full Documentation](intro.md)** for the complete reference, language spec, and more.
-* **[Tutorials](tutorials.md)** if you prefer learning by doing.
+* **[Full Documentation](intro)** for the complete reference, language spec, and more.
+* **[Tutorials](tutorials)** if you prefer learning by doing.
## 4. Troubleshooting Common Issues
diff --git a/documentation/docs/intro.md b/documentation/docs/intro/intro.md
similarity index 84%
rename from documentation/docs/intro.md
rename to documentation/docs/intro/intro.md
index 9a9b5c281..df1656cf7 100644
--- a/documentation/docs/intro.md
+++ b/documentation/docs/intro/intro.md
@@ -23,15 +23,15 @@ Take a look at the [DataSQRL Examples Repository](https://github.com/DataSQRL/da
DataSQRL consists of three main components that work together:
-### 1. [SQRL Language](sqrl-language)
+### 1. [SQRL Language](../sqrl-language)
SQRL extends Flink SQL with features specifically designed for reactive data processing:
- **IMPORT/EXPORT** statements for connecting data systems
-- **Table functions and relationships** for interface definitions
+- **Table functions and relationships** for interface definitions
- **Hints** to control pipeline structure and execution
- **Subscription syntax** for real-time data streaming
- **Type system** for stream processing semantics
-### 2. [Interface Design](interface)
+### 2. [Interface Design](../interface)
DataSQRL automatically generates interfaces from your SQRL script for multiple protocols:
- **Data Products** as database/data lake views and tables
- **GraphQL APIs** with queries, mutations, and subscriptions
@@ -39,14 +39,14 @@ DataSQRL automatically generates interfaces from your SQRL script for multiple p
- **MCP tools/resources** for AI agent integration
- **Schema customization** and operation control
-### 3. [Configuration](configuration)
+### 3. [Configuration](../configuration)
JSON configuration files that define:
- **Engines**: Data technologies (Flink, Postgres, Kafka, etc.)
- **Connectors**: Templates for data sources and sinks
- **Dependencies**: External data packages and libraries
- **Compiler options**: Optimization and deployment settings
-### 4. [Compiler](compiler)
+### 4. [Compiler](../compiler)
The DataSQRL compiler:
- **Transpiles** SQRL scripts into deployment assets
- **Optimizes** data processing DAGs across multiple engines
@@ -60,20 +60,20 @@ The DataSQRL compiler:
- [**Tutorials**](tutorials) - Practical examples for specific use cases
### π **Core Documentation**
-- [**SQRL Language**](sqrl-language) - Complete language specification and syntax
-- [**Interface Design**](interface) - API generation and data product interfaces
-- [**Configuration**](configuration) - Engine setup and project configuration
-- [**Compiler**](compiler) - Command-line interface and compilation options
-- [**Functions**](functions) - Built-in functions and custom function libraries
+- [**SQRL Language**](../sqrl-language) - Complete language specification and syntax
+- [**Interface Design**](../interface) - API generation and data product interfaces
+- [**Configuration**](../configuration) - Engine setup and project configuration
+- [**Compiler**](../compiler) - Command-line interface and compilation options
+- [**Functions**](../functions) - Built-in functions and custom function libraries
### π **Integration & Deployment**
-- [**Connectors**](connectors) - Ingest from and export to external systems
+- [**Connectors**](../connectors) - Ingest from and export to external systems
- [**Concepts**](concepts) - Key concepts in stream processing (time, watermarks, etc.)
-- [**How-To Guides**](howto) - Best practices and implementation patterns
+- [**How-To Guides**](../howto) - Best practices and implementation patterns
### π οΈ **Advanced Topics**
-- [**Developer Documentation**](deepdive) - Internal architecture and advanced customization
-- [**Compatibility**](compatibility) - Version compatibility and migration guides
+- [**Developer Documentation**](../deepdive) - Internal architecture and advanced customization
+- [**Compatibility**](../compatibility) - Version compatibility and migration guides
## Use Cases
diff --git a/documentation/docs/tutorials.md b/documentation/docs/intro/tutorials.md
similarity index 100%
rename from documentation/docs/tutorials.md
rename to documentation/docs/intro/tutorials.md
diff --git a/documentation/docs/sqrl-language.md b/documentation/docs/sqrl-language.md
index 76df3c14a..9edb14b28 100644
--- a/documentation/docs/sqrl-language.md
+++ b/documentation/docs/sqrl-language.md
@@ -398,6 +398,6 @@ The following produce compile time errors:
* Refer to the [Configuration documentation](configuration.md) for engine configuration.
* See [Command documentation](compiler.md) for CLI usage of the compiler.
* Read the [How-to guides](howto.md) for best-practices and implementation guidance.
-* Follow the [Tutorials](tutorials.md) for practical SQRL examples.
+* Follow the [Tutorials](intro/tutorials) for practical SQRL examples.
For engine configuration, see **configuration.md**; for CLI usage, see **compiler.md**.
diff --git a/documentation/sidebars.ts b/documentation/sidebars.ts
index c11e588f7..c6bc9832c 100644
--- a/documentation/sidebars.ts
+++ b/documentation/sidebars.ts
@@ -33,12 +33,12 @@ const sidebars = {
tutorialSidebar: [
{
type: 'doc',
- id: 'intro',
+ id: 'intro/intro',
label: 'π Overview',
},
{
type: 'doc',
- id: 'getting-started',
+ id: 'intro/getting-started',
label: 'π Getting Started',
},
{
@@ -138,14 +138,14 @@ const sidebars = {
},
{
type: 'doc',
- id: 'concepts',
+ id: 'intro/concepts',
label: 'π§ Streaming Concepts',
}
],
},
{
type: 'doc',
- id: 'tutorials',
+ id: 'intro/tutorials',
label: 'π Tutorials',
},
{
From 05567694cd13c49dc975533a915b198dccd674ce Mon Sep 17 00:00:00 2001
From: Matthias Broecheler <758061+mbroecheler@users.noreply.github.com>
Date: Sat, 11 Oct 2025 10:08:31 -0700
Subject: [PATCH 23/31] fixed broken links
---
...3-05-15-lets-uplevel-database-datasqrl.mdx | 2 +-
.../blog/2023-07-10-temporal-join.mdx | 2 +-
.../blog/2025-05-09-flink-sql-extensions.md | 2 +-
documentation/blog/2025-07-27-datasqrl-0.7.md | 2 +-
documentation/blog/tags.yml | 20 +++++++++++++++++++
documentation/docs/intro/getting-started.md | 2 +-
.../docs/intro/{intro.md => index.md} | 2 +-
documentation/docs/sqrl-language.md | 2 +-
documentation/docusaurus.config.ts | 2 +-
documentation/sidebars.ts | 2 +-
documentation/src/pages/ai.tsx | 4 ++--
documentation/src/pages/flink.tsx | 6 +++---
documentation/src/pages/index.tsx | 4 ++--
13 files changed, 36 insertions(+), 16 deletions(-)
rename documentation/docs/intro/{intro.md => index.md} (98%)
diff --git a/documentation/blog/2023-05-15-lets-uplevel-database-datasqrl.mdx b/documentation/blog/2023-05-15-lets-uplevel-database-datasqrl.mdx
index 7d1eb0046..49bedd04e 100644
--- a/documentation/blog/2023-05-15-lets-uplevel-database-datasqrl.mdx
+++ b/documentation/blog/2023-05-15-lets-uplevel-database-datasqrl.mdx
@@ -76,7 +76,7 @@ We [just released](https://github.com/DataSQRL/sqrl/releases/tag/v0.1.0) the fir
Here are some ideas for how you can contribute:
* Share your thoughts: Do you have ideas on how we can improve the SQRL language or the DataSQRL compiler? Jump into [our community](/community) and let us know!
-* Test the waters: Do you like playing with new technologies? Try out [DataSQRL](/docs/getting-started) and let us know if you find any bugs or missing features.
+* Test the waters: Do you like playing with new technologies? Try out [DataSQRL](/docs/intro/getting-started) and let us know if you find any bugs or missing features.
* Spread the word: Think DataSQRL has potential? Share this blog post and [star](https://github.com/DataSQRL/sqrl) DataSQRL on [Github](https://github.com/DataSQRL/sqrl). Your support can help us reach more like-minded individuals.
* Code with us: Do you enjoy contributing to open-source projects? Dive into [the code](https://github.com/DataSQRL/sqrl) with us and pick up a [ticket](https://github.com/DataSQRL/sqrl/issues).
diff --git a/documentation/blog/2023-07-10-temporal-join.mdx b/documentation/blog/2023-07-10-temporal-join.mdx
index f210e08e7..4372d9e36 100644
--- a/documentation/blog/2023-07-10-temporal-join.mdx
+++ b/documentation/blog/2023-07-10-temporal-join.mdx
@@ -135,7 +135,7 @@ Temporal joins help us avoid the pitfalls of time-alignment problems when joinin
And thatβs why the temporal join is stream processing's secret superpower.
-DataSQRL makes using temporal joins a breeze. With its simplified syntax and smart defaults, it's like having a personal tour guide leading you through the sometimes bewildering landscape of stream processing. Take a look at our [Getting Started](/docs/getting-started) to see a complete example of temporal joins in action or take a look at our [other tutorials](/docs/tutorials) for a step-by-step guide to stream processing including temporal joins.
+DataSQRL makes using temporal joins a breeze. With its simplified syntax and smart defaults, it's like having a personal tour guide leading you through the sometimes bewildering landscape of stream processing. Take a look at our [Getting Started](/docs/intro/getting-started) to see a complete example of temporal joins in action or take a look at our [other tutorials](/docs/intro/tutorials) for a step-by-step guide to stream processing including temporal joins.
Happy data time-traveling, folks!
diff --git a/documentation/blog/2025-05-09-flink-sql-extensions.md b/documentation/blog/2025-05-09-flink-sql-extensions.md
index 84292a2a3..25395a248 100644
--- a/documentation/blog/2025-05-09-flink-sql-extensions.md
+++ b/documentation/blog/2025-05-09-flink-sql-extensions.md
@@ -95,4 +95,4 @@ In addition to breaking out the sink configuration from the main script, the `EX
[FlinkSQL](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/sql/overview/) is phenomenal extension of the SQL ecosystem to stream processing. With DataSQRL, we are trying to make it easier to build end-to-end data pipelines and complete data applications with FlinkSQL.
-Check out the [complete example](/docs/getting-started) which also covers testing, customization, and deployment. Or read the [documentation](/docs/sqrl-language) to learn more.
\ No newline at end of file
+Check out the [complete example](/docs/intro/getting-started) which also covers testing, customization, and deployment. Or read the [documentation](/docs/sqrl-language) to learn more.
\ No newline at end of file
diff --git a/documentation/blog/2025-07-27-datasqrl-0.7.md b/documentation/blog/2025-07-27-datasqrl-0.7.md
index 42e21bffb..d595f585a 100644
--- a/documentation/blog/2025-07-27-datasqrl-0.7.md
+++ b/documentation/blog/2025-07-27-datasqrl-0.7.md
@@ -33,7 +33,7 @@ docker pull datasqrl/cmd:0.7.0
Data delivery is the final and most visible stage of any data pipeline. It's how users, applications, and AI agents actually access and consume data. Most enterprise data interactions happen through APIs, making the delivery interface a critical component. At DataSQRL, we've invested heavily in automating the upstream parts of the pipeline: from Flink-powered data processing to Postgres-backed storage. With version 0.7, we turn our focus to the serving layer: introducing support for the Model Context Protocol (MCP) and REST APIs, as well as JWT-based authentication and authorization. These additions ensure seamless integration with most authentication providers and enable secure, token-based data access, with fine-grained authorization logic enforced directly in the SQRL script. This completes our vision of end-to-end pipeline automation, where consumption patterns inform data storage and processingβclosing the loop between data production and usage.
-Check out the [interface documentation](../docs/interface) for more information.
+Check out the [interface documentation](/docs/interface) for more information.
diff --git a/documentation/blog/tags.yml b/documentation/blog/tags.yml
index ea5b8e48b..0dc3e645e 100644
--- a/documentation/blog/tags.yml
+++ b/documentation/blog/tags.yml
@@ -28,3 +28,23 @@ feature:
label: feature
permalink: /feature
description: Feature descriptions
+
+DataSQRL:
+ label: DataSQRL
+ permalink: /datasqrl
+ description: Posts about DataSQRL
+
+community:
+ label: Community
+ permalink: /community
+ description: Community updates and announcements
+
+Join:
+ label: Join
+ permalink: /join
+ description: Posts about SQL joins and temporal joins
+
+Flink:
+ label: Flink
+ permalink: /flink
+ description: Apache Flink related posts
diff --git a/documentation/docs/intro/getting-started.md b/documentation/docs/intro/getting-started.md
index eb393c473..7706bc74c 100644
--- a/documentation/docs/intro/getting-started.md
+++ b/documentation/docs/intro/getting-started.md
@@ -243,7 +243,7 @@ docker run --rm -v $PWD:/build datasqrl/cmd:latest compile usertokens.sqrl usert
Congratulations, you made the first big step toward building production-grade data pipelines the easy way.
Next, check out:
-* **[Full Documentation](intro)** for the complete reference, language spec, and more.
+* **[Full Documentation](/docs/intro)** for the complete reference, language spec, and more.
* **[Tutorials](tutorials)** if you prefer learning by doing.
## 4. Troubleshooting Common Issues
diff --git a/documentation/docs/intro/intro.md b/documentation/docs/intro/index.md
similarity index 98%
rename from documentation/docs/intro/intro.md
rename to documentation/docs/intro/index.md
index df1656cf7..25818970a 100644
--- a/documentation/docs/intro/intro.md
+++ b/documentation/docs/intro/index.md
@@ -69,7 +69,7 @@ The DataSQRL compiler:
### π **Integration & Deployment**
- [**Connectors**](../connectors) - Ingest from and export to external systems
- [**Concepts**](concepts) - Key concepts in stream processing (time, watermarks, etc.)
-- [**How-To Guides**](../howto) - Best practices and implementation patterns
+- [**How-To Guides**](/docs/category/-how-to) - Best practices and implementation patterns
### π οΈ **Advanced Topics**
- [**Developer Documentation**](../deepdive) - Internal architecture and advanced customization
diff --git a/documentation/docs/sqrl-language.md b/documentation/docs/sqrl-language.md
index 9edb14b28..501ac894e 100644
--- a/documentation/docs/sqrl-language.md
+++ b/documentation/docs/sqrl-language.md
@@ -397,7 +397,7 @@ The following produce compile time errors:
* Refer to the [Configuration documentation](configuration.md) for engine configuration.
* See [Command documentation](compiler.md) for CLI usage of the compiler.
-* Read the [How-to guides](howto.md) for best-practices and implementation guidance.
+* Read the [How-to guides](/docs/category/-how-to) for best-practices and implementation guidance.
* Follow the [Tutorials](intro/tutorials) for practical SQRL examples.
For engine configuration, see **configuration.md**; for CLI usage, see **compiler.md**.
diff --git a/documentation/docusaurus.config.ts b/documentation/docusaurus.config.ts
index 3dd564f9f..57e0ba31f 100644
--- a/documentation/docusaurus.config.ts
+++ b/documentation/docusaurus.config.ts
@@ -130,7 +130,7 @@ const config: Config = {
items: [
{
label: 'Getting Started',
- to: '/docs/getting-started',
+ to: '/docs/intro/getting-started',
},
{
label: 'User Documentation',
diff --git a/documentation/sidebars.ts b/documentation/sidebars.ts
index c6bc9832c..aa753d754 100644
--- a/documentation/sidebars.ts
+++ b/documentation/sidebars.ts
@@ -33,7 +33,7 @@ const sidebars = {
tutorialSidebar: [
{
type: 'doc',
- id: 'intro/intro',
+ id: 'intro/index',
label: 'π Overview',
},
{
diff --git a/documentation/src/pages/ai.tsx b/documentation/src/pages/ai.tsx
index dd666ed5c..ab1446882 100644
--- a/documentation/src/pages/ai.tsx
+++ b/documentation/src/pages/ai.tsx
@@ -21,7 +21,7 @@ const header: HomepageHeaderProps = {
for GenAI and ML applications from all your data sources.
>
),
- buttonLink: 'docs/getting-started',
+ buttonLink: 'docs/intro/getting-started',
buttonText: 'Build Flink Apps in 10 min',
image: "/img/diagrams/ai_infra_summary.png"
};
@@ -139,7 +139,7 @@ TokenAnalysis := SELECT orgid, userid, sum(tokens) as total_tokens,
introspection, debugging - DataSQRL brings developer convenience and automation.
- to="/docs/getting-started">Get Started
+ to="/docs/intro/getting-started">Get Started
Learn More
diff --git a/documentation/src/pages/flink.tsx b/documentation/src/pages/flink.tsx
index fe2d143fe..1f74b7dee 100644
--- a/documentation/src/pages/flink.tsx
+++ b/documentation/src/pages/flink.tsx
@@ -21,7 +21,7 @@ const header: HomepageHeaderProps = {
realtime data apps faster and easier. Batteries included.
>
),
- buttonLink: 'docs/getting-started',
+ buttonLink: 'docs/intro/getting-started',
buttonText: 'Build Flink Apps in 10 min',
image: "/img/landingpage/flink_on_rails.png"
};
@@ -139,7 +139,7 @@ docker run --rm -v $PWD:/build \\
introspection, debugging - DataSQRL brings developer convenience and happiness.
- to="/docs/getting-started">Get Started
+ to="/docs/intro/getting-started">Get Started
Learn More
@@ -164,7 +164,7 @@ docker run --rm -v $PWD:/build \\
Learn how DataSQRL simplifies building real-time data applications. This quick demo shows how to define your pipeline in SQL and go from source to API in minutes.
- to="/docs/getting-started">Try Now
+ to="/docs/intro/getting-started">Try Now
diff --git a/documentation/src/pages/index.tsx b/documentation/src/pages/index.tsx
index 9c8e992ba..37afac3c4 100644
--- a/documentation/src/pages/index.tsx
+++ b/documentation/src/pages/index.tsx
@@ -21,7 +21,7 @@ const header: HomepageHeaderProps = {
Build data APIs or data products, serve data via MCP or RAG.
>
),
- buttonLink: 'docs/getting-started',
+ buttonLink: 'docs/intro/getting-started',
buttonText: 'Automate Your Data Pipelines',
image: "/img/diagrams/architecture_overview.png"
};
@@ -414,7 +414,7 @@ EnrichedTransactions := SELECT
{/*Watch the video to see for yourself.*/}
- to="/docs/getting-started">Get Started
+ to="/docs/intro/getting-started">Get Started
Learn More
From 273907a3d0cf4cac1b506226c38d941840cbcc1a Mon Sep 17 00:00:00 2001
From: Ferenc Csaky
Date: Fri, 17 Oct 2025 12:26:13 +0200
Subject: [PATCH 24/31] update compatibility table
---
documentation/docs/compatibility.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/documentation/docs/compatibility.md b/documentation/docs/compatibility.md
index 8bceeb548..f55c85824 100644
--- a/documentation/docs/compatibility.md
+++ b/documentation/docs/compatibility.md
@@ -6,6 +6,6 @@ DataSQRL builds on top of Flink and the Flink connector ecosystem.
|----------|----------|------------------------------|----------------------|----------|----------------------------|------------------------|
| 0.6.x | 1.19.x | 1.19+
1.19.1+2 tested | 13+
14 tested | 0.8.0 | 1.9.0+
1.9.0 tested | 3+
3.4.0 tested |
| 0.7.x | 1.19.x | 1.19+
1.19.2+3 tested | 15+
17 tested | 0.8.0 | 1.9.0+
1.9.0 tested | 3+
3.4.0 tested |
-| | | | | | | |
+| 0.8.x | 1.19.x | 1.19+
1.19.3 tested | 15+
17 tested | 0.8.0 | 1.9.0+
1.9.2 tested | 3+
3.4.0 tested |
| | | | | | | |
| | | | | | | |
From 55369687f61e174dde5fc9d4dadf381597afa331 Mon Sep 17 00:00:00 2001
From: Ferenc Csaky
Date: Fri, 17 Oct 2025 13:18:59 +0200
Subject: [PATCH 25/31] minor formatting improvements
---
.../docs/configuration-engine/duckdb.md | 4 +-
.../docs/configuration-engine/flink.md | 12 ++--
.../docs/configuration-engine/kafka.md | 10 ++--
.../docs/configuration-engine/postgres.md | 8 +--
.../docs/configuration-engine/vertx.md | 16 ++---
documentation/docs/deepdive.md | 15 ++---
documentation/docs/functions.md | 2 -
documentation/docs/howto/project-structure.md | 34 ++++++-----
documentation/docs/howto/stream-enrichment.md | 9 ++-
.../docs/howto/subgraph-elimination.md | 3 +-
documentation/docs/howto/templating.md | 3 +-
documentation/docs/sqrl-language.md | 18 ++++--
.../server/GenericJavaServerEngineTest.java | 59 ++++++++-----------
.../usecases/jwt-authorized/package.json | 4 +-
14 files changed, 98 insertions(+), 99 deletions(-)
diff --git a/documentation/docs/configuration-engine/duckdb.md b/documentation/docs/configuration-engine/duckdb.md
index 1b0166b77..0fd0b928d 100644
--- a/documentation/docs/configuration-engine/duckdb.md
+++ b/documentation/docs/configuration-engine/duckdb.md
@@ -4,8 +4,8 @@ DuckDB is a vectorized database query engine that excels at analytical queries a
## Configuration Options
-| Key | Type | Default | Description |
-|-------|------------|------------------|----------------|
+| Key | Type | Default | Description |
+|-------|------------|------------------|---------------------------------------|
| `url` | **string** | `"jdbc:duckdb:"` | Full JDBC URL for database connection |
## Example Configuration
diff --git a/documentation/docs/configuration-engine/flink.md b/documentation/docs/configuration-engine/flink.md
index b406bb437..acc419f73 100644
--- a/documentation/docs/configuration-engine/flink.md
+++ b/documentation/docs/configuration-engine/flink.md
@@ -38,12 +38,12 @@ Refer to the [Flink Documentation](hhttps://nightlies.apache.org/flink/flink-doc
Flink supports deployment-specific configuration options for managing cluster resources:
-| Key | Type | Default | Description |
-|----------------------|-------------|---------|----------------------------------------------------------------------|
-| `jobmanager-size` | **string** | - | Job manager instance size: `dev`, `small`, `medium`, `large` |
-| `taskmanager-size` | **string** | - | Task manager instance size with resource variants |
-| `taskmanager-count` | **integer** | - | Number of task manager instances (minimum: 1) |
-| `secrets` | **array** | `null` | Array of secret names to inject, or `null` if no secrets needed |
+| Key | Type | Default | Description |
+|---------------------|-------------|---------|-----------------------------------------------------------------|
+| `jobmanager-size` | **string** | - | Job manager instance size: `dev`, `small`, `medium`, `large` |
+| `taskmanager-size` | **string** | - | Task manager instance size with resource variants |
+| `taskmanager-count` | **integer** | - | Number of task manager instances (minimum: 1) |
+| `secrets` | **array** | `null` | Array of secret names to inject, or `null` if no secrets needed |
### Task Manager Size Options
diff --git a/documentation/docs/configuration-engine/kafka.md b/documentation/docs/configuration-engine/kafka.md
index 8d80a4b53..9c0186396 100644
--- a/documentation/docs/configuration-engine/kafka.md
+++ b/documentation/docs/configuration-engine/kafka.md
@@ -4,11 +4,11 @@ Apache Kafka is a streaming data platform that serves as the log engine in DataS
## Configuration Options
-| Key | Type | Default | Description |
-|------------------------|-------------|-----------|-------------------------------------------------------------------------|
-| `retention` | **string** | `null` | Topic retention time (e.g., "7d", "24h") or indefinite when `null` |
-| `watermark` | **string** | `"0 ms"` | Watermark delay for event time processing |
-| `transaction-watermark`| **string** | `"0 ms"` | Watermark delay for event time processing when transactions are enabled |
+| Key | Type | Default | Description |
+|-------------------------|------------|----------|-------------------------------------------------------------------------|
+| `retention` | **string** | `null` | Topic retention time (e.g., "7d", "24h") or indefinite when `null` |
+| `watermark` | **string** | `"0 ms"` | Watermark delay for event time processing |
+| `transaction-watermark` | **string** | `"0 ms"` | Watermark delay for event time processing when transactions are enabled |
Additional custom Kafka settings can be added under the `config` section.
diff --git a/documentation/docs/configuration-engine/postgres.md b/documentation/docs/configuration-engine/postgres.md
index 644233a32..902790591 100644
--- a/documentation/docs/configuration-engine/postgres.md
+++ b/documentation/docs/configuration-engine/postgres.md
@@ -24,10 +24,10 @@ No mandatory configuration keys are required. Physical DDL (tables, indexes, vie
PostgreSQL supports deployment-specific configuration for database scaling and high availability:
-| Key | Type | Default | Description |
-|------------------|-------------|---------|---------------------------------------------------------|
-| `instance-size` | **string** | - | Database instance size for compute and memory |
-| `replica-count` | **integer** | - | Number of read replicas (minimum: 0, maximum varies) |
+| Key | Type | Default | Description |
+|-----------------|-------------|---------|------------------------------------------------------|
+| `instance-size` | **string** | - | Database instance size for compute and memory |
+| `replica-count` | **integer** | - | Number of read replicas (minimum: 0, maximum varies) |
### Instance Size Options
diff --git a/documentation/docs/configuration-engine/vertx.md b/documentation/docs/configuration-engine/vertx.md
index 9819cc40b..7db5c4eff 100644
--- a/documentation/docs/configuration-engine/vertx.md
+++ b/documentation/docs/configuration-engine/vertx.md
@@ -4,10 +4,10 @@ Eclipse Vert.x is a reactive server framework that serves as the GraphQL API ser
## Configuration Options
-| Key | Type | Default | Notes |
-|--------------|------------|-----------|---------------------------|
-| `authKind` | **string** | `"NONE"` | Authentication type: `"NONE"` or `"JWT"` |
-| `config` | **object** | see below | Vert.x-specific configuration including JWT settings |
+| Key | Type | Default | Notes |
+|------------|------------|-----------|------------------------------------------------------|
+| `authKind` | **string** | `"NONE"` | Authentication type: `"NONE"` or `"JWT"` |
+| `config` | **object** | see below | Vert.x-specific configuration including JWT settings |
## Basic Configuration
@@ -55,10 +55,10 @@ For secure APIs with JWT authentication:
Vert.x supports deployment-specific configuration options for scaling the API server:
-| Key | Type | Default | Description |
-|-------------------|-------------|---------|----------------------------------------------------------------|
-| `instance-size` | **string** | - | Server instance size with storage variants |
-| `instance-count` | **integer** | - | Number of server instances to run (minimum: 1) |
+| Key | Type | Default | Description |
+|------------------|-------------|---------|------------------------------------------------|
+| `instance-size` | **string** | - | Server instance size with storage variants |
+| `instance-count` | **integer** | - | Number of server instances to run (minimum: 1) |
### Instance Size Options
diff --git a/documentation/docs/deepdive.md b/documentation/docs/deepdive.md
index 60ab6e094..71fdc15f1 100644
--- a/documentation/docs/deepdive.md
+++ b/documentation/docs/deepdive.md
@@ -1,6 +1,6 @@
# Deep Dive: How DataSQRL Works
-The DataSQRL Compiler executes the following steps:
+The DataSQRL `compile` command executes the following steps:
1. **Read Configuration**: Read and combine all package.json configuration files to initialize the configuration for the compiler
2. **Build Project**: The [packager](#packager) builds the project structure in `build/` directory.
@@ -12,21 +12,20 @@ The DataSQRL Compiler executes the following steps:
8. **Generate Physical Plans**: The [Physical Planner](#physical-planner) generates deployment assets for each engine and connector configuration to move data between engines.
9. **Write Deployment Artifacts**: The deployment artifacts are written to the `build/deploy` folder with the engine plans in `build/deploy/plan`.
-The DataSQRL run command executes all compilation steps above and:
+The DataSQRL `run` command executes all compilation steps above and:
1. **Launch**: Launches all engines in docker
2. **Deploy**: Deploys the deployment assets to the engines, e.g. installs the database schema, passes the GraphQL execution plan to Vert.x, creates topics in RedPanda, and executes the compiled plan in Flink.
3. **Runs**: Runs and monitors the engines as they execute the pipeline.
The running data pipeline and the individual engines running each component are accessible locally via the mapped ports.
-The DataSQRL test command executes all compilation and run steps above and:
+The DataSQRL `test` command executes all compilation and run steps above and:
1. **Subscriptions**: Installs subscription queries to listen for test results (if any)
2. **Mutations**: Runs the mutation queries against the API in order (if any) and snapshots the results.
- * Waits for the configured interval, number of checkpoints, or Flink job completion based on configuration.
+3. **Await**: Waits for the configured interval, number of checkpoints, or Flink job completion based on configuration.
4. **Queries**: Runs the queries against the API to snapshot the results.
5. **Snapshots**: Snapshots all subscription results in string order.
-
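+
+For illustration, the three commands above are typically invoked through the DataSQRL Docker image roughly like this (the script name is a placeholder, and additional flags such as port mappings may be needed depending on your setup):
+
+```sh
+# Compile only: writes the deployment artifacts to build/deploy
+docker run --rm -v $PWD:/build datasqrl/cmd:latest compile mypipeline.sqrl
+
+# Compile, launch the engines, and run the pipeline locally
+docker run --rm -v $PWD:/build datasqrl/cmd:latest run mypipeline.sqrl
+
+# Compile, run, and execute the test queries, comparing results against snapshots
+docker run --rm -v $PWD:/build datasqrl/cmd:latest test mypipeline.sqrl
+```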
## Architecture
DataSQRL supports a pluggable engine architecture. A data pipeline or microservice
@@ -54,7 +53,8 @@ DataSQRL supports the following types of stages:
* [GraphQL Java](https://www.graphql-java.com/)
* Cache: For caching data on the server (coming soon)
-Currently, DataSQRL is closely tied to Flink as the stream processing engine. The other engines are modular, making it simple to add additional engines.
+Currently, DataSQRL is closely tied to Flink as the stream processing engine.
+The other engines are modular, making it simple to add additional engines.
A data pipeline topology is a sequence of stages. A pipeline topology may contain
multiple stages of the same type (e.g. two different database stages).
@@ -88,8 +88,6 @@ In addition, the packager executes the following special purpose preprocessors:
Preprocessors are internal to DataSQRL and can be extended within the framework.
-
-
### Parser
The parser is the first stage of the compiler. The parser parses the
@@ -114,7 +112,6 @@ that contains information needed by the planner.
2. It analyzes the SQL to identify potential issues, semantic inconsistencies, or optimization potential and produces warnings or notices.
3. It extracts cost information for the optimizer.
-
### DAG Planner
The DAG planner takes all the individual table and function definitions and assembles them into
diff --git a/documentation/docs/functions.md b/documentation/docs/functions.md
index 36723d410..3d733e353 100644
--- a/documentation/docs/functions.md
+++ b/documentation/docs/functions.md
@@ -27,7 +27,6 @@ Imports a single function `hypot` from the `math` library under the name `hypote
Check out the [full list of function libraries](functions-library-generated).
-
## User Defined Functions
Users can define custom functions and import them into a SQRL script.
@@ -45,7 +44,6 @@ IMPORT myjavafunction.target.MyScalarFunction;
Check out this [complete example](https://github.com/DataSQRL/datasqrl-examples/tree/main/user-defined-function).
-
### JavaScript
Support for JavaScript functions is currently being implemented and is targeted for the 0.7 release.
diff --git a/documentation/docs/howto/project-structure.md b/documentation/docs/howto/project-structure.md
index ceebb62ed..467ffb321 100644
--- a/documentation/docs/howto/project-structure.md
+++ b/documentation/docs/howto/project-structure.md
@@ -3,28 +3,30 @@
A DataSQRL project is structured as follows where `{name}` is the project name.
```
-├── {name}.sqrl # Contains the main data processing logic
+├── {name}.sqrl                          # Contains the main data processing logic
├── {name}-[run/test/prod]-package.json  # Configuration files for running locally, testing, and deploying the project
-├── {name}-connectors/ # Contains source and sink table definitions, shared connector logic, schemas, and data files
-│   └── sources-[run/test/prod].sqrl # Contains the source table definitions, split by variant or environment
-├── snapshots/ # Contains the snapshot data for tests
-│   └── {name}/ # One directory per project
-├── {name}-api/ # Contains the API schema and operation definitions for the project
-│   ├── schema.v1.graphqls # GraphQL schema definition
-│   ├── tests/ # Contains GraphQL test queries as .graphql files
-│   └── operations-v1/ # Contains any operation definitions as .graphql files
-└── README.md # Explain the project(s) and structure
+├── {name}-connectors/                   # Contains source and sink table definitions, shared connector logic, schemas, and data files
+│   └── sources-[run/test/prod].sqrl     # Contains the source table definitions, split by variant or environment
+├── snapshots/                           # Contains the snapshot data for tests
+│   └── {name}/                          # One directory per project
+├── {name}-api/                          # Contains the API schema and operation definitions for the project
+│   ├── schema.v1.graphqls               # GraphQL schema definition
+│   ├── tests/                           # Contains GraphQL test queries as .graphql files
+│   └── operations-v1/                   # Contains any operation definitions as .graphql files
+└── README.md                            # Explain the project(s) and structure
```
-A project has one or more `package.json` configuration files to configure the compiled pipeline for different environments: running locally, testing, and one or more deployment environments. The targeted environment is used in the name, e.g. `run`, `test`, `qa`, `prod`, etc.
+A project has one or more `package.json` configuration files to configure the compiled pipeline for different environments: running locally, testing, and one or more deployment environments.
+The targeted environment is used in the name, e.g. `run`, `test`, `qa`, `prod`, etc.
-The `package.json` file is the authoritative source that defines the main SQRL script and (optional) GraphQL schema and operations. It also configures snapshot and test directories. Always consult the `package.json` files for the relative file paths to the project source files.
+The `package.json` file is the authoritative source that defines the main SQRL script and (optional) GraphQL schema and operations.
+It also configures snapshot and test directories. Always consult the `package.json` files for the relative file paths to the project source files.
For advanced projects or when multiple projects share one directory, the structure may include:
```
├── [shared/authentication]-package.json  # Config file that is shared across projects
-├── tests/ # Folder that contains test code to separate it from the main logic
-│   └── {test-name}.sqrl # This file is included inline in the main script
-├── functions/ # User defined functions
-└── shared.sqrl # SQRL script that's shared across projects
+├── tests/                                # Folder that contains test code to separate it from the main logic
+│   └── {test-name}.sqrl                  # This file is included inline in the main script
+├── functions/                            # User defined functions
+└── shared.sqrl                           # SQRL script that's shared across projects
```
\ No newline at end of file
diff --git a/documentation/docs/howto/stream-enrichment.md b/documentation/docs/howto/stream-enrichment.md
index 3b9a5ae78..5e93df617 100644
--- a/documentation/docs/howto/stream-enrichment.md
+++ b/documentation/docs/howto/stream-enrichment.md
@@ -1,14 +1,17 @@
# Enriching Data Streams
-A common requirement in stream processing is to enrich a **STREAM** of events with dimensional data in a time-consistent manner. This pattern is particularly useful when you need to join real-time events with slowly changing dimensional data while maintaining temporal consistency.
+A common requirement in stream processing is to enrich a **STREAM** of events with dimensional data in a time-consistent manner.
+This pattern is particularly useful when you need to join real-time events with slowly changing dimensional data while maintaining temporal consistency.
## Use Case: Transaction Enrichment
-Suppose we want to enrich transaction events with the account balance that was valid **at the time of the transaction**. This ensures we get consistent, point-in-time data for analysis.
+Suppose we want to enrich transaction events with the account balance that was valid **at the time of the transaction**.
+This ensures we get consistent, point-in-time data for analysis.
## Defining Source Tables
-First, define your data sources. These can be internal tables (managed by DataSQRL) or external tables with connector configuration:
+First, define your data sources.
+These can be internal tables (managed by DataSQRL) or external tables with connector configuration:
```sql
-- Transaction events stream (STREAM type)
diff --git a/documentation/docs/howto/subgraph-elimination.md b/documentation/docs/howto/subgraph-elimination.md
index dcb7094f8..ead806b75 100644
--- a/documentation/docs/howto/subgraph-elimination.md
+++ b/documentation/docs/howto/subgraph-elimination.md
@@ -15,4 +15,5 @@ ResultA := SELECT a, c FROM MyComputedTable WHERE noop(a,b,c);
ResultB := SELECT b, c FROM MyComputedTable WHERE noop(a,b,c);
```
-Because `ResultA` and `ResultB` select different subsets of columns, those selections can get optimized down to the source `InputData` table resulting in `expensive_function` being executed twice because the relational trees are slightly different. By adding the `noop` function we inhibit that push-down optimization.
\ No newline at end of file
+Because `ResultA` and `ResultB` select different subsets of columns, those selections can get optimized down to the source `InputData` table resulting in `expensive_function` being executed twice because the relational trees are slightly different.
+By adding the `noop` function we inhibit that push-down optimization.
\ No newline at end of file
diff --git a/documentation/docs/howto/templating.md b/documentation/docs/howto/templating.md
index 3a8ddfb6d..7b64518ee 100644
--- a/documentation/docs/howto/templating.md
+++ b/documentation/docs/howto/templating.md
@@ -4,7 +4,8 @@ DataSQRL uses the Mustache templating engine to substitute configuration variabl
## How It Works
-Variables in your SQRL script are wrapped in double curly braces `{{variableName}}`. When DataSQRL compiles the script, it replaces these placeholders with values defined in the `script.config` section of your [`package.json`](../configuration) configuration file.
+Variables in your SQRL script are wrapped in double curly braces `{{variableName}}`.
+When DataSQRL compiles the script, it replaces these placeholders with values defined in the `script.config` section of your [`package.json`](../configuration) configuration file.
## Example
diff --git a/documentation/docs/sqrl-language.md b/documentation/docs/sqrl-language.md
index 501ac894e..0d7b1609e 100644
--- a/documentation/docs/sqrl-language.md
+++ b/documentation/docs/sqrl-language.md
@@ -36,7 +36,8 @@ SQRL inherits full FlinkSQL grammar for
Refer to the [FlinkSQL documentation](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/dev/table/sql/overview/) for a detailed specification.
## Type System
-In SQRL, every table and function has a type based on how the table represents data. The type determines the semantic validity of queries against tables and how data is processed by different engines.
+In SQRL, every table and function has a type based on how the table represents data.
+The type determines the semantic validity of queries against tables and how data is processed by different engines.
SQRL assigns one of the following types to tables based on the definition:
- **STREAM**: Represents a stream of immutable records with an assigned timestamp (often referred to as the "event time"). Streams are append-only. Stream tables represent events or actions over time.
@@ -56,7 +57,8 @@ IMPORT qualifiedPath.*; -- wildcard
Imports another SQRL script into the current script. The `qualifiedPath` is a `.` separated path that maps to the local file system relative to the current script, e.g. `IMPORT my.custom.script` maps to the relative path `./my/custom/script.sqrl`.
-Imports that end in `.*` are imported inline which means that the statement from that script are executed verbatim in the current script. Otherwise, imports are available within a namespace that's equal to the name of the script or the optional `AS` identifier.
+Imports that end in `.*` are imported inline, which means that the statements from that script are executed verbatim in the current script.
+Otherwise, imports are available within a namespace that's equal to the name of the script or the optional `AS` identifier.
Examples:
* `IMPORT my.custom.script.*`: All table definitions from the script are imported inline and can be referenced directly as `MyTable` in `FROM` clauses.
@@ -65,7 +67,9 @@ Examples:
## CREATE TABLE (internal vs external)
-SQRL understands the complete FlinkSQL `CREATE TABLE` syntax, but distinguishes between **internal** and **external** source tables. External source tables are standard FlinkSQL tables that connect to an external data source (e.g. database or Kafka cluster). Internal tables connect to a data source that is managed by SQRL (depending on the configured `log` engine, e.g. a Kafka topic) and exposed for data ingestion in the interface.
+SQRL understands the complete FlinkSQL `CREATE TABLE` syntax, but distinguishes between **internal** and **external** source tables.
+External source tables are standard FlinkSQL tables that connect to an external data source (e.g. database or Kafka cluster).
+Internal tables connect to a data source that is managed by SQRL (depending on the configured `log` engine, e.g. a Kafka topic) and exposed for data ingestion in the interface.
| Feature | Internal source (managed by SQRL) | External Source (connector) |
|-------------------------------|-----------------------------------------------------------------|-----------------------------|
@@ -225,7 +229,10 @@ Employees.allReports RETURNS (employeeid BIGINT NOT NULL, name STRING NOT NULL,
## Interfaces
-The tables and functions defined in a SQRL script are exposed through an interface. The term "interface" is used generically to describe a means by which a client, user, or external system can access the processed data. The interface depends on the [configured engines](configuration.md#engines-engines): API endpoints for servers, queries and views for databases, and topics for logs. An interface is a sink in the data processing DAG that's defined by a SQRL script.
+The tables and functions defined in a SQRL script are exposed through an interface.
+The term "interface" is used generically to describe a means by which a client, user, or external system can access the processed data.
+The interface depends on the [configured engines](configuration.md#engines-enabled-engines): API endpoints for servers, queries and views for databases, and topics for logs.
+An interface is a sink in the data processing DAG that's defined by a SQRL script.
How a table or function is exposed in the interface depends on the access type. The access type is one of the following:
@@ -346,7 +353,8 @@ NEXT_BATCH;
```
:::warning
-Sub-batches are executed stand-alone, meaning each sub-batch reads the data from source and not from the intermediate results of the previous sub-batch. If you wish to start with those, you need to explicitly write them out and read them.
+Sub-batches are executed stand-alone, meaning each sub-batch reads the data from source and not from the intermediate results of the previous sub-batch.
+If you wish to start with those, you need to explicitly write them out and read them.
:::
diff --git a/sqrl-cli/src/test/java/com/datasqrl/engine/server/GenericJavaServerEngineTest.java b/sqrl-cli/src/test/java/com/datasqrl/engine/server/GenericJavaServerEngineTest.java
index 6b36c32c1..6d586d5e8 100644
--- a/sqrl-cli/src/test/java/com/datasqrl/engine/server/GenericJavaServerEngineTest.java
+++ b/sqrl-cli/src/test/java/com/datasqrl/engine/server/GenericJavaServerEngineTest.java
@@ -36,25 +36,7 @@ class GenericJavaServerEngineTest {
@Test
void test() {
// innermost object: a single pub-/sec key
- Map pubSecKey =
- Map.of(
- "algorithm", "HS256",
- "buffer", "dGVzdFNlY3JldA==");
-
- // JWT options
- Map jwtOptions =
- Map.of(
- "issuer", "my-test-issuer",
- "audience", List.of("my-test-audience"),
- "expiresInSeconds", "3600",
- "leeway", "60");
-
- // jwtAuth node
- Map jwtAuth =
- Map.of("pubSecKeys", List.of(pubSecKey), "jwtOptions", jwtOptions);
-
- // root
- Map config = Map.of("jwtAuth", jwtAuth);
+ var config = getConfigMap();
var defaultConfig = underTest.readDefaultConfig();
assertThat(defaultConfig.getJwtAuth()).isNull();
@@ -72,22 +54,7 @@ void test() {
@SneakyThrows
void givenJwtConfiguration_whenConfigMerged_thenBuffersAreStringValues() {
// Create JWT configuration with buffer as simple string (not complex object)
- Map pubSecKey =
- Map.of(
- "algorithm", "HS256",
- "buffer", "dGVzdFNlY3JldA==");
-
- Map jwtOptions =
- Map.of(
- "issuer", "my-test-issuer",
- "audience", List.of("my-test-audience"),
- "expiresInSeconds", "3600",
- "leeway", "60");
-
- Map jwtAuth =
- Map.of("pubSecKeys", List.of(pubSecKey), "jwtOptions", jwtOptions);
-
- Map config = Map.of("jwtAuth", jwtAuth);
+ var config = getConfigMap();
// Test the configuration merging that would happen during serverConfig() generation
var defaultConfig = underTest.readDefaultConfig();
@@ -115,6 +82,28 @@ void givenJwtConfiguration_whenConfigMerged_thenBuffersAreStringValues() {
assertThat(bufferNode.has("bytes")).isFalse();
}
+ private static Map getConfigMap() {
+ var pubSecKey =
+ Map.of(
+ "algorithm", "HS256",
+ "buffer", "dGVzdFNlY3JldA==");
+
+ var jwtOptions =
+ Map.of(
+ "issuer",
+ "my-test-issuer",
+ "audience",
+ List.of("my-test-audience"),
+ "expiresInSeconds",
+ 3600,
+ "leeway",
+ 30);
+
+ var jwtAuth = Map.of("pubSecKeys", List.of(pubSecKey), "jwtOptions", jwtOptions);
+
+ return Map.of("jwtAuth", jwtAuth);
+ }
+
private static class DummyConverter implements QueryEngineConfigConverter {
@Override
diff --git a/sqrl-testing/sqrl-testing-integration/src/test/resources/usecases/jwt-authorized/package.json b/sqrl-testing/sqrl-testing-integration/src/test/resources/usecases/jwt-authorized/package.json
index 118fe24c8..8a22f8e9e 100644
--- a/sqrl-testing/sqrl-testing-integration/src/test/resources/usecases/jwt-authorized/package.json
+++ b/sqrl-testing/sqrl-testing-integration/src/test/resources/usecases/jwt-authorized/package.json
@@ -17,8 +17,8 @@
"jwtOptions": {
"issuer": "my-test-issuer",
"audience": ["my-test-audience"],
- "expiresInSeconds": "3600",
- "leeway": "60"
+ "expiresInSeconds": 3600,
+ "leeway": 30
}
}
}
From 3f8fac06d189dcfebbcefc3948afd5891336478a Mon Sep 17 00:00:00 2001
From: Ferenc Csaky
Date: Fri, 17 Oct 2025 16:39:56 +0200
Subject: [PATCH 26/31] auth testing docs and some more JWT guidance
---
.../docs/configuration-engine/vertx.md | 20 ++++-
.../docs/howto/testing-authorization.md | 81 ++++++++++++++++++-
2 files changed, 96 insertions(+), 5 deletions(-)
diff --git a/documentation/docs/configuration-engine/vertx.md b/documentation/docs/configuration-engine/vertx.md
index 7db5c4eff..0de11324d 100644
--- a/documentation/docs/configuration-engine/vertx.md
+++ b/documentation/docs/configuration-engine/vertx.md
@@ -35,14 +35,14 @@ For secure APIs with JWT authentication:
"pubSecKeys": [
{
"algorithm": "HS256",
- "buffer": "" // Base64 encoded signer secret string
+ "buffer": "" // Base64 encoded signer secret string
}
],
"jwtOptions": {
"issuer": "",
"audience": [""],
- "expiresInSeconds": "3600",
- "leeway": "60"
+ "expiresInSeconds": 3600,
+ "leeway": 30
}
}
}
@@ -51,6 +51,20 @@ For secure APIs with JWT authentication:
}
```
+As these config fields are mapped to Vert.x Java POJOs, the key names must match exactly.
+For `pubSecKeys`, it is also possible to use other algorithms, which require the key in a different (typically PEM) format.
+For example, for `ES256`, the configuration would look like this:
+```json
+{
+ "pubSecKeys": [
+ {
+ "algorithm": "ES256",
+ "buffer": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhk...restOfBase64...\n-----END PUBLIC KEY-----"
+ }
+ ]
+}
+```
+
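+For illustration, a key pair in the PEM format expected for `ES256` can be generated with standard OpenSSL commands (OpenSSL is not part of DataSQRL; any equivalent tooling works). The content of the public key file is what goes into `buffer`:
+
+```sh
+# Generate a P-256 (prime256v1) private key used to sign test tokens
+openssl ecparam -name prime256v1 -genkey -noout -out es256-private.pem
+
+# Derive the matching public key in PEM format; its contents become the "buffer" value
+openssl ec -in es256-private.pem -pubout -out es256-public.pem
+```
+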
## Deployment Configuration
Vert.x supports deployment-specific configuration options for scaling the API server:
diff --git a/documentation/docs/howto/testing-authorization.md b/documentation/docs/howto/testing-authorization.md
index 4e4de83ae..3b16112f2 100644
--- a/documentation/docs/howto/testing-authorization.md
+++ b/documentation/docs/howto/testing-authorization.md
@@ -1,5 +1,82 @@
# Testing Authorization
-You can test record filtering, data masking, and other types of authorization based data access control with DataSQRL's automated test runner via the [`run` command](../compiler#test-command).
+You can test record filtering, data masking, and other types of authorization-based data access control with DataSQRL's automated test runner via the [`test` command](../compiler#test-command).
-TODO: Please describe how to generate claims, tokens, and configure those in the package.json and via .properties files.
\ No newline at end of file
+## Generating Tokens
+
+One of the most straightforward ways to generate test tokens is the **JWT Encoder** on https://jwt.io.
+The **Payload: Data** section can be shaped to your testing needs.
+For testing purposes, the `HS256` algorithm is usually sufficient.
+You only need to define a sufficiently long signer secret string on the website.
+
+The only manual step required for `HS256` is to Base64-encode your secret, for example:
+```sh
+echo mySuperSecretSignerStringThatIsLongEnough | base64
+```
+
+Then set the encoded secret as the `buffer` value in the `vertx` config section of the `package.json` file:
+
+```json
+{
+ ...
+ "engines" : {
+ "vertx" : {
+ "authKind": "JWT",
+ "config": {
+ "jwtAuth": {
+ "pubSecKeys": [
+ {
+ "algorithm": "HS256",
+ "buffer": "bXlTdXBlclNlY3JldFNpZ25lclN0cmluZ1RoYXRJc0xvbmdFbm91Z2gK"
+ }
+ ],
+ ...
+ }
+ }
+ }
+ },
+ ...
+}
+```
+
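+If you prefer the command line over jwt.io, the sketch below produces an equivalent `HS256` token using plain OpenSSL (this is not DataSQRL tooling). It assumes `SECRET` holds exactly the bytes your configured `buffer` decodes to (note that `echo` without `-n` appends a newline, which then becomes part of the secret), and that the claims match your `jwtOptions`:
+
+```sh
+b64url() { openssl base64 -A | tr '+/' '-_' | tr -d '='; }
+
+SECRET='mySuperSecretSignerStringThatIsLongEnough'
+HEADER=$(printf '{"alg":"HS256","typ":"JWT"}' | b64url)
+PAYLOAD=$(printf '{"iss":"my-test-issuer","aud":["my-test-audience"],"exp":9999999999}' | b64url)
+SIGNATURE=$(printf '%s.%s' "$HEADER" "$PAYLOAD" | openssl dgst -sha256 -hmac "$SECRET" -binary | b64url)
+
+# The resulting token can be used as the Bearer value in the Authorization header
+echo "$HEADER.$PAYLOAD.$SIGNATURE"
+```
+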
+## Default Test Runner Token
+
+A token can be set directly in the `test-runner` configuration, which the test runner picks up by default.
+Any valid HTTP headers can be defined under `headers`, but in this context the important one is `Authorization`.
+These headers are added to every request executed during the test.
+
+```json
+{
+ "test-runner": {
+ "headers": {
+ "Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJteS10ZXN0LWlzc3VlciIsImF1ZCI6WyJteS10ZXN0LWF1ZGllbmNlIl0sImV4cCI6OTk5OTk5OTk5OSwidmFsIjoxfQ.cvgte5Lfhrsr2OPoRM9ecJbxehBQzwHaghANY6MvhqE"
+ }
+ }
+}
+```
+
+## Additional Tests
+
+To test different scenarios, you need to provide different tokens that simulate them.
+To achieve this, define additional test cases under the project's `test-folder`; the test execution picks them up and compares the results with their respective snapshots.
+A custom JWT test case requires two files that share the same base name, which also serves as the name of the test case:
+* A `.graphql` file that defines a query, mutation, or subscription.
+* A `.properties` file if the test case requires a different token than the one defined in `test-runner`.
+
+A sample structure with three test cases looks like the file tree below.
+
+```
+├── tests/
+│   ├── mutationWithSameToken.graphql
+│   ├── subscriptionWithSameToken.graphql
+│   ├── differentUserQuery.graphql
+│   ├── differentUserQuery.properties
+│   ...
+```
+
+The content of the `.properties` file overrides the applied `headers` for the matching `.graphql` request, making it possible to define different scenarios.
+A simple JWT override header properties file would look like this:
+
+```properties
+Authorization: Bearer
+```
From 0fe897dae2563c57643de75bdfe0fed2d4ebc4ed Mon Sep 17 00:00:00 2001
From: Ferenc Csaky
Date: Mon, 20 Oct 2025 14:10:34 +0200
Subject: [PATCH 27/31] fix license headers
---
.../scripts/generate-function-docs.js | 18 +++++++++++++++++-
1 file changed, 17 insertions(+), 1 deletion(-)
diff --git a/documentation/scripts/generate-function-docs.js b/documentation/scripts/generate-function-docs.js
index 30c713fc0..81d6f851a 100644
--- a/documentation/scripts/generate-function-docs.js
+++ b/documentation/scripts/generate-function-docs.js
@@ -1,3 +1,19 @@
+/*
+ * Copyright © 2021 DataSQRL (contact@datasqrl.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
const fs = require('fs');
const path = require('path');
const yaml = require('js-yaml');
@@ -134,4 +150,4 @@ if (require.main === module) {
generateFunctionDocs();
}
-module.exports = { generateFunctionDocs };
\ No newline at end of file
+module.exports = { generateFunctionDocs };
From 75990c6a5389112f58eccd3f11da61d970cec3f5 Mon Sep 17 00:00:00 2001
From: Ferenc Csaky
Date: Mon, 20 Oct 2025 14:57:30 +0200
Subject: [PATCH 28/31] fix snapshots
---
.../com/datasqrl/UseCaseCompileTest/loan-loan-package.txt | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/UseCaseCompileTest/loan-loan-package.txt b/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/UseCaseCompileTest/loan-loan-package.txt
index 43b443e90..fdf5c9653 100644
--- a/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/UseCaseCompileTest/loan-loan-package.txt
+++ b/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/UseCaseCompileTest/loan-loan-package.txt
@@ -102,7 +102,7 @@ Schema:
- message: VARCHAR(2147483647) CHARACTER SET "UTF-16LE"
- event_time: TIMESTAMP_LTZ(3) *ROWTIME*
Plan:
-LogicalWatermarkAssigner(rowtime=[event_time], watermark=[-($4, 15000:INTERVAL SECOND)])
+LogicalWatermarkAssigner(rowtime=[event_time], watermark=[-($4, 0:INTERVAL SECOND)])
LogicalProject(_uuid=[$0], loan_application_id=[$1], status=[$2], message=[$3], event_time=[CAST($4):TIMESTAMP_LTZ(3) *ROWTIME*])
LogicalTableScan(table=[[default_catalog, default_database, ApplicationUpdates, metadata=[timestamp]]])
SQL: CREATE VIEW `ApplicationUpdates__view`
@@ -371,7 +371,7 @@ CREATE TABLE `ApplicationUpdates` (
`status` STRING NOT NULL,
`message` STRING,
`event_time` TIMESTAMP_LTZ(3) METADATA FROM 'timestamp',
- WATERMARK FOR `event_time` AS `event_time` - INTERVAL '15.0' SECOND
+ WATERMARK FOR `event_time` AS `event_time` - INTERVAL '0.0' SECOND
) WITH (
'connector' = 'kafka',
'flexible-json.timestamp-format.standard' = 'ISO-8601',
From 78a579b369712cce4b7d160cdb12d60e30418152 Mon Sep 17 00:00:00 2001
From: Ferenc Csaky
Date: Wed, 29 Oct 2025 17:21:29 +0100
Subject: [PATCH 29/31] code format
---
.../planner/analyzer/cost/SimpleCostAnalysisModel.java | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java b/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java
index 51816a6af..98736d0be 100644
--- a/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java
+++ b/sqrl-planner/src/main/java/com/datasqrl/planner/analyzer/cost/SimpleCostAnalysisModel.java
@@ -52,9 +52,9 @@ public Simple getCost(ExecutionStage executionStage, TableAnalysis tableAnalysis
case STREAMS:
cost =
switch (type) {
- // We assume that pre-computing is generally cheaper (by factor of 10) unless
- // (standard) joins are involved which can lead to combinatorial explosion.
- // So, we primarily cost the joins
+ // We assume that pre-computing is generally cheaper (by factor of 10) unless
+ // (standard) joins are involved which can lead to combinatorial explosion.
+ // So, we primarily cost the joins
case DEFAULT -> joinCost(tableAnalysis.getCosts()) / 10;
case WRITE -> cost / 10; // Make it always cheaper than database
case READ -> cost * 2; // Make it more expensive than database to favor reads
From 7151e5549fd4b92cf345b359677793a55eb76c57 Mon Sep 17 00:00:00 2001
From: Ferenc Csaky
Date: Wed, 29 Oct 2025 17:47:36 +0100
Subject: [PATCH 30/31] add env var resolution docs
---
documentation/docs/configuration-default.md | 26 +++++++++++++++++----
1 file changed, 22 insertions(+), 4 deletions(-)
diff --git a/documentation/docs/configuration-default.md b/documentation/docs/configuration-default.md
index 74cd754af..fb6ed7f6c 100644
--- a/documentation/docs/configuration-default.md
+++ b/documentation/docs/configuration-default.md
@@ -94,11 +94,29 @@ The following is the [default configuration file](https://raw.githubusercontent.
## Connector Template Variables
-The connector templates configured under `connectors` use SQRL-specific variables like `${sqrl:table-name}`.
+The connector templates configured under `connectors` can use environment variables and SQRL-specific variables for dynamic configuration.
-SQRL-specific variables start with a `sqrl:` prefix and are substituted by the compiler at compile-time in configuration files. SQRL env variables `${sqrl:}` are used for templating inside connector configuration templates and support the following identifiers:
-`table-name`, `original-table-name`, `filename`, `format`, and `kafka-key`.
+### Environment Variables
+
+You can reference environment variables using the `${VAR_NAME}` placeholder syntax, for example `${POSTGRES_PASSWORD}`.
+At runtime, these placeholders are automatically resolved using the environment variables defined in the system or deployment environment.
+
+This helps keep security credentials out of configuration files and adds flexibility across different deployment environments.
+
+### SQRL Variables
+
+SQRL-specific variables start with a `sqrl:` prefix and are used for templating inside connector configuration options.
+The syntax looks like `${sqrl:}`.
+
+Supported identifiers include:
+- `table-name`
+- `original-table-name`
+- `filename`
+- `format`
+- `kafka-key`
+
+These are typically used within connector templates to inject table-specific or context-aware configuration values.
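+
+For illustration, a connector template under `connectors` could combine both kinds of variables like this (the `kafka` entry and the `KAFKA_BOOTSTRAP_SERVERS` variable are hypothetical; the option keys are standard Flink Kafka connector options):
+
+```json
+{
+  "connectors": {
+    "kafka": {
+      "connector": "kafka",
+      "topic": "${sqrl:original-table-name}",
+      "properties.bootstrap.servers": "${KAFKA_BOOTSTRAP_SERVERS}",
+      "value.format": "${sqrl:format}"
+    }
+  }
+}
+```
+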
:::warning
Unresolved `${sqrl:*}` placeholders raise a validation error.
-:::
\ No newline at end of file
+:::
From 3bd1d0ced399ab9f44cc27461162aa1cefb6b002 Mon Sep 17 00:00:00 2001
From: Ferenc Csaky
Date: Wed, 29 Oct 2025 17:53:08 +0100
Subject: [PATCH 31/31] fix snapshots
---
.../com/datasqrl/DAGPlannerTest/mutationInsertTypeTest.txt | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/DAGPlannerTest/mutationInsertTypeTest.txt b/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/DAGPlannerTest/mutationInsertTypeTest.txt
index 8b051a29c..c0c17b8b8 100644
--- a/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/DAGPlannerTest/mutationInsertTypeTest.txt
+++ b/sqrl-testing/sqrl-testing-integration/src/test/resources/snapshots/com/datasqrl/DAGPlannerTest/mutationInsertTypeTest.txt
@@ -60,7 +60,7 @@ Schema:
- machineid: INTEGER
- updatedTime: TIMESTAMP_LTZ(3) *ROWTIME*
Plan:
-LogicalWatermarkAssigner(rowtime=[updatedTime], watermark=[-($2, 15000:INTERVAL SECOND)])
+LogicalWatermarkAssigner(rowtime=[updatedTime], watermark=[-($2, 0:INTERVAL SECOND)])
LogicalProject(sensorid=[$0], machineid=[$1], updatedTime=[CAST($2):TIMESTAMP_LTZ(3) *ROWTIME*])
LogicalTableScan(table=[[default_catalog, default_database, Sensors, metadata=[timestamp]]])
SQL: CREATE VIEW `Sensors__view`
@@ -95,7 +95,7 @@ CREATE TABLE `Sensors` (
`machineid` INTEGER,
`updatedTime` TIMESTAMP_LTZ(3) METADATA FROM 'timestamp',
PRIMARY KEY (`sensorid`) NOT ENFORCED,
- WATERMARK FOR `updatedTime` AS `updatedTime` - INTERVAL '15.0' SECOND
+ WATERMARK FOR `updatedTime` AS `updatedTime` - INTERVAL '0.0' SECOND
) WITH (
'connector' = 'upsert-kafka',
'key.flexible-json.timestamp-format.standard' = 'ISO-8601',