From 71c150d4c3dbc6860f935bfeb2b592c25e3d9dd7 Mon Sep 17 00:00:00 2001 From: Alex Gittings Date: Mon, 8 Sep 2025 17:12:33 +0100 Subject: [PATCH 01/16] Add `create_diff` to the clients to manually create a diff between two times. (#531) * feat: add create_diff method and update get_diff_summary to accept name parameter * feat: enhance get_diff_summary to support time range filtering * fix: update action field in diff summary to reflect correct status * feat: add create_diff method and update get_diff_summary for time range support * feat: add create_diff method and enhance get_diff_summary to support time range filtering * fix: remove unnecessary blank lines in InfrahubClientSync class * feat: update create_diff method to validate time range and change return type --- changelog/529.added.md | 2 + infrahub_sdk/client.py | 78 ++++++++++++++++++++++++++++- infrahub_sdk/diff.py | 6 +-- tests/unit/sdk/test_diff_summary.py | 6 +-- 4 files changed, 84 insertions(+), 8 deletions(-) create mode 100644 changelog/529.added.md diff --git a/changelog/529.added.md b/changelog/529.added.md new file mode 100644 index 00000000..cb02de4f --- /dev/null +++ b/changelog/529.added.md @@ -0,0 +1,2 @@ +Add `create_diff` method to create a diff summary between two timestamps +Update `get_diff_summary` to accept optional time range parameters \ No newline at end of file diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index 671a2f5f..18af1985 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -5,6 +5,7 @@ import logging import time from collections.abc import Coroutine, MutableMapping +from datetime import datetime from functools import wraps from time import sleep from typing import ( @@ -24,6 +25,7 @@ from .batch import InfrahubBatch, InfrahubBatchSync from .branch import ( + MUTATION_QUERY_TASK, BranchData, InfrahubBranchManager, InfrahubBranchManagerSync, @@ -1154,21 +1156,57 @@ async def query_gql_query( return decode_json(response=resp) + async def create_diff( + self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True + ) -> bool | str: + if from_time > to_time: + raise ValueError("from_time must be <= to_time") + input_data = { + "wait_until_completion": wait_until_completion, + "data": { + "name": name, + "branch": branch, + "from_time": from_time.isoformat(), + "to_time": to_time.isoformat(), + }, + } + + mutation_query = MUTATION_QUERY_TASK if not wait_until_completion else {"ok": None} + query = Mutation(mutation="DiffUpdate", input_data=input_data, query=mutation_query) + response = await self.execute_graphql(query=query.render(), tracker="mutation-diff-update") + + if not wait_until_completion and "task" in response["DiffUpdate"]: + return response["DiffUpdate"]["task"]["id"] + + return response["DiffUpdate"]["ok"] + async def get_diff_summary( self, branch: str, + name: str | None = None, + from_time: datetime | None = None, + to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool = True, ) -> list[NodeDiff]: query = get_diff_summary_query() + input_data = {"branch_name": branch} + if name: + input_data["name"] = name + if from_time and to_time and from_time > to_time: + raise ValueError("from_time must be <= to_time") + if from_time: + input_data["from_time"] = from_time.isoformat() + if to_time: + input_data["to_time"] = to_time.isoformat() response = await self.execute_graphql( query=query, branch_name=branch, timeout=timeout, tracker=tracker, 
raise_for_error=raise_for_error, - variables={"branch_name": branch}, + variables=input_data, ) node_diffs: list[NodeDiff] = [] @@ -2293,21 +2331,57 @@ def query_gql_query( return decode_json(response=resp) + def create_diff( + self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True + ) -> bool | str: + if from_time > to_time: + raise ValueError("from_time must be <= to_time") + input_data = { + "wait_until_completion": wait_until_completion, + "data": { + "name": name, + "branch": branch, + "from_time": from_time.isoformat(), + "to_time": to_time.isoformat(), + }, + } + + mutation_query = MUTATION_QUERY_TASK if not wait_until_completion else {"ok": None} + query = Mutation(mutation="DiffUpdate", input_data=input_data, query=mutation_query) + response = self.execute_graphql(query=query.render(), tracker="mutation-diff-update") + + if not wait_until_completion and "task" in response["DiffUpdate"]: + return response["DiffUpdate"]["task"]["id"] + + return response["DiffUpdate"]["ok"] + def get_diff_summary( self, branch: str, + name: str | None = None, + from_time: datetime | None = None, + to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool = True, ) -> list[NodeDiff]: query = get_diff_summary_query() + input_data = {"branch_name": branch} + if name: + input_data["name"] = name + if from_time and to_time and from_time > to_time: + raise ValueError("from_time must be <= to_time") + if from_time: + input_data["from_time"] = from_time.isoformat() + if to_time: + input_data["to_time"] = to_time.isoformat() response = self.execute_graphql( query=query, branch_name=branch, timeout=timeout, tracker=tracker, raise_for_error=raise_for_error, - variables={"branch_name": branch}, + variables=input_data, ) node_diffs: list[NodeDiff] = [] diff --git a/infrahub_sdk/diff.py b/infrahub_sdk/diff.py index c445000f..6b17b147 100644 --- a/infrahub_sdk/diff.py +++ b/infrahub_sdk/diff.py @@ -37,8 +37,8 @@ class NodeDiffPeer(TypedDict): def get_diff_summary_query() -> str: return """ - query GetDiffTree($branch_name: String!) 
{ - DiffTree(branch: $branch_name) { + query GetDiffTree($branch_name: String!, $name: String, $from_time: DateTime, $to_time: DateTime) { + DiffTree(branch: $branch_name, name: $name, from_time: $from_time, to_time: $to_time) { nodes { uuid kind @@ -121,7 +121,7 @@ def diff_tree_node_to_node_diff(node_dict: dict[str, Any], branch_name: str) -> branch=branch_name, kind=str(node_dict.get("kind")), id=str(node_dict.get("uuid")), - action=str(node_dict.get("action")), + action=str(node_dict.get("status")), display_label=str(node_dict.get("label")), elements=element_diffs, ) diff --git a/tests/unit/sdk/test_diff_summary.py b/tests/unit/sdk/test_diff_summary.py index 7a176b20..73832cab 100644 --- a/tests/unit/sdk/test_diff_summary.py +++ b/tests/unit/sdk/test_diff_summary.py @@ -109,7 +109,7 @@ async def test_diffsummary(clients: BothClients, mock_diff_tree_query, client_ty "branch": "branch2", "kind": "TestCar", "id": "17fbadf0-6637-4fa2-43e6-1677ea170e0f", - "action": "None", + "action": "UPDATED", "display_label": "nolt #444444", "elements": [ { @@ -124,7 +124,7 @@ async def test_diffsummary(clients: BothClients, mock_diff_tree_query, client_ty "branch": "branch2", "kind": "TestPerson", "id": "17fbadf0-634f-05a8-43e4-1677e744d4c0", - "action": "None", + "action": "UPDATED", "display_label": "Jane", "elements": [ { @@ -140,7 +140,7 @@ async def test_diffsummary(clients: BothClients, mock_diff_tree_query, client_ty "branch": "branch2", "kind": "TestPerson", "id": "17fbadf0-6243-5d3c-43ee-167718ff8dac", - "action": "None", + "action": "UPDATED", "display_label": "Jonathan", "elements": [ { From 565674fd2ed6d5e5e7fe15b5306b381cc14e5539 Mon Sep 17 00:00:00 2001 From: Alex Gittings Date: Tue, 9 Sep 2025 13:20:55 +0100 Subject: [PATCH 02/16] Fix issue with `render` ctl command not using branch environment variables (#536) * Fix branch handling in `_run_transform` and `execute_graphql_query` functions * Add changelog entry for fixing branch handling in `_run_transform` and `execute_graphql_query` functions * Remove unused import of `get_branch` from `utils.py` * Add jinja2 transform and GraphQL query for tags, and implement branch selection test * Update changelog to specify environment variable usage for branch management in `_run_transform` and `execute_graphql_query` functions. * Remove unused `get_branch` import and set default branch in `validate_graphql` function --- changelog/535.fixed.md | 1 + infrahub_sdk/ctl/cli_commands.py | 3 +- infrahub_sdk/ctl/utils.py | 4 ++ infrahub_sdk/ctl/validate.py | 8 ++-- .../repos/ctl_integration/.infrahub.yml | 6 +++ .../repos/ctl_integration/templates/tags.j2 | 1 + .../ctl_integration/templates/tags_query.gql | 11 +++++ tests/unit/ctl/test_render_app.py | 41 +++++++++++++++++++ 8 files changed, 70 insertions(+), 5 deletions(-) create mode 100644 changelog/535.fixed.md create mode 100644 tests/fixtures/repos/ctl_integration/templates/tags.j2 create mode 100644 tests/fixtures/repos/ctl_integration/templates/tags_query.gql diff --git a/changelog/535.fixed.md b/changelog/535.fixed.md new file mode 100644 index 00000000..fdbd499e --- /dev/null +++ b/changelog/535.fixed.md @@ -0,0 +1 @@ +Fix branch handling in `_run_transform` and `execute_graphql_query` functions in Infrahubctl to use environment variables for branch management. 
\ No newline at end of file diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py index bc6cc3d3..91222785 100644 --- a/infrahub_sdk/ctl/cli_commands.py +++ b/infrahub_sdk/ctl/cli_commands.py @@ -46,7 +46,7 @@ from ..schema import MainSchemaTypesAll, SchemaRoot from ..template import Jinja2Template from ..template.exceptions import JinjaTemplateError -from ..utils import get_branch, write_to_file +from ..utils import write_to_file from ..yaml import SchemaFile from .exporter import dump from .importer import load @@ -208,7 +208,6 @@ async def _run_transform( debug: Prints debug info to the command line repository_config: Repository config object. This is used to load the graphql query from the repository. """ - branch = get_branch(branch) try: response = execute_graphql_query( diff --git a/infrahub_sdk/ctl/utils.py b/infrahub_sdk/ctl/utils.py index 66f86865..074a066e 100644 --- a/infrahub_sdk/ctl/utils.py +++ b/infrahub_sdk/ctl/utils.py @@ -118,6 +118,10 @@ def execute_graphql_query( query_str = query_object.load_query() client = initialize_client_sync() + + if not branch: + branch = client.config.default_infrahub_branch + response = client.execute_graphql( query=query_str, branch_name=branch, diff --git a/infrahub_sdk/ctl/validate.py b/infrahub_sdk/ctl/validate.py index 99318239..d1715d9f 100644 --- a/infrahub_sdk/ctl/validate.py +++ b/infrahub_sdk/ctl/validate.py @@ -14,7 +14,7 @@ from ..ctl.exceptions import QueryNotFoundError from ..ctl.utils import catch_exception, find_graphql_query, parse_cli_vars from ..exceptions import GraphQLError -from ..utils import get_branch, write_to_file +from ..utils import write_to_file from ..yaml import SchemaFile from .parameters import CONFIG_PARAM from .utils import load_yamlfile_from_disk_and_exit @@ -68,8 +68,6 @@ def validate_graphql( ) -> None: """Validate the format of a GraphQL Query stored locally by executing it on a remote GraphQL endpoint""" - branch = get_branch(branch) - try: query_str = find_graphql_query(query) except QueryNotFoundError: @@ -81,6 +79,10 @@ def validate_graphql( variables_dict = parse_cli_vars(variables) client = initialize_client_sync() + + if not branch: + branch = client.config.default_infrahub_branch + try: response = client.execute_graphql( query=query_str, diff --git a/tests/fixtures/repos/ctl_integration/.infrahub.yml b/tests/fixtures/repos/ctl_integration/.infrahub.yml index 605cdff4..7d7d8682 100644 --- a/tests/fixtures/repos/ctl_integration/.infrahub.yml +++ b/tests/fixtures/repos/ctl_integration/.infrahub.yml @@ -26,7 +26,13 @@ generator_definitions: parameters: name: "name__value" +jinja2_transforms: + - name: tags + query: "tags_query" + template_path: "templates/tags.j2" queries: - name: animal_person file_path: queries/animal_person.gql + - name: tags_query + file_path: templates/tags_query.gql diff --git a/tests/fixtures/repos/ctl_integration/templates/tags.j2 b/tests/fixtures/repos/ctl_integration/templates/tags.j2 new file mode 100644 index 00000000..deeeb42c --- /dev/null +++ b/tests/fixtures/repos/ctl_integration/templates/tags.j2 @@ -0,0 +1 @@ +{{ data['BuiltinTag']['edges'][0]['node']['name']['value'] }} \ No newline at end of file diff --git a/tests/fixtures/repos/ctl_integration/templates/tags_query.gql b/tests/fixtures/repos/ctl_integration/templates/tags_query.gql new file mode 100644 index 00000000..6d2ea6ab --- /dev/null +++ b/tests/fixtures/repos/ctl_integration/templates/tags_query.gql @@ -0,0 +1,11 @@ +query TagsQuery($name: String!) 
{ + BuiltinTag(name__value: $name) { + edges { + node { + name { + value + } + } + } + } +} diff --git a/tests/unit/ctl/test_render_app.py b/tests/unit/ctl/test_render_app.py index dceba985..0589acda 100644 --- a/tests/unit/ctl/test_render_app.py +++ b/tests/unit/ctl/test_render_app.py @@ -73,3 +73,44 @@ def test_validate_template_not_found(test_case: RenderAppFailure, httpx_mock: HT output = runner.invoke(app, ["render", test_case.template, "name=red"]) assert test_case.error in strip_color(output.stdout) assert output.exit_code == 1 + + +@pytest.mark.parametrize( + "cli_branch,env_branch,from_git,expected_branch", + [ + ("cli-branch", None, False, "cli-branch"), + (None, "env-branch", False, "env-branch"), + (None, None, True, "git-branch"), + ], +) +@requires_python_310 +def test_render_branch_selection(monkeypatch, httpx_mock: HTTPXMock, cli_branch, env_branch, from_git, expected_branch): + """Test that the render command uses the correct branch source.""" + + if from_git: + monkeypatch.setattr("dulwich.porcelain.active_branch", lambda _: b"git-branch") + + httpx_mock.add_response( + method="POST", + url=f"http://mock/graphql/{expected_branch}", + json=json.loads( + read_fixture( + "red_tag.json", + "unit/test_infrahubctl/red_tags_query", + ) + ), + ) + + with temp_repo_and_cd(source_dir=FIXTURE_BASE_DIR / "ctl_integration"): + args = ["render", "tags", "name=red"] + env = {} + # Add test-specific variables + if cli_branch: + args.extend(["--branch", cli_branch]) + if env_branch: + env["INFRAHUB_DEFAULT_BRANCH"] = env_branch + env["INFRAHUB_DEFAULT_BRANCH_FROM_GIT"] = "false" + if from_git: + env["INFRAHUB_DEFAULT_BRANCH_FROM_GIT"] = "true" + output = runner.invoke(app, args, env=env) + assert output.exit_code == 0 From 8c266e7be5e8a59ab8fa7007713801b7f375da16 Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Wed, 10 Sep 2025 20:30:48 +0200 Subject: [PATCH 03/16] Remove unnecessary assignments before `return` statement --- infrahub_sdk/branch.py | 8 ++------ infrahub_sdk/client.py | 6 ++---- infrahub_sdk/diff.py | 3 +-- infrahub_sdk/node/node.py | 6 ++---- infrahub_sdk/playback.py | 3 +-- infrahub_sdk/repository.py | 3 +-- infrahub_sdk/timestamp.py | 3 +-- pyproject.toml | 2 +- 8 files changed, 11 insertions(+), 23 deletions(-) diff --git a/infrahub_sdk/branch.py b/infrahub_sdk/branch.py index 2403e1ef..2b1905ce 100644 --- a/infrahub_sdk/branch.py +++ b/infrahub_sdk/branch.py @@ -188,9 +188,7 @@ async def all(self) -> dict[str, BranchData]: query = Query(name="GetAllBranch", query=QUERY_ALL_BRANCHES_DATA) data = await self.client.execute_graphql(query=query.render(), tracker="query-branch-all") - branches = {branch["name"]: BranchData(**branch) for branch in data["Branch"]} - - return branches + return {branch["name"]: BranchData(**branch) for branch in data["Branch"]} async def get(self, branch_name: str) -> BranchData: query = Query(name="GetBranch", query=QUERY_ONE_BRANCH_DATA, variables={"branch_name": str}) @@ -230,9 +228,7 @@ def all(self) -> dict[str, BranchData]: query = Query(name="GetAllBranch", query=QUERY_ALL_BRANCHES_DATA) data = self.client.execute_graphql(query=query.render(), tracker="query-branch-all") - branches = {branch["name"]: BranchData(**branch) for branch in data["Branch"]} - - return branches + return {branch["name"]: BranchData(**branch) for branch in data["Branch"]} def get(self, branch_name: str) -> BranchData: query = Query(name="GetBranch", query=QUERY_ONE_BRANCH_DATA, variables={"branch_name": str}) diff --git a/infrahub_sdk/client.py 
b/infrahub_sdk/client.py index 18af1985..6b27599a 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -307,8 +307,7 @@ def _initialize(self) -> None: async def get_version(self) -> str: """Return the Infrahub version.""" response = await self.execute_graphql(query="query { InfrahubInfo { version }}") - version = response.get("InfrahubInfo", {}).get("version", "") - return version + return response.get("InfrahubInfo", {}).get("version", "") async def get_user(self) -> dict: """Return user information""" @@ -1602,8 +1601,7 @@ def _initialize(self) -> None: def get_version(self) -> str: """Return the Infrahub version.""" response = self.execute_graphql(query="query { InfrahubInfo { version }}") - version = response.get("InfrahubInfo", {}).get("version", "") - return version + return response.get("InfrahubInfo", {}).get("version", "") def get_user(self) -> dict: """Return user information""" diff --git a/infrahub_sdk/diff.py b/infrahub_sdk/diff.py index 6b17b147..fad10080 100644 --- a/infrahub_sdk/diff.py +++ b/infrahub_sdk/diff.py @@ -117,7 +117,7 @@ def diff_tree_node_to_node_diff(node_dict: dict[str, Any], branch_name: str) -> ) relationship_diff["peers"] = peer_diffs element_diffs.append(relationship_diff) - node_diff = NodeDiff( + return NodeDiff( branch=branch_name, kind=str(node_dict.get("kind")), id=str(node_dict.get("uuid")), @@ -125,4 +125,3 @@ def diff_tree_node_to_node_diff(node_dict: dict[str, Any], branch_name: str) -> display_label=str(node_dict.get("label")), elements=element_diffs, ) - return node_diff diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py index e6d129c3..72624467 100644 --- a/infrahub_sdk/node/node.py +++ b/infrahub_sdk/node/node.py @@ -579,8 +579,7 @@ async def artifact_fetch(self, name: str) -> str | dict[str, Any]: self._validate_artifact_support(ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE) artifact = await self._client.get(kind="CoreArtifact", name__value=name, object__ids=[self.id]) - content = await self._client.object_store.get(identifier=artifact._get_attribute(name="storage_id").value) - return content + return await self._client.object_store.get(identifier=artifact._get_attribute(name="storage_id").value) async def delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None: input_data = {"data": {"id": self.id}} @@ -1208,8 +1207,7 @@ def artifact_generate(self, name: str) -> None: def artifact_fetch(self, name: str) -> str | dict[str, Any]: self._validate_artifact_support(ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE) artifact = self._client.get(kind="CoreArtifact", name__value=name, object__ids=[self.id]) - content = self._client.object_store.get(identifier=artifact._get_attribute(name="storage_id").value) - return content + return self._client.object_store.get(identifier=artifact._get_attribute(name="storage_id").value) def delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None: input_data = {"data": {"id": self.id}} diff --git a/infrahub_sdk/playback.py b/infrahub_sdk/playback.py index 0ec72e8c..c00badc5 100644 --- a/infrahub_sdk/playback.py +++ b/infrahub_sdk/playback.py @@ -56,5 +56,4 @@ def _read_request( with Path(f"{self.directory}/{filename}.json").open(encoding="utf-8") as fobj: data = ujson.load(fobj) - response = httpx.Response(status_code=data["status_code"], content=data["response_content"], request=request) - return response + return httpx.Response(status_code=data["status_code"], content=data["response_content"], 
request=request) diff --git a/infrahub_sdk/repository.py b/infrahub_sdk/repository.py index 9472c4fa..331d15f1 100644 --- a/infrahub_sdk/repository.py +++ b/infrahub_sdk/repository.py @@ -29,5 +29,4 @@ def initialize_repo(self) -> Repo: @property def active_branch(self) -> str | None: - active_branch = porcelain.active_branch(self.root_directory).decode("utf-8") - return active_branch + return porcelain.active_branch(self.root_directory).decode("utf-8") diff --git a/infrahub_sdk/timestamp.py b/infrahub_sdk/timestamp.py index a9a56278..578a149e 100644 --- a/infrahub_sdk/timestamp.py +++ b/infrahub_sdk/timestamp.py @@ -43,8 +43,7 @@ def obj(self) -> ZonedDateTime: @classmethod def _parse_string(cls, value: str) -> ZonedDateTime: try: - zoned_date = ZonedDateTime.parse_common_iso(value) - return zoned_date + return ZonedDateTime.parse_common_iso(value) except ValueError: pass diff --git a/pyproject.toml b/pyproject.toml index cb185950..0e29c57a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -193,7 +193,6 @@ ignore = [ "PLW1641", # Object does not implement `__hash__` method "PTH100", # `os.path.abspath()` should be replaced by `Path.resolve()` "PTH109", # `os.getcwd()` should be replaced by `Path.cwd()` - "RET504", # Unnecessary assignment to `data` before `return` statement "RUF005", # Consider `[*path, str(key)]` instead of concatenation "RUF015", # Prefer `next(iter(input_data["variables"].keys()))` over single element slice "RUF029", # Function is declared `async`, but doesn't `await` or use `async` features. @@ -273,6 +272,7 @@ max-complexity = 17 "ANN201", # ANN201 Missing return type annotation for public function "ANN202", # Missing return type annotation for private function "ANN204", # Missing return type annotation for special method + "RET504", # Unnecessary assignment to `data` before `return` statement ] "tests/unit/sdk/test_client.py" = [ From 30e661d1a6019adf157d28df0a1dc98c286a029b Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 13 Sep 2025 11:15:43 +0100 Subject: [PATCH 04/16] add livestream to docs --- docs/docs/python-sdk/introduction.mdx | 7 + docs/package-lock.json | 401 +++++++++++++++++++++- docs/package.json | 3 +- docs/src/components/VideoPlayer/index.tsx | 31 ++ 4 files changed, 440 insertions(+), 2 deletions(-) create mode 100644 docs/src/components/VideoPlayer/index.tsx diff --git a/docs/docs/python-sdk/introduction.mdx b/docs/docs/python-sdk/introduction.mdx index 1b737dca..26f945ce 100644 --- a/docs/docs/python-sdk/introduction.mdx +++ b/docs/docs/python-sdk/introduction.mdx @@ -1,9 +1,16 @@ --- title: Python SDK --- +import VideoPlayer from '../../src/components/VideoPlayer'; The Infrahub Python SDK greatly simplifies how you can interact with Infrahub programmatically. +## Videos + +
+<VideoPlayer url="..." />
+
+ ## Blog posts - [Querying Data in Infrahub via the Python SDK](https://www.opsmill.com/querying-data-in-infrahub-via-the-python-sdk/) diff --git a/docs/package-lock.json b/docs/package-lock.json index 7fdd6241..35a180d1 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -15,7 +15,8 @@ "prism-react-renderer": "^2.3.0", "raw-loader": "^4.0.2", "react": "^18.0.0", - "react-dom": "^18.0.0" + "react-dom": "^18.0.0", + "react-player": "^3.3.2" }, "devDependencies": { "@docusaurus/module-type-aliases": "^3.8.1", @@ -4076,6 +4077,74 @@ "react": ">=16" } }, + "node_modules/@mux/mux-data-google-ima": { + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/@mux/mux-data-google-ima/-/mux-data-google-ima-0.2.8.tgz", + "integrity": "sha512-0ZEkHdcZ6bS8QtcjFcoJeZxJTpX7qRIledf4q1trMWPznugvtajCjCM2kieK/pzkZj1JM6liDRFs1PJSfVUs2A==", + "license": "MIT", + "dependencies": { + "mux-embed": "5.9.0" + } + }, + "node_modules/@mux/mux-player": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@mux/mux-player/-/mux-player-3.6.0.tgz", + "integrity": "sha512-yVWmTMJUoKNZZxsINFmz7ZUUR3GC+Qf7b6Qv2GTmUoYn14pO1aXywHLlMLDohstLIvdeOdh6F/WsD2/gDVSOmQ==", + "license": "MIT", + "dependencies": { + "@mux/mux-video": "0.27.0", + "@mux/playback-core": "0.31.0", + "media-chrome": "~4.13.1", + "player.style": "^0.2.0" + } + }, + "node_modules/@mux/mux-player-react": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@mux/mux-player-react/-/mux-player-react-3.6.0.tgz", + "integrity": "sha512-bh2Z1fQqNkKCNUMS/3VU6jL2iY22155ZSIyizfz+bVX0EYHqdsS/iG95iDYLPlzA8WPyIh+J210tme68e1qP+w==", + "license": "MIT", + "dependencies": { + "@mux/mux-player": "3.6.0", + "@mux/playback-core": "0.31.0", + "prop-types": "^15.8.1" + }, + "peerDependencies": { + "@types/react": "^17.0.0 || ^17.0.0-0 || ^18 || ^18.0.0-0 || ^19 || ^19.0.0-0", + "react": "^17.0.2 || ^17.0.0-0 || ^18 || ^18.0.0-0 || ^19 || ^19.0.0-0", + "react-dom": "^17.0.2 || ^17.0.2-0 || ^18 || ^18.0.0-0 || ^19 || ^19.0.0-0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@mux/mux-video": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/@mux/mux-video/-/mux-video-0.27.0.tgz", + "integrity": "sha512-Oi142YAcPKrmHTG+eaWHWaE7ucMHeJwx1FXABbLM2hMGj9MQ7kYjsD5J3meFlvuyz5UeVDsPLHeUJgeBXUZovg==", + "license": "MIT", + "dependencies": { + "@mux/mux-data-google-ima": "0.2.8", + "@mux/playback-core": "0.31.0", + "castable-video": "~1.1.10", + "custom-media-element": "~1.4.5", + "media-tracks": "~0.3.3" + } + }, + "node_modules/@mux/playback-core": { + "version": "0.31.0", + "resolved": "https://registry.npmjs.org/@mux/playback-core/-/playback-core-0.31.0.tgz", + "integrity": "sha512-VADcrtS4O6fQBH8qmgavS6h7v7amzy2oCguu1NnLaVZ3Z8WccNXcF0s7jPRoRDyXWGShgtVhypW2uXjLpkPxyw==", + "license": "MIT", + "dependencies": { + "hls.js": "~1.6.6", + "mux-embed": "^5.8.3" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -4465,6 +4534,12 @@ "url": "https://github.com/sponsors/gregberge" } }, + "node_modules/@svta/common-media-library": { + "version": "0.12.4", + "resolved": "https://registry.npmjs.org/@svta/common-media-library/-/common-media-library-0.12.4.tgz", + "integrity": "sha512-9EuOoaNmz7JrfGwjsrD9SxF9otU5TNMnbLu1yU4BeLK0W5cDxVXXR58Z89q9u2AnHjIctscjMTYdlqQ1gojTuw==", + "license": "Apache-2.0" + }, 
"node_modules/@szmarczak/http-timer": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz", @@ -4876,6 +4951,16 @@ "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", "license": "ISC" }, + "node_modules/@vimeo/player": { + "version": "2.29.0", + "resolved": "https://registry.npmjs.org/@vimeo/player/-/player-2.29.0.tgz", + "integrity": "sha512-9JjvjeqUndb9otCCFd0/+2ESsLk7VkDE6sxOBy9iy2ukezuQbplVRi+g9g59yAurKofbmTi/KcKxBGO/22zWRw==", + "license": "MIT", + "dependencies": { + "native-promise-only": "0.8.1", + "weakmap-polyfill": "2.0.4" + } + }, "node_modules/@webassemblyjs/ast": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", @@ -5500,6 +5585,45 @@ "integrity": "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==", "license": "MIT" }, + "node_modules/bcp-47": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/bcp-47/-/bcp-47-2.1.0.tgz", + "integrity": "sha512-9IIS3UPrvIa1Ej+lVDdDwO7zLehjqsaByECw0bu2RRGP73jALm6FYbzI5gWbgHLvNdkvfXB5YrSbocZdOS0c0w==", + "license": "MIT", + "dependencies": { + "is-alphabetical": "^2.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/bcp-47-match": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/bcp-47-match/-/bcp-47-match-2.0.3.tgz", + "integrity": "sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/bcp-47-normalize": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/bcp-47-normalize/-/bcp-47-normalize-2.3.0.tgz", + "integrity": "sha512-8I/wfzqQvttUFz7HVJgIZ7+dj3vUaIyIxYXaTRP1YWoSDfzt6TUmxaKZeuXR62qBmYr+nvuWINFRl6pZ5DlN4Q==", + "license": "MIT", + "dependencies": { + "bcp-47": "^2.0.0", + "bcp-47-match": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/big.js": { "version": "5.2.2", "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", @@ -5813,6 +5937,15 @@ ], "license": "CC-BY-4.0" }, + "node_modules/castable-video": { + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/castable-video/-/castable-video-1.1.10.tgz", + "integrity": "sha512-/T1I0A4VG769wTEZ8gWuy1Crn9saAfRTd1UYTb8xbOPlN78+zOi/1nU2dD5koNkfE5VWvgabkIqrGKmyNXOjSQ==", + "license": "MIT", + "dependencies": { + "custom-media-element": "~1.4.5" + } + }, "node_modules/ccount": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", @@ -5823,6 +5956,15 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/ce-la-react": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/ce-la-react/-/ce-la-react-0.3.1.tgz", + "integrity": "sha512-g0YwpZDPIwTwFumGTzNHcgJA6VhFfFCJkSNdUdC04br2UfU+56JDrJrJva3FZ7MToB4NDHAFBiPE/PZdNl1mQA==", + "license": "BSD-3-Clause", + "peerDependencies": { + "react": ">=17.0.0" + } + }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -6065,6 +6207,12 @@ "node": ">=6" } }, + "node_modules/cloudflare-video-element": { + "version": "1.3.4", + "resolved": 
"https://registry.npmjs.org/cloudflare-video-element/-/cloudflare-video-element-1.3.4.tgz", + "integrity": "sha512-F9g+tXzGEXI6v6L48qXxr8vnR8+L6yy7IhpJxK++lpzuVekMHTixxH7/dzLuq6OacVGziU4RB5pzZYJ7/LYtJg==", + "license": "MIT" + }, "node_modules/clsx": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", @@ -6074,6 +6222,12 @@ "node": ">=6" } }, + "node_modules/codem-isoboxer": { + "version": "0.3.10", + "resolved": "https://registry.npmjs.org/codem-isoboxer/-/codem-isoboxer-0.3.10.tgz", + "integrity": "sha512-eNk3TRV+xQMJ1PEj0FQGY8KD4m0GPxT487XJ+Iftm7mVa9WpPFDMWqPt+46buiP5j5Wzqe5oMIhqBcAeKfygSA==", + "license": "MIT" + }, "node_modules/collapse-white-space": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-2.1.0.tgz", @@ -6914,6 +7068,40 @@ "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", "license": "MIT" }, + "node_modules/custom-media-element": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/custom-media-element/-/custom-media-element-1.4.5.tgz", + "integrity": "sha512-cjrsQufETwxjvwZbYbKBCJNvmQ2++G9AvT45zDi7NXL9k2PdVcs2h0jQz96J6G4TMKRCcEsoJ+QTgQD00Igtjw==", + "license": "MIT" + }, + "node_modules/dash-video-element": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/dash-video-element/-/dash-video-element-0.1.6.tgz", + "integrity": "sha512-4gHShaQjcFv6diX5EzB6qAdUGKlIUGGZY8J8yp2pQkWqR0jX4c6plYy0cFraN7mr0DZINe8ujDN1fssDYxJjcg==", + "license": "MIT", + "dependencies": { + "custom-media-element": "^1.4.5", + "dashjs": "^5.0.3" + } + }, + "node_modules/dashjs": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/dashjs/-/dashjs-5.0.3.tgz", + "integrity": "sha512-TXndNnCUjFjF2nYBxDVba+hWRpVkadkQ8flLp7kHkem+5+wZTfRShJCnVkPUosmjS0YPE9fVNLbYPJxHBeQZvA==", + "license": "BSD-3-Clause", + "dependencies": { + "@svta/common-media-library": "^0.12.4", + "bcp-47-match": "^2.0.3", + "bcp-47-normalize": "^2.3.0", + "codem-isoboxer": "0.3.10", + "fast-deep-equal": "3.1.3", + "html-entities": "^2.5.2", + "imsc": "^1.1.5", + "localforage": "^1.10.0", + "path-browserify": "^1.0.1", + "ua-parser-js": "^1.0.37" + } + }, "node_modules/debounce": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/debounce/-/debounce-1.2.1.tgz", @@ -8704,6 +8892,23 @@ "value-equal": "^1.0.1" } }, + "node_modules/hls-video-element": { + "version": "1.5.7", + "resolved": "https://registry.npmjs.org/hls-video-element/-/hls-video-element-1.5.7.tgz", + "integrity": "sha512-R+uYimNZQndT2iqBgW7Gm0KiHT6pmlt5tnT63rYIcqOEcKD59M6pmdwqtX2vKPfHo+1ACM14Fy9JF1YMwlrLdQ==", + "license": "MIT", + "dependencies": { + "custom-media-element": "^1.4.5", + "hls.js": "^1.6.5", + "media-tracks": "^0.3.3" + } + }, + "node_modules/hls.js": { + "version": "1.6.12", + "resolved": "https://registry.npmjs.org/hls.js/-/hls.js-1.6.12.tgz", + "integrity": "sha512-Pz+7IzvkbAht/zXvwLzA/stUHNqztqKvlLbfpq6ZYU68+gZ+CZMlsbQBPUviRap+3IQ41E39ke7Ia+yvhsehEQ==", + "license": "Apache-2.0" + }, "node_modules/hoist-non-react-statics": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", @@ -9067,6 +9272,12 @@ "node": ">=16.x" } }, + "node_modules/immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", + "license": "MIT" + }, 
"node_modules/import-fresh": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", @@ -9092,6 +9303,21 @@ "node": ">=8" } }, + "node_modules/imsc": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/imsc/-/imsc-1.1.5.tgz", + "integrity": "sha512-V8je+CGkcvGhgl2C1GlhqFFiUOIEdwXbXLiu1Fcubvvbo+g9inauqT3l0pNYXGoLPBj3jxtZz9t+wCopMkwadQ==", + "license": "BSD-2-Clause", + "dependencies": { + "sax": "1.2.1" + } + }, + "node_modules/imsc/node_modules/sax": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==", + "license": "ISC" + }, "node_modules/imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", @@ -9662,6 +9888,15 @@ "node": ">=6" } }, + "node_modules/lie": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.1.1.tgz", + "integrity": "sha512-RiNhHysUjhrDQntfYSfY4MU24coXXdEOgw9WGcKHNeEwffDYbF//u87M1EWaMGzuFoSbqW0C9C6lEEhDOAswfw==", + "license": "MIT", + "dependencies": { + "immediate": "~3.0.5" + } + }, "node_modules/lilconfig": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", @@ -9703,6 +9938,15 @@ "node": ">=8.9.0" } }, + "node_modules/localforage": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/localforage/-/localforage-1.10.0.tgz", + "integrity": "sha512-14/H1aX7hzBBmmh7sGPd+AOMkkIrHM3Z1PAyGgZigA1H1p5O5ANnMyWzvpAETtG68/dC4pC0ncy3+PPGzXZHPg==", + "license": "Apache-2.0", + "dependencies": { + "lie": "3.1.1" + } + }, "node_modules/locate-path": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz", @@ -10233,6 +10477,21 @@ "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==", "license": "CC0-1.0" }, + "node_modules/media-chrome": { + "version": "4.13.1", + "resolved": "https://registry.npmjs.org/media-chrome/-/media-chrome-4.13.1.tgz", + "integrity": "sha512-jPPwYrFkM4ky27/xNYEeyRPOBC7qvru4Oydy7vQHMHplXLQJmjtcauhlLPvG0O5kkYFEaOBXv5zGYes/UxOoVw==", + "license": "MIT", + "dependencies": { + "ce-la-react": "^0.3.0" + } + }, + "node_modules/media-tracks": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/media-tracks/-/media-tracks-0.3.3.tgz", + "integrity": "sha512-9P2FuUHnZZ3iji+2RQk7Zkh5AmZTnOG5fODACnjhCVveX1McY3jmCRHofIEI+yTBqplz7LXy48c7fQ3Uigp88w==", + "license": "MIT" + }, "node_modules/media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -12213,6 +12472,12 @@ "multicast-dns": "cli.js" } }, + "node_modules/mux-embed": { + "version": "5.9.0", + "resolved": "https://registry.npmjs.org/mux-embed/-/mux-embed-5.9.0.tgz", + "integrity": "sha512-wmunL3uoPhma/tWy8PrDPZkvJpXvSFBwbD3KkC4PG8Ztjfb1X3hRJwGUAQyRz7z99b/ovLm2UTTitrkvStjH4w==", + "license": "MIT" + }, "node_modules/nanoid": { "version": "3.3.11", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", @@ -12231,6 +12496,12 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, + "node_modules/native-promise-only": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz", + "integrity": "sha512-zkVhZUA3y8mbz652WrL5x0fB0ehrBkulWT3TomAQ9iDtyXZvzKeEA6GPxAItBYeNYl5yngKRX612qHOhvMkDeg==", + "license": "MIT" + }, 
"node_modules/negotiator": { "version": "0.6.4", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", @@ -12791,6 +13062,12 @@ "tslib": "^2.0.3" } }, + "node_modules/path-browserify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==", + "license": "MIT" + }, "node_modules/path-exists": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", @@ -12881,6 +13158,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/player.style": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/player.style/-/player.style-0.2.0.tgz", + "integrity": "sha512-Ngoaz49TClptMr8HDA2IFmjT3Iq6R27QEUH/C+On33L59RSF3dCLefBYB1Au2RDZQJ6oVFpc1sXaPVpp7fEzzA==", + "license": "MIT", + "workspaces": [ + ".", + "site", + "examples/*", + "scripts/*", + "themes/*" + ], + "dependencies": { + "media-chrome": "~4.13.0" + } + }, "node_modules/postcss": { "version": "8.5.6", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", @@ -14756,6 +15049,29 @@ "webpack": ">=4.41.1 || 5.x" } }, + "node_modules/react-player": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/react-player/-/react-player-3.3.2.tgz", + "integrity": "sha512-MBSCxTA1FPyMR19Wy+2LtVjguhrLl9p2l5nODY4fbumgsoaCEuhMLpZvxh8RWjzzvqL8V3jYcPfw/XhqrbTzFw==", + "license": "MIT", + "dependencies": { + "@mux/mux-player-react": "^3.5.1", + "cloudflare-video-element": "^1.3.3", + "dash-video-element": "^0.1.6", + "hls-video-element": "^1.5.6", + "spotify-audio-element": "^1.0.2", + "tiktok-video-element": "^0.1.0", + "twitch-video-element": "^0.1.2", + "vimeo-video-element": "^1.5.3", + "wistia-video-element": "^1.3.3", + "youtube-video-element": "^1.6.1" + }, + "peerDependencies": { + "@types/react": "^17.0.0 || ^18 || ^19", + "react": "^17.0.2 || ^18 || ^19", + "react-dom": "^17.0.2 || ^18 || ^19" + } + }, "node_modules/react-router": { "version": "5.3.4", "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.3.4.tgz", @@ -16060,6 +16376,12 @@ "wbuf": "^1.7.3" } }, + "node_modules/spotify-audio-element": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/spotify-audio-element/-/spotify-audio-element-1.0.3.tgz", + "integrity": "sha512-I1/qD8cg/UnTlCIMiKSdZUJTyYfYhaqFK7LIVElc48eOqUUbVCaw1bqL8I6mJzdMJTh3eoNyF/ewvB7NoS/g9A==", + "license": "MIT" + }, "node_modules/sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", @@ -16250,6 +16572,12 @@ "postcss": "^8.4.31" } }, + "node_modules/super-media-element": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/super-media-element/-/super-media-element-1.4.2.tgz", + "integrity": "sha512-9pP/CVNp4NF2MNlRzLwQkjiTgKKe9WYXrLh9+8QokWmMxz+zt2mf1utkWLco26IuA3AfVcTb//qtlTIjY3VHxA==", + "license": "MIT" + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -16420,6 +16748,12 @@ "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==", "license": "MIT" }, + "node_modules/tiktok-video-element": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/tiktok-video-element/-/tiktok-video-element-0.1.1.tgz", + "integrity": 
"sha512-BaiVzvNz2UXDKTdSrXzrNf4q6Ecc+/utYUh7zdEu2jzYcJVDoqYbVfUl0bCfMoOeeAqg28vD/yN63Y3E9jOrlA==", + "license": "MIT" + }, "node_modules/tiny-invariant": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", @@ -16497,6 +16831,12 @@ "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", "license": "0BSD" }, + "node_modules/twitch-video-element": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/twitch-video-element/-/twitch-video-element-0.1.4.tgz", + "integrity": "sha512-SDpZ4f7sZmwHF6XG5PF0KWuP18pH/kNG04MhTcpqJby7Lk/D3TS/lCYd+RSg0rIAAVi1LDgSIo1yJs9kmHlhgw==", + "license": "MIT" + }, "node_modules/type-fest": { "version": "2.19.0", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", @@ -16566,6 +16906,32 @@ "node": ">=14.17" } }, + "node_modules/ua-parser-js": { + "version": "1.0.41", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.41.tgz", + "integrity": "sha512-LbBDqdIC5s8iROCUjMbW1f5dJQTEFB1+KO9ogbvlb3nm9n4YHa5p4KTvFPWvh2Hs8gZMBuiB1/8+pdfe/tDPug==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/ua-parser-js" + }, + { + "type": "paypal", + "url": "https://paypal.me/faisalman" + }, + { + "type": "github", + "url": "https://github.com/sponsors/faisalman" + } + ], + "license": "MIT", + "bin": { + "ua-parser-js": "script/cli.js" + }, + "engines": { + "node": "*" + } + }, "node_modules/undici-types": { "version": "7.10.0", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", @@ -17060,6 +17426,15 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/vimeo-video-element": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/vimeo-video-element/-/vimeo-video-element-1.5.5.tgz", + "integrity": "sha512-9QVvKPPnubMNeNYHY5KZqAYerVMuVG+7PSK+6IrEUD7a/wnCGtzb8Sfxl9qNxDAL6Q8i+p+5SDoVKobCd866vw==", + "license": "MIT", + "dependencies": { + "@vimeo/player": "2.29.0" + } + }, "node_modules/watchpack": { "version": "2.4.4", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.4.tgz", @@ -17082,6 +17457,15 @@ "minimalistic-assert": "^1.0.0" } }, + "node_modules/weakmap-polyfill": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/weakmap-polyfill/-/weakmap-polyfill-2.0.4.tgz", + "integrity": "sha512-ZzxBf288iALJseijWelmECm/1x7ZwQn3sMYIkDr2VvZp7r6SEKuT8D0O9Wiq6L9Nl5mazrOMcmiZE/2NCenaxw==", + "license": "MIT", + "engines": { + "node": ">=8.10.0" + } + }, "node_modules/web-namespaces": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", @@ -17483,6 +17867,15 @@ "integrity": "sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==", "license": "MIT" }, + "node_modules/wistia-video-element": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/wistia-video-element/-/wistia-video-element-1.3.4.tgz", + "integrity": "sha512-2l22oaQe4jUfi3yvsh2m2oCEgvbqTzaSYx6aJnZAvV5hlMUJlyZheFUnaj0JU2wGlHdVGV7xNY+5KpKu+ruLYA==", + "license": "MIT", + "dependencies": { + "super-media-element": "~1.4.2" + } + }, "node_modules/wrap-ansi": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", @@ -17620,6 +18013,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/youtube-video-element": { + "version": "1.6.2", + "resolved": 
"https://registry.npmjs.org/youtube-video-element/-/youtube-video-element-1.6.2.tgz", + "integrity": "sha512-YHDIOAqgRpfl1Ois9HcB8UFtWOxK8KJrV5TXpImj4BKYP1rWT04f/fMM9tQ9SYZlBKukT7NR+9wcI3UpB5BMDQ==", + "license": "MIT" + }, "node_modules/zwitch": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", diff --git a/docs/package.json b/docs/package.json index 0817379e..0dc1e714 100644 --- a/docs/package.json +++ b/docs/package.json @@ -22,7 +22,8 @@ "prism-react-renderer": "^2.3.0", "raw-loader": "^4.0.2", "react": "^18.0.0", - "react-dom": "^18.0.0" + "react-dom": "^18.0.0", + "react-player": "^3.3.2" }, "devDependencies": { "@docusaurus/module-type-aliases": "^3.8.1", diff --git a/docs/src/components/VideoPlayer/index.tsx b/docs/src/components/VideoPlayer/index.tsx new file mode 100644 index 00000000..b100cc12 --- /dev/null +++ b/docs/src/components/VideoPlayer/index.tsx @@ -0,0 +1,31 @@ +import React from 'react'; +import ReactPlayer from 'react-player'; + +interface VideoPlayerProps { + url: string; + light?: boolean; +} + +export default function VideoPlayer({ url, light = false }: VideoPlayerProps) { + return ( +
+    <div style={{ position: 'relative', paddingTop: '56.25%' }}>
+      <ReactPlayer
+        src={url}
+        light={light}
+        controls
+        width="100%"
+        height="100%"
+        style={{ position: 'absolute', top: 0, left: 0 }}
+      />
+    </div>
+ ); +} \ No newline at end of file From 4ec81022872f5e2d030cc31a9a1d5e3c2f83341a Mon Sep 17 00:00:00 2001 From: Bearchitek Date: Mon, 15 Sep 2025 16:14:54 +0200 Subject: [PATCH 05/16] fix issue with double quotes in HFID --- changelog/+escape-hfid.fixed.md | 1 + infrahub_sdk/graphql.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 changelog/+escape-hfid.fixed.md diff --git a/changelog/+escape-hfid.fixed.md b/changelog/+escape-hfid.fixed.md new file mode 100644 index 00000000..b7621ef5 --- /dev/null +++ b/changelog/+escape-hfid.fixed.md @@ -0,0 +1 @@ +- Fixed issue with improperly escaped special characters in `hfid` fields and other string values in GraphQL mutations by implementing proper JSON-style string escaping \ No newline at end of file diff --git a/infrahub_sdk/graphql.py b/infrahub_sdk/graphql.py index cf48ad83..2610e8d1 100644 --- a/infrahub_sdk/graphql.py +++ b/infrahub_sdk/graphql.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json from enum import Enum from typing import Any @@ -18,7 +19,9 @@ def convert_to_graphql_as_string(value: Any, convert_enum: bool = False) -> str: return convert_to_graphql_as_string(value=value.value, convert_enum=True) return value.name if isinstance(value, str): - return f'"{value}"' + # Use json.dumps() to properly escape the string according to JSON rules, + # which are compatible with GraphQL string escaping + return json.dumps(value) if isinstance(value, bool): return repr(value).lower() if isinstance(value, list): From e7ed9811efea321419cdab1695f4c4e1b12dc7a0 Mon Sep 17 00:00:00 2001 From: Patrick Ogenstad Date: Tue, 16 Sep 2025 09:48:47 +0200 Subject: [PATCH 06/16] Format with ruff --- infrahub_sdk/client.py | 280 +++++++++++------------------------------ 1 file changed, 75 insertions(+), 205 deletions(-) diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index 2978496a..8189c1d5 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -95,9 +95,7 @@ class ProcessRelationsNodeSync(TypedDict): def handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]): # type: ignore[no-untyped-def] @wraps(func) - async def wrapper( - client: InfrahubClient, *args: Any, **kwargs: Any - ) -> httpx.Response: + async def wrapper(client: InfrahubClient, *args: Any, **kwargs: Any) -> httpx.Response: response = await func(client, *args, **kwargs) if response.status_code == 401: errors = response.json().get("errors", []) @@ -111,9 +109,7 @@ async def wrapper( def handle_relogin_sync(func: Callable[..., httpx.Response]): # type: ignore[no-untyped-def] @wraps(func) - def wrapper( - client: InfrahubClientSync, *args: Any, **kwargs: Any - ) -> httpx.Response: + def wrapper(client: InfrahubClientSync, *args: Any, **kwargs: Any) -> httpx.Response: response = func(client, *args, **kwargs) if response.status_code == 401: errors = response.json().get("errors", []) @@ -309,9 +305,7 @@ def _build_ip_prefix_allocation_query( input_data["prefix_length"] = prefix_length if member_type: if member_type not in ("prefix", "address"): - raise ValueError( - "member_type possible values are 'prefix' or 'address'" - ) + raise ValueError("member_type possible values are 'prefix' or 'address'") input_data["member_type"] = member_type if prefix_type: input_data["prefix_type"] = prefix_type @@ -345,12 +339,8 @@ def _initialize(self) -> None: self.object_store = ObjectStore(self) self.store = NodeStore(default_branch=self.default_branch) self.task = InfrahubTaskManager(self) - self.concurrent_execution_limit = 
asyncio.Semaphore( - self.max_concurrent_execution - ) - self._request_method: AsyncRequester = ( - self.config.requester or self._default_request_method - ) + self.concurrent_execution_limit = asyncio.Semaphore(self.max_concurrent_execution) + self._request_method: AsyncRequester = self.config.requester or self._default_request_method self.group_context = InfrahubGroupContext(self) async def get_version(self) -> str: @@ -365,9 +355,7 @@ async def get_user(self) -> dict: async def get_user_permissions(self) -> dict: """Return user permissions""" user_info = await self.get_user() - return get_user_permissions( - user_info["AccountProfile"]["member_of_groups"]["edges"] - ) + return get_user_permissions(user_info["AccountProfile"]["member_of_groups"]["edges"]) @overload async def create( @@ -402,13 +390,9 @@ async def create( if not data and not kwargs: raise ValueError("Either data or a list of keywords but be provided") - return InfrahubNode( - client=self, schema=schema, branch=branch, data=data or kwargs - ) + return InfrahubNode(client=self, schema=schema, branch=branch, data=data or kwargs) - async def delete( - self, kind: str | type[SchemaType], id: str, branch: str | None = None - ) -> None: + async def delete(self, kind: str | type[SchemaType], id: str, branch: str | None = None) -> None: branch = branch or self.default_branch schema = await self.schema.get(kind=kind, branch=branch) @@ -552,11 +536,7 @@ async def get( filters: MutableMapping[str, Any] = {} if id: - if ( - not is_valid_uuid(id) - and isinstance(schema, NodeSchemaAPI) - and schema.default_filter - ): + if not is_valid_uuid(id) and isinstance(schema, NodeSchemaAPI) and schema.default_filter: filters[schema.default_filter] = id else: filters["ids"] = [id] @@ -564,9 +544,7 @@ async def get( if isinstance(schema, NodeSchemaAPI) and schema.human_friendly_id: filters["hfid"] = hfid else: - raise ValueError( - "Cannot filter by HFID if the node doesn't have an HFID defined" - ) + raise ValueError("Cannot filter by HFID if the node doesn't have an HFID defined") if kwargs: filters.update(kwargs) if len(filters) == 0: @@ -587,9 +565,7 @@ async def get( ) if len(results) == 0 and raise_when_missing: - raise NodeNotFoundError( - branch_name=branch, node_type=schema.kind, identifier=filters - ) + raise NodeNotFoundError(branch_name=branch, node_type=schema.kind, identifier=filters) if len(results) == 0 and not raise_when_missing: return None if len(results) > 1: @@ -625,14 +601,10 @@ async def _process_nodes_and_relationships( related_nodes: list[InfrahubNode] = [] for item in response.get(schema_kind, {}).get("edges", []): - node = await InfrahubNode.from_graphql( - client=self, branch=branch, data=item, timeout=timeout - ) + node = await InfrahubNode.from_graphql(client=self, branch=branch, data=item, timeout=timeout) nodes.append(node) - if prefetch_relationships or ( - include and any(rel in include for rel in node._relationships) - ): + if prefetch_relationships or (include and any(rel in include for rel in node._relationships)): await node._process_relationships( node_data=item, branch=branch, @@ -858,13 +830,9 @@ async def filters( filters = kwargs pagination_size = self.pagination_size - async def process_page( - page_offset: int, page_number: int - ) -> tuple[dict, ProcessRelationsNode]: + async def process_page(page_offset: int, page_number: int) -> tuple[dict, ProcessRelationsNode]: """Process a single page of results.""" - query_data = await InfrahubNode( - client=self, schema=schema, branch=branch - 
).generate_query_data( + query_data = await InfrahubNode(client=self, schema=schema, branch=branch).generate_query_data( offset=page_offset if offset is None else offset, limit=limit or pagination_size, filters=filters, @@ -885,15 +853,13 @@ async def process_page( timeout=timeout, ) - process_result: ProcessRelationsNode = ( - await self._process_nodes_and_relationships( - response=response, - schema_kind=schema.kind, - branch=branch, - prefetch_relationships=prefetch_relationships, - timeout=timeout, - include=include, - ) + process_result: ProcessRelationsNode = await self._process_nodes_and_relationships( + response=response, + schema_kind=schema.kind, + branch=branch, + prefetch_relationships=prefetch_relationships, + timeout=timeout, + include=include, ) return response, process_result @@ -902,16 +868,12 @@ async def process_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]: nodes = [] related_nodes = [] batch_process = await self.create_batch() - count = await self.count( - kind=schema.kind, branch=branch, partial_match=partial_match, **filters - ) + count = await self.count(kind=schema.kind, branch=branch, partial_match=partial_match, **filters) total_pages = (count + pagination_size - 1) // pagination_size for page_number in range(1, total_pages + 1): page_offset = (page_number - 1) * pagination_size - batch_process.add( - task=process_page, page_offset=page_offset, page_number=page_number - ) + batch_process.add(task=process_page, page_offset=page_offset, page_number=page_number) async for _, response in batch_process.execute(): nodes.extend(response[1]["nodes"]) @@ -928,15 +890,11 @@ async def process_non_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]: while has_remaining_items: page_offset = (page_number - 1) * pagination_size - response, process_result = await process_page( - page_offset=page_offset, page_number=page_number - ) + response, process_result = await process_page(page_offset=page_offset, page_number=page_number) nodes.extend(process_result["nodes"]) related_nodes.extend(process_result["related_nodes"]) - remaining_items = response[schema.kind].get("count", 0) - ( - page_offset + pagination_size - ) + remaining_items = response[schema.kind].get("count", 0) - (page_offset + pagination_size) if remaining_items < 0 or offset is not None or limit is not None: has_remaining_items = False page_number += 1 @@ -944,9 +902,7 @@ async def process_non_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]: return nodes, related_nodes # Select parallel or non-parallel processing - nodes, related_nodes = await ( - process_batch() if parallel else process_non_batch() - ) + nodes, related_nodes = await (process_batch() if parallel else process_non_batch()) if populate_store: for node in nodes: @@ -1012,9 +968,7 @@ async def execute_graphql( while retry and time.time() - start_time < self.config.max_retry_duration: retry = self.retry_on_failure try: - resp = await self._post( - url=url, payload=payload, headers=headers, timeout=timeout - ) + resp = await self._post(url=url, payload=payload, headers=headers, timeout=timeout) if raise_for_error in (None, True): resp.raise_for_status() @@ -1044,9 +998,7 @@ async def execute_graphql( response = decode_json(response=resp) if "errors" in response: - raise GraphQLError( - errors=response["errors"], query=query, variables=variables - ) + raise GraphQLError(errors=response["errors"], query=query, variables=variables) return response["data"] @@ -1081,9 +1033,7 @@ async def _post( ) @handle_relogin - async def _get( - self, 
url: str, headers: dict | None = None, timeout: int | None = None - ) -> httpx.Response: + async def _get(self, url: str, headers: dict | None = None, timeout: int | None = None) -> httpx.Response: """Execute a HTTP GET with HTTPX. Raises: @@ -1111,9 +1061,7 @@ async def _request( timeout: int, payload: dict | None = None, ) -> httpx.Response: - response = await self._request_method( - url=url, method=method, headers=headers, timeout=timeout, payload=payload - ) + response = await self._request_method(url=url, method=method, headers=headers, timeout=timeout, payload=payload) self._record(response) return response @@ -1135,16 +1083,12 @@ async def _default_request_method( elif self.config.proxy_mounts.is_set: proxy_config["mounts"] = { key: httpx.AsyncHTTPTransport(proxy=value) - for key, value in self.config.proxy_mounts.model_dump( - by_alias=True - ).items() + for key, value in self.config.proxy_mounts.model_dump(by_alias=True).items() } async with httpx.AsyncClient( **proxy_config, - verify=self.config.tls_ca_file - if self.config.tls_ca_file - else not self.config.tls_insecure, + verify=self.config.tls_ca_file if self.config.tls_ca_file else not self.config.tls_insecure, ) as client: try: response = await client.request( @@ -1303,15 +1247,9 @@ async def create_diff( }, } - mutation_query = ( - MUTATION_QUERY_TASK if not wait_until_completion else {"ok": None} - ) - query = Mutation( - mutation="DiffUpdate", input_data=input_data, query=mutation_query - ) - response = await self.execute_graphql( - query=query.render(), tracker="mutation-diff-update" - ) + mutation_query = MUTATION_QUERY_TASK if not wait_until_completion else {"ok": None} + query = Mutation(mutation="DiffUpdate", input_data=input_data, query=mutation_query) + response = await self.execute_graphql(query=query.render(), tracker="mutation-diff-update") if not wait_until_completion and "task" in response["DiffUpdate"]: return response["DiffUpdate"]["task"]["id"] @@ -1353,9 +1291,7 @@ async def get_diff_summary( if diff_tree is None or "nodes" not in diff_tree: return [] for node_dict in diff_tree["nodes"]: - node_diff = diff_tree_node_to_node_diff( - node_dict=node_dict, branch_name=branch - ) + node_diff = diff_tree_node_to_node_diff(node_dict=node_dict, branch_name=branch) node_diffs.append(node_diff) return node_diffs @@ -1501,9 +1437,7 @@ async def allocate_next_ip_address( if response[mutation_name]["ok"]: resource_details = response[mutation_name]["node"] - return await self.get( - kind=resource_details["kind"], id=resource_details["id"], branch=branch - ) + return await self.get(kind=resource_details["kind"], id=resource_details["id"], branch=branch) return None @overload @@ -1656,9 +1590,7 @@ async def allocate_next_ip_prefix( if response[mutation_name]["ok"]: resource_details = response[mutation_name]["node"] - return await self.get( - kind=resource_details["kind"], id=resource_details["id"], branch=branch - ) + return await self.get(kind=resource_details["kind"], id=resource_details["id"], branch=branch) return None async def create_batch(self, return_exceptions: bool = False) -> InfrahubBatch: @@ -1753,9 +1685,7 @@ def _initialize(self) -> None: self.object_store = ObjectStoreSync(self) self.store = NodeStoreSync(default_branch=self.default_branch) self.task = InfrahubTaskManagerSync(self) - self._request_method: SyncRequester = ( - self.config.sync_requester or self._default_request_method - ) + self._request_method: SyncRequester = self.config.sync_requester or self._default_request_method self.group_context = 
InfrahubGroupContextSync(self)
 
     def get_version(self) -> str:
@@ -1770,9 +1700,7 @@ def get_user(self) -> dict:
     def get_user_permissions(self) -> dict:
         """Return user permissions"""
         user_info = self.get_user()
-        return get_user_permissions(
-            user_info["AccountProfile"]["member_of_groups"]["edges"]
-        )
+        return get_user_permissions(user_info["AccountProfile"]["member_of_groups"]["edges"])
 
     @overload
     def create(
@@ -1806,19 +1734,13 @@ def create(
         if not data and not kwargs:
             raise ValueError("Either data or a list of keywords must be provided")
 
-        return InfrahubNodeSync(
-            client=self, schema=schema, branch=branch, data=data or kwargs
-        )
+        return InfrahubNodeSync(client=self, schema=schema, branch=branch, data=data or kwargs)
 
-    def delete(
-        self, kind: str | type[SchemaTypeSync], id: str, branch: str | None = None
-    ) -> None:
+    def delete(self, kind: str | type[SchemaTypeSync], id: str, branch: str | None = None) -> None:
         branch = branch or self.default_branch
         schema = self.schema.get(kind=kind, branch=branch)
 
-        node = InfrahubNodeSync(
-            client=self, schema=schema, branch=branch, data={"id": id}
-        )
+        node = InfrahubNodeSync(client=self, schema=schema, branch=branch, data={"id": id})
         node.delete()
 
     def clone(self, branch: str | None = None) -> InfrahubClientSync:
@@ -1876,9 +1798,7 @@ def execute_graphql(
         while retry and time.time() - start_time < self.config.max_retry_duration:
             retry = self.retry_on_failure
             try:
-                resp = self._post(
-                    url=url, payload=payload, headers=headers, timeout=timeout
-                )
+                resp = self._post(url=url, payload=payload, headers=headers, timeout=timeout)
 
                 if raise_for_error in (None, True):
                     resp.raise_for_status()
@@ -1908,9 +1828,7 @@ def execute_graphql(
         response = decode_json(response=resp)
 
         if "errors" in response:
-            raise GraphQLError(
-                errors=response["errors"], query=query, variables=variables
-            )
+            raise GraphQLError(errors=response["errors"], query=query, variables=variables)
 
         return response["data"]
 
@@ -2069,14 +1987,10 @@ def _process_nodes_and_relationships(
         related_nodes: list[InfrahubNodeSync] = []
 
         for item in response.get(schema_kind, {}).get("edges", []):
-            node = InfrahubNodeSync.from_graphql(
-                client=self, branch=branch, data=item, timeout=timeout
-            )
+            node = InfrahubNodeSync.from_graphql(client=self, branch=branch, data=item, timeout=timeout)
             nodes.append(node)
 
-            if prefetch_relationships or (
-                include and any(rel in include for rel in node._relationships)
-            ):
+            if prefetch_relationships or (include and any(rel in include for rel in node._relationships)):
                 node._process_relationships(
                     node_data=item,
                     branch=branch,
@@ -2177,13 +2091,9 @@ def filters(
             filters = kwargs
         pagination_size = self.pagination_size
 
-        def process_page(
-            page_offset: int, page_number: int
-        ) -> tuple[dict, ProcessRelationsNodeSync]:
+        def process_page(page_offset: int, page_number: int) -> tuple[dict, ProcessRelationsNodeSync]:
             """Process a single page of results."""
-            query_data = InfrahubNodeSync(
-                client=self, schema=schema, branch=branch
-            ).generate_query_data(
+            query_data = InfrahubNodeSync(client=self, schema=schema, branch=branch).generate_query_data(
                 offset=page_offset if offset is None else offset,
                 limit=limit or pagination_size,
                 filters=filters,
@@ -2204,15 +2114,13 @@ def process_page(
                 tracker=f"query-{str(schema.kind).lower()}-page{page_number}",
             )
 
-            process_result: ProcessRelationsNodeSync = (
-                self._process_nodes_and_relationships(
-                    response=response,
-                    schema_kind=schema.kind,
-                    branch=branch,
-                    prefetch_relationships=prefetch_relationships,
-                    
timeout=timeout, - include=include, - ) + process_result: ProcessRelationsNodeSync = self._process_nodes_and_relationships( + response=response, + schema_kind=schema.kind, + branch=branch, + prefetch_relationships=prefetch_relationships, + timeout=timeout, + include=include, ) return response, process_result @@ -2222,16 +2130,12 @@ def process_batch() -> tuple[list[InfrahubNodeSync], list[InfrahubNodeSync]]: related_nodes = [] batch_process = self.create_batch() - count = self.count( - kind=schema.kind, branch=branch, partial_match=partial_match, **filters - ) + count = self.count(kind=schema.kind, branch=branch, partial_match=partial_match, **filters) total_pages = (count + pagination_size - 1) // pagination_size for page_number in range(1, total_pages + 1): page_offset = (page_number - 1) * pagination_size - batch_process.add( - task=process_page, page_offset=page_offset, page_number=page_number - ) + batch_process.add(task=process_page, page_offset=page_offset, page_number=page_number) for _, response in batch_process.execute(): nodes.extend(response[1]["nodes"]) @@ -2239,9 +2143,7 @@ def process_batch() -> tuple[list[InfrahubNodeSync], list[InfrahubNodeSync]]: return nodes, related_nodes - def process_non_batch() -> tuple[ - list[InfrahubNodeSync], list[InfrahubNodeSync] - ]: + def process_non_batch() -> tuple[list[InfrahubNodeSync], list[InfrahubNodeSync]]: """Process queries without parallel mode.""" nodes = [] related_nodes = [] @@ -2250,16 +2152,12 @@ def process_non_batch() -> tuple[ while has_remaining_items: page_offset = (page_number - 1) * pagination_size - response, process_result = process_page( - page_offset=page_offset, page_number=page_number - ) + response, process_result = process_page(page_offset=page_offset, page_number=page_number) nodes.extend(process_result["nodes"]) related_nodes.extend(process_result["related_nodes"]) - remaining_items = response[schema.kind].get("count", 0) - ( - page_offset + pagination_size - ) + remaining_items = response[schema.kind].get("count", 0) - (page_offset + pagination_size) if remaining_items < 0 or offset is not None or limit is not None: has_remaining_items = False page_number += 1 @@ -2416,11 +2314,7 @@ def get( filters: MutableMapping[str, Any] = {} if id: - if ( - not is_valid_uuid(id) - and isinstance(schema, NodeSchemaAPI) - and schema.default_filter - ): + if not is_valid_uuid(id) and isinstance(schema, NodeSchemaAPI) and schema.default_filter: filters[schema.default_filter] = id else: filters["ids"] = [id] @@ -2428,9 +2322,7 @@ def get( if isinstance(schema, NodeSchemaAPI) and schema.human_friendly_id: filters["hfid"] = hfid else: - raise ValueError( - "Cannot filter by HFID if the node doesn't have an HFID defined" - ) + raise ValueError("Cannot filter by HFID if the node doesn't have an HFID defined") if kwargs: filters.update(kwargs) if len(filters) == 0: @@ -2451,9 +2343,7 @@ def get( ) if len(results) == 0 and raise_when_missing: - raise NodeNotFoundError( - branch_name=branch, node_type=schema.kind, identifier=filters - ) + raise NodeNotFoundError(branch_name=branch, node_type=schema.kind, identifier=filters) if len(results) == 0 and not raise_when_missing: return None if len(results) > 1: @@ -2564,15 +2454,9 @@ def create_diff( }, } - mutation_query = ( - MUTATION_QUERY_TASK if not wait_until_completion else {"ok": None} - ) - query = Mutation( - mutation="DiffUpdate", input_data=input_data, query=mutation_query - ) - response = self.execute_graphql( - query=query.render(), tracker="mutation-diff-update" - ) + 
mutation_query = MUTATION_QUERY_TASK if not wait_until_completion else {"ok": None} + query = Mutation(mutation="DiffUpdate", input_data=input_data, query=mutation_query) + response = self.execute_graphql(query=query.render(), tracker="mutation-diff-update") if not wait_until_completion and "task" in response["DiffUpdate"]: return response["DiffUpdate"]["task"]["id"] @@ -2614,9 +2498,7 @@ def get_diff_summary( if diff_tree is None or "nodes" not in diff_tree: return [] for node_dict in diff_tree["nodes"]: - node_diff = diff_tree_node_to_node_diff( - node_dict=node_dict, branch_name=branch - ) + node_diff = diff_tree_node_to_node_diff(node_dict=node_dict, branch_name=branch) node_diffs.append(node_diff) return node_diffs @@ -2762,9 +2644,7 @@ def allocate_next_ip_address( if response[mutation_name]["ok"]: resource_details = response[mutation_name]["node"] - return self.get( - kind=resource_details["kind"], id=resource_details["id"], branch=branch - ) + return self.get(kind=resource_details["kind"], id=resource_details["id"], branch=branch) return None @overload @@ -2917,9 +2797,7 @@ def allocate_next_ip_prefix( if response[mutation_name]["ok"]: resource_details = response[mutation_name]["node"] - return self.get( - kind=resource_details["kind"], id=resource_details["id"], branch=branch - ) + return self.get(kind=resource_details["kind"], id=resource_details["id"], branch=branch) return None def repository_update_commit( @@ -2934,9 +2812,7 @@ def repository_update_commit( ) @handle_relogin_sync - def _get( - self, url: str, headers: dict | None = None, timeout: int | None = None - ) -> httpx.Response: + def _get(self, url: str, headers: dict | None = None, timeout: int | None = None) -> httpx.Response: """Execute a HTTP GET with HTTPX. Raises: @@ -2992,9 +2868,7 @@ def _request( timeout: int, payload: dict | None = None, ) -> httpx.Response: - response = self._request_method( - url=url, method=method, headers=headers, timeout=timeout, payload=payload - ) + response = self._request_method(url=url, method=method, headers=headers, timeout=timeout, payload=payload) self._record(response) return response @@ -3017,16 +2891,12 @@ def _default_request_method( elif self.config.proxy_mounts.is_set: proxy_config["mounts"] = { key: httpx.HTTPTransport(proxy=value) - for key, value in self.config.proxy_mounts.model_dump( - by_alias=True - ).items() + for key, value in self.config.proxy_mounts.model_dump(by_alias=True).items() } with httpx.Client( **proxy_config, - verify=self.config.tls_ca_file - if self.config.tls_ca_file - else not self.config.tls_insecure, + verify=self.config.tls_ca_file if self.config.tls_ca_file else not self.config.tls_insecure, ) as client: try: response = client.request( From c111658dd8c980683b3930562751a798cfcdeb65 Mon Sep 17 00:00:00 2001 From: Babatunde Olusola Date: Wed, 17 Sep 2025 12:36:18 +0100 Subject: [PATCH 07/16] IFC-1811: Replace toml package with tomllib and tomli optionally (#551) * Replace toml package with tomllib and tomli optionally * fix for mypy errors * update config, add pre-commit config * Make tomli package optional * Add towncrier housekeeping message * update python to uppercase p --- .github/workflows/ci.yml | 14 ++- .pre-commit-config.yaml | 22 +++++ changelog/528.housekeeping.md | 1 + docs/docusaurus.config.ts | 2 +- infrahub_sdk/ctl/config.py | 9 +- poetry.lock | 179 ++++++++-------------------------- pyproject.toml | 7 +- 7 files changed, 86 insertions(+), 148 deletions(-) create mode 100644 .pre-commit-config.yaml create mode 100644 
changelog/528.housekeeping.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d3a62338..8217b2e1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -146,7 +146,7 @@ jobs: - name: "Install dependencies" run: npm install - name: "Setup Python environment" - run: "pip install invoke toml" + run: "pip install invoke" - name: "Build docs website" run: "invoke docs" @@ -176,7 +176,7 @@ jobs: - name: "Install dependencies" run: "poetry install --no-interaction --no-ansi --extras ctl" - name: "Setup environment" - run: "pip install invoke toml" + run: "poetry run pip install invoke" - name: "Validate generated documentation" run: "poetry run invoke docs-validate" @@ -236,7 +236,11 @@ jobs: run: | pipx install poetry==${{ needs.prepare-environment.outputs.POETRY_VERSION }} --python python${{ matrix.python-version }} poetry config virtualenvs.create true --local - pip install invoke toml codecov + pip install invoke codecov + - name: "Install tomli for Python < 3.11" + if: matrix.python-version == '3.9' || matrix.python-version == '3.10' + run: | + pip install tomli - name: "Install Package" run: "poetry install --all-extras" - name: "Mypy Tests" @@ -289,7 +293,7 @@ jobs: run: | pipx install poetry==${{ needs.prepare-environment.outputs.POETRY_VERSION }} poetry config virtualenvs.create true --local - pip install invoke toml codecov + pip install invoke codecov - name: "Install Package" run: "poetry install --all-extras" - name: "Integration Tests" @@ -362,7 +366,7 @@ jobs: # run: | # pipx install poetry==${{ needs.prepare-environment.outputs.POETRY_VERSION }} # poetry config virtualenvs.create true --local - # pip install invoke toml codecov + # pip install invoke codecov # - name: "Install Package" # run: "poetry install --all-extras" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..537c7969 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,22 @@ +--- +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: trailing-whitespace + - id: check-ast + - id: check-case-conflict + - id: check-merge-conflict + - id: check-toml + - id: check-yaml + - id: end-of-file-fixer + + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.11.9 + hooks: + # Run the linter. + - id: ruff + args: [--fix] + # Run the formatter. 
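+      # (ruff-format is Ruff's Black-compatible formatter; pre-commit runs hooks in
+      # the order listed, so formatting happens after the linter's --fix pass above.)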
+      - id: ruff-format
diff --git a/changelog/528.housekeeping.md b/changelog/528.housekeeping.md
new file mode 100644
index 00000000..fdf10b4e
--- /dev/null
+++ b/changelog/528.housekeeping.md
@@ -0,0 +1 @@
+Replace the `toml` package with the standard library `tomllib`, falling back to the optional `tomli` package when the Python version is less than 3.11
diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts
index 977e3db1..e5792e6d 100644
--- a/docs/docusaurus.config.ts
+++ b/docs/docusaurus.config.ts
@@ -110,7 +110,7 @@ const config: Config = {
       additionalLanguages: ["bash", "python", "markup-templating", "django", "json", "toml", "yaml"],
     },
   } satisfies Preset.ThemeConfig,
-  
+
   markdown: {
     format: "mdx",
     preprocessor: ({ filePath, fileContent }) => {
diff --git a/infrahub_sdk/ctl/config.py b/infrahub_sdk/ctl/config.py
index 9d3b6488..2f65f4c3 100644
--- a/infrahub_sdk/ctl/config.py
+++ b/infrahub_sdk/ctl/config.py
@@ -2,13 +2,18 @@
 
 from __future__ import annotations
 
+import sys
 from pathlib import Path
 
-import toml
 import typer
 from pydantic import Field, ValidationError, field_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
+if sys.version_info >= (3, 11):
+    import tomllib
+else:
+    import tomli as tomllib
+
 DEFAULT_CONFIG_FILE = "infrahubctl.toml"
 ENVVAR_CONFIG_FILE = "INFRAHUBCTL_CONFIG"
 INFRAHUB_REPO_CONFIG_FILE = ".infrahub.yml"
@@ -59,7 +64,7 @@ def load(self, config_file: str | Path = "infrahubctl.toml", config_data: dict | 
 
         if config_file.is_file():
             config_string = config_file.read_text(encoding="utf-8")
-            config_tmp = toml.loads(config_string)
+            config_tmp = tomllib.loads(config_string)
 
             self._settings = Settings(**config_tmp)
             return
diff --git a/poetry.lock b/poetry.lock
index fb547d71..a952e6ce 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand.
[[package]] name = "annotated-types" @@ -32,7 +32,7 @@ typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\""] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] [[package]] @@ -82,8 +82,8 @@ files = [ six = ">=1.12.0" [package.extras] -astroid = ["astroid (>=1,<2) ; python_version < \"3\"", "astroid (>=2,<4) ; python_version >= \"3\""] -test = ["astroid (>=1,<2) ; python_version < \"3\"", "astroid (>=2,<4) ; python_version >= \"3\"", "pytest"] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "async-timeout" @@ -92,7 +92,7 @@ description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version == \"3.10\"" +markers = "python_version >= \"3.10\" and python_version < \"3.11\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -112,12 +112,12 @@ files = [ ] [package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and 
python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "cachetools" @@ -296,7 +296,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "extra == \"ctl\" or extra == \"all\" or sys_platform == \"win32\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\" or python_version >= \"3.10\""} +markers = {main = "extra == \"ctl\" or sys_platform == \"win32\" or extra == \"all\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\" or python_version >= \"3.10\""} [[package]] name = "coolname" @@ -427,7 +427,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +toml = ["tomli"] [[package]] name = "dateparser" @@ -612,7 +612,7 @@ description = "Like `typing._eval_type`, but lets older Python versions use newe optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version == \"3.9\"" +markers = "python_version < \"3.10\"" files = [ {file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}, {file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}, @@ -665,7 +665,7 @@ files = [ ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] name = "fastapi" @@ -705,7 +705,7 @@ files = [ [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] -typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "fsspec" @@ -745,7 +745,7 @@ smb = ["smbprotocol"] ssh = ["paramiko"] test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] -test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", 
"panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard ; python_version < \"3.14\""] +test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] tqdm = ["tqdm"] [[package]] @@ -891,7 +891,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -972,12 +972,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -987,7 +987,7 @@ description = "Read resources from Python packages" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version == \"3.9\"" +markers = "python_version < \"3.10\"" files = [ {file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"}, {file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"}, @@ -997,7 +997,7 @@ files = [ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] @@ -1705,7 +1705,7 @@ python-dateutil = ">=2.6" tzdata = ">=2020.1" [package.extras] -test = ["time-machine (>=2.6.0) ; implementation_name != \"pypy\""] +test = ["time-machine (>=2.6.0)"] [[package]] name = "pexpect" @@ -2029,7 +2029,7 @@ typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] +timezone = ["tzdata"] [[package]] name = "pydantic-core" @@ -2161,11 +2161,11 @@ pydantic = ">=2.5.2" typing-extensions = "*" [package.extras] -all = ["pendulum (>=3.0.0,<4.0.0)", 
"phonenumbers (>=8,<10)", "pycountry (>=23)", "pymongo (>=4.0.0,<5.0.0)", "python-ulid (>=1,<2) ; python_version < \"3.9\"", "python-ulid (>=1,<4) ; python_version >= \"3.9\"", "pytz (>=2024.1)", "semver (>=3.0.2)", "semver (>=3.0.2,<3.1.0)", "tzdata (>=2024.1)"] +all = ["pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<10)", "pycountry (>=23)", "pymongo (>=4.0.0,<5.0.0)", "python-ulid (>=1,<2)", "python-ulid (>=1,<4)", "pytz (>=2024.1)", "semver (>=3.0.2)", "semver (>=3.0.2,<3.1.0)", "tzdata (>=2024.1)"] pendulum = ["pendulum (>=3.0.0,<4.0.0)"] phonenumbers = ["phonenumbers (>=8,<10)"] pycountry = ["pycountry (>=23)"] -python-ulid = ["python-ulid (>=1,<2) ; python_version < \"3.9\"", "python-ulid (>=1,<4) ; python_version >= \"3.9\""] +python-ulid = ["python-ulid (>=1,<2)", "python-ulid (>=1,<4)"] semver = ["semver (>=3.0.2)"] [[package]] @@ -2390,7 +2390,7 @@ async-timeout = {version = ">=4.0", optional = true, markers = "python_version < [package.extras] anyio = ["anyio (>=3.3.4,<5.0.0)"] -asyncio = ["async-timeout (>=4.0) ; python_version < \"3.11\""] +asyncio = ["async-timeout (>=4.0)"] curio = ["curio (>=1.4)"] trio = ["trio (>=0.24)"] @@ -2883,7 +2883,7 @@ description = "C version of reader, parser and emitter for ruamel.yaml derived f optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "platform_python_implementation == \"CPython\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and platform_python_implementation == \"CPython\"" files = [ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}, @@ -2891,6 +2891,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, @@ -2899,6 +2900,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, + {file = 
"ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, @@ -2907,6 +2909,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, @@ -2915,6 +2918,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, @@ -2923,6 +2927,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, @@ -3106,12 +3111,12 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -groups = ["main", "dev"] +groups = ["dev"] +markers = "python_version >= \"3.10\"" files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -markers = {main = "extra == \"ctl\" or extra == \"all\"", dev = "extra == \"ctl\" or extra == \"all\" or python_version >= \"3.10\""} [[package]] name = "tomli" @@ -3207,18 +3212,6 @@ files = [ {file = "types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"}, ] -[[package]] -name = "types-toml" -version = "0.10.8.20240310" -description = "Typing stubs for toml" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331"}, - {file = "types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d"}, -] - [[package]] name = "types-ujson" version = "5.10.0.20240515" @@ -3269,7 +3262,7 @@ files = [ {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] -markers = {main = "sys_platform == \"win32\"", dev = "(platform_system == \"Windows\" or sys_platform == \"win32\" or python_version < \"3.13\") and python_version >= \"3.10\""} +markers = {main = "sys_platform == \"win32\"", dev = "python_version >= \"3.10\" and python_version < \"3.13\" or python_version >= \"3.10\" and platform_system == \"Windows\" or sys_platform == \"win32\""} [[package]] name = "tzlocal" @@ -3391,7 +3384,7 @@ files = [ ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -3415,7 +3408,7 @@ h11 = ">=0.8" typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] -standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "virtualenv" @@ -3436,7 +3429,7 @@ 
platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "wcwidth" @@ -3531,91 +3524,6 @@ files = [ {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, ] -[[package]] -name = "whenever" -version = "0.7.2" -description = "Modern datetime library for Python" -optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version <= \"3.11\"" -files = [ - {file = "whenever-0.7.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a87864d3e7679dbedc55d3aa8c6cef5ffdc45520e16805f4c5a3cf71241fb986"}, - {file = "whenever-0.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f37dc37d1bea611af16a3aaba5960038604ddfb4a592b1d72a3efccd5853b6da"}, - {file = "whenever-0.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3da602b9fb80f8c6495e0495638c54a8b9a43362769199fcfe4e4fc6df33697"}, - {file = "whenever-0.7.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7afaeaada1d244016ce38252f0c0340bd7d199b4a240ba986efaab66b02f2"}, - {file = "whenever-0.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f71fd077601c27830e202ed652bd89b46ae6f1ba0f96d29897038dae9c80eead"}, - {file = "whenever-0.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40c74e8f0f3a9a540f580d44a22f2f9dc54b17b68d64abb1c0c961ab1343d43b"}, - {file = "whenever-0.7.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de72b15de311b28e6fcdb45bd436fbb0bde0d4596e0c446f9301bb523b6f2369"}, - {file = "whenever-0.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a84ff30d230e56250f89e99f5442d51a5215e10f6b7902d0d7ec51d8b06b6b2"}, - {file = "whenever-0.7.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2af28fa6c8446f513ed3c71275349831e79df021dadb0051fb5b6cbd353d16d6"}, - {file = "whenever-0.7.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:7f65c163f80b397f532d6dd9f56ead5b5d8b76bc24b1587dbb152bb466bd7de0"}, - {file = "whenever-0.7.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ef6c83a20b9ccfe10623596dda19d666cc95c0e83260a6568d767bc926da3781"}, - {file = "whenever-0.7.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6322b78dd97b295164f0d59115be71e9242f74c100899736876b1e8f19b2ff0f"}, - {file = "whenever-0.7.2-cp310-cp310-win32.whl", hash = "sha256:b9a2fc32a8914771d994d6349dcf25208c82d0eb6cf33f27b2309d9e8f58a51a"}, - {file = "whenever-0.7.2-cp310-cp310-win_amd64.whl", hash 
= "sha256:1440b8e1ef507c318a741bede7a43263f84909c43cf48f110de509233b89d77c"}, - {file = "whenever-0.7.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0b5aaa62551213b3b099b460331fce75c7dbabc2f6696fe3be845cb4ecc8a856"}, - {file = "whenever-0.7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4720cc7bf704e92b89bf60329f21084256b4b4a9dcc47a782461f7918d7e1fb"}, - {file = "whenever-0.7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91a18c81e517124463200b7fcde40ddcc18c959791b219dd681dc5fdec04f050"}, - {file = "whenever-0.7.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:85ef3850f6a9ce3d5349a4f5a1d7fda14c68d3f18c0d18a890bcb11955709a8c"}, - {file = "whenever-0.7.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9af47e5ff5d8528a7149f253276e1094bb944335074241d7e9f6c26ea12aa9ac"}, - {file = "whenever-0.7.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:389afeb92b6272f35132a428884ba03f52ca5a9e80c1b28e0f9699f6098abf34"}, - {file = "whenever-0.7.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:825a9567ba0b91f1e970cd59f0bbf7b6c2c12c41621fd3264e2d1a0f596c3efe"}, - {file = "whenever-0.7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d303cb0c691784219b7539e537167ea573cf58acc42696159585d27dacd10af"}, - {file = "whenever-0.7.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6194cf9bf76cb0e3c8593d757b73b41cb33c1137ce1a79795812d43be8a29a95"}, - {file = "whenever-0.7.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:59c0fb56aed72a0ec10a83b99f8eee2e96e4b32045e4ecfe85027129295cde6a"}, - {file = "whenever-0.7.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f50068f98c85706e384a84e6a6e5d0d38760bbcb770fbd140596d2228f101c2e"}, - {file = "whenever-0.7.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:993c98e9956597b61e21c3c65da8d5e9eb342fe6c6efc2135432be56aa64a116"}, - {file = "whenever-0.7.2-cp311-cp311-win32.whl", hash = "sha256:e1d0ea62becd437ae9c911303cbcc5ba66107a79c9e60a4e0f965537878a3c77"}, - {file = "whenever-0.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:c70a6ab84a4d7bb44e86fa9ebec2ea36a456457d211dcb48f16f54487774ec45"}, - {file = "whenever-0.7.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:acaedfa0327e8859c078c40c2e17a3d169ce9f784c3735c09fd701d4035b7432"}, - {file = "whenever-0.7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38de1c34ab4e42eda4006e8635cadc0c526094a546aa5ebf6a903c61d33053f3"}, - {file = "whenever-0.7.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87e62291c4a0f212a13053f021b9255e0b820e57303c96e94b48304b84a1849d"}, - {file = "whenever-0.7.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cbf68b2833e6766fb4898ebe432406ce6ead7ac846f7b15427bfbd560d5939"}, - {file = "whenever-0.7.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2220cf0d818d960d4a7ec1b05ffbed7b81e482807be0b4bb7a5466418a4c8f79"}, - {file = "whenever-0.7.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7395c69109113eb1666bac29b6207caf28e38e25d332c57649a7e710f0d863db"}, - {file = "whenever-0.7.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efcbffe9a510f310f019fe5bfe877e591ea8cdad90ac8fe6868a80659d411ac5"}, - {file = "whenever-0.7.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e2d836ad37f4333e938779eae6e64f532f27ce19529ee9c09bfb62f796e41db1"}, - 
{file = "whenever-0.7.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:59f1949d1efe4a85cfe81130159dc2c871ea5b56bae6e9782d5e344a747a758e"}, - {file = "whenever-0.7.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:030836f2cb15eb33631c3d3c2f904d481edc797df063814f9c77d060db5db17d"}, - {file = "whenever-0.7.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b25d8fd6ade64cf1492707c019cccb726aa07dfb20f79a4751eccb56555c2012"}, - {file = "whenever-0.7.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:247e0255c6c8ded48a6d3734aabf448f2bf07bb2abb65b2828104df1eaab82cf"}, - {file = "whenever-0.7.2-cp312-cp312-win32.whl", hash = "sha256:81fcef2c6917333d3aa8d24043e01323d8831c1354cabcb935e29b2a1f6a7a4f"}, - {file = "whenever-0.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:2cdb4ddd2b2e908a076232a60577e4616096d4cf166da9373c4a03bf9d81721e"}, - {file = "whenever-0.7.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:c35f47f613a7816d602fd39594400bfe7fff70a3bd7272cd9b8c736ffc13feed"}, - {file = "whenever-0.7.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0cb7515e180aa2fea6c2d1855607011dd08d14acaba750b0673d7d6f536b0f5e"}, - {file = "whenever-0.7.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac40aed9be0bc1aeba2662e17f145987f84e8a0bafbfa5f938b40db82fc7aba"}, - {file = "whenever-0.7.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccd0840d04648dad3c5ae81a53a56e08a971a316d4167921665a7aa5fa8f0085"}, - {file = "whenever-0.7.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf5199ffa1da783207b0c75d478ab6d808309cc0cbb2631640393bd943b6167e"}, - {file = "whenever-0.7.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7be787eeb542f86cc2d0081c541e89e4417261976a50a7824f6e43248fadb294"}, - {file = "whenever-0.7.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d78e13c648ce246dbaa54f78faf1d2f3d8107619f3c598d3d127ca45fd5d792a"}, - {file = "whenever-0.7.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cc490b577f38bb55957e04d6a1b594c5365f01a6f3429c38b26243d3cf473d80"}, - {file = "whenever-0.7.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bf322daa4184e7d89a4549498c8408e6c4a0bd2309eacd4b21151020bf51870c"}, - {file = "whenever-0.7.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:00b60a9af13e4c6b618f52a55ae7c15c36eb3ff42bfc6cb050981e8a2402bc9f"}, - {file = "whenever-0.7.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a2f82fd85a6521090d3f44412f4c76687a0e141df215541f6f0f6691276257e7"}, - {file = "whenever-0.7.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a386df7b5e08f56a49f8a00991e54c3f5ebb218570d7a98c726d793859a2b0ea"}, - {file = "whenever-0.7.2-cp313-cp313-win32.whl", hash = "sha256:46e51abd495c91fd586828401884750d7eb96ca3658d3d9f228f62beb140c758"}, - {file = "whenever-0.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:af67395516ed16a8423735a4dd5a8795353f39e758b7428178dbe8de06977f21"}, - {file = "whenever-0.7.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a8b25304ffc9563bf17914a9a9bf6642456923c727d330fcfa483d303f549805"}, - {file = "whenever-0.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2978fb80700e583e2f957cd47c51d6b161f38a50b85a1744fcf3b13e53acf113"}, - {file = "whenever-0.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:246ce04d18469169582cd492b6a4f74f6c166ed2caa869679522b02228c0bbf8"}, - {file = 
"whenever-0.7.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d464feea543f36dd712eee0f47ea690cf1a4d474c39ddaafe30254434ac9b2e"}, - {file = "whenever-0.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:253460d1194a1dcb27a47a0c6cead61cbf0a29d5bb795e7f42caa0e7be32cae9"}, - {file = "whenever-0.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a6b35953ca90ef5f0f2a7f3e951d110239fcccde5eccf08c4a0872821d41066"}, - {file = "whenever-0.7.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6c9bb2528c345d552e0e25ab82276dd9765185718dfdf2654f0d84771eb3fa9"}, - {file = "whenever-0.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bc0a7e6e5bfa15531910ca4a062fdc20c071747f016599999eac3d8fef7ea4db"}, - {file = "whenever-0.7.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8c16c03a556819c8f1738dbcfa2793c8c0d2a9a496e0ec1524fea8a124d20037"}, - {file = "whenever-0.7.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:43c2f1be68f638c7f3f27c60e5851b5b94aa3ba0186e84bc2010c880e71f7f84"}, - {file = "whenever-0.7.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:715c4da7fbef766bfb5511017782873c98adac9f5f982806ead9b4a99f7bb086"}, - {file = "whenever-0.7.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d3daadd03d392048a4041969132ae2a6b57941b172870c526b14c8343721967d"}, - {file = "whenever-0.7.2-cp39-cp39-win32.whl", hash = "sha256:7b3c1d9ec5dc844686aad66bb0e14dda7d9667a113757c1f566a8e8036e4585f"}, - {file = "whenever-0.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:166f4d31f0be9ee59d00670f52a724c4d1090688b46e3531d0ccb74ae3157032"}, - {file = "whenever-0.7.2.tar.gz", hash = "sha256:a292dddd4d635a5b597686117e455d41e6134716a7be66b3903554514df8729c"}, -] - -[package.dependencies] -tzdata = {version = ">=2020.1", markers = "sys_platform == \"win32\""} - [[package]] name = "whenever" version = "0.7.3" @@ -3623,7 +3531,6 @@ description = "Modern datetime library for Python" optional = false python-versions = ">=3.9" groups = ["main", "dev"] -markers = "python_version >= \"3.12\"" files = [ {file = "whenever-0.7.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:50b9cd57c6bf173c320cfcac499aa3c26e40204648b995b68d083a60edb27d93"}, {file = "whenever-0.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b26c38b4f3cac25c671760c0bac7950aaa0b8ac6b028e1c9c60244ef1e841c0b"}, @@ -3809,19 +3716,19 @@ files = [ ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [extras] -all = ["Jinja2", "click", "copier", "numpy", "numpy", "pyarrow", "pytest", "pyyaml", "rich", "toml", "typer"] -ctl = ["Jinja2", "click", "copier", "numpy", "numpy", "pyarrow", "pyyaml", "rich", "toml", "typer"] +all = ["Jinja2", "click", "copier", "numpy", "numpy", "pyarrow", "pytest", "pyyaml", "rich", "tomli", "typer"] +ctl = ["Jinja2", "click", "copier", 
"numpy", "numpy", "pyarrow", "pyyaml", "rich", "tomli", "typer"] tests = ["Jinja2", "pytest", "pyyaml", "rich"] [metadata] lock-version = "2.1" python-versions = "^3.9, <3.14" -content-hash = "ef968c8e3a88fca5c1890126f0d154dc0e51dd37603878f7d7a5584b69efbb71" +content-hash = "95a903d6668a2aca0f6cb12b295d472b2b3855e51392d1b59d06cccace87d99d" diff --git a/pyproject.toml b/pyproject.toml index 61747987..932a4033 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,6 @@ numpy = [ ] pyarrow = { version = ">=14", optional = true } rich = { version = ">=12, <14", optional = true } -toml = { version = "^0.10", optional = true } typer = { version = "^0.12.3", optional = true } pytest = { version = "*", optional = true } pyyaml = { version = "^6", optional = true } @@ -46,6 +45,7 @@ whenever = ">=0.7.2,<0.8.0" netutils = "^1.0.0" click = { version = "8.1.*", optional = true } copier = { version = "^9.8.0", optional = true } +tomli = { version = ">=1.1.0", python = "<3.11", optional = true } [tool.poetry.group.dev.dependencies] pytest = "*" @@ -58,7 +58,6 @@ mypy = "*" ipython = "*" requests = "*" pre-commit = "^2.20.0" -types-toml = "*" types-ujson = "*" types-pyyaml = "*" ruff = "0.11.0" @@ -70,7 +69,7 @@ infrahub-testcontainers = { version = "^1.4.0", python = ">=3.10" } astroid = "~3.1" [tool.poetry.extras] -ctl = ["Jinja2", "numpy", "pyarrow", "pyyaml", "rich", "toml", "typer", "click", "copier"] +ctl = ["Jinja2", "numpy", "pyarrow", "pyyaml", "rich", "tomli", "typer", "click", "copier"] tests = ["Jinja2", "pytest", "pyyaml", "rich"] all = [ "Jinja2", @@ -79,7 +78,7 @@ all = [ "pytest", "pyyaml", "rich", - "toml", + "tomli", "typer", "click", "copier", From 10cb555d1c4f7e358426f910fca40a292997be51 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 17 Sep 2025 14:48:01 +0000 Subject: [PATCH 08/16] Initial plan From 40489aa8c7beb4d936afd8499d358a5b9cf5a976 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 17 Sep 2025 14:58:45 +0000 Subject: [PATCH 09/16] Fix JsonDecodeError to include server response content in error message Co-authored-by: BeArchiTek <1334310+BeArchiTek@users.noreply.github.com> --- infrahub_sdk/exceptions.py | 2 ++ tests/unit/sdk/test_utils.py | 54 ++++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/infrahub_sdk/exceptions.py b/infrahub_sdk/exceptions.py index a8b1ef9b..d8982d8e 100644 --- a/infrahub_sdk/exceptions.py +++ b/infrahub_sdk/exceptions.py @@ -17,6 +17,8 @@ def __init__(self, message: str | None = None, content: str | None = None, url: self.url = url if not self.message and self.url: self.message = f"Unable to decode response as JSON data from {self.url}" + if self.content: + self.message += f". 
Server response: {self.content}" super().__init__(self.message) diff --git a/tests/unit/sdk/test_utils.py b/tests/unit/sdk/test_utils.py index 99ef7e29..bc56bf98 100644 --- a/tests/unit/sdk/test_utils.py +++ b/tests/unit/sdk/test_utils.py @@ -1,11 +1,14 @@ +import json import tempfile import uuid from pathlib import Path +from unittest.mock import Mock import pytest from graphql import parse from whenever import Instant +from infrahub_sdk.exceptions import JsonDecodeError from infrahub_sdk.utils import ( base16decode, base16encode, @@ -13,6 +16,7 @@ base36encode, calculate_time_diff, compare_lists, + decode_json, deep_merge_dict, dict_hash, duplicates, @@ -227,3 +231,53 @@ def test_calculate_time_diff() -> None: time5 = Instant.now().subtract(hours=77, minutes=12, seconds=34).format_common_iso() assert calculate_time_diff(time5) == "3d and 5h ago" + + +def test_decode_json_success() -> None: + """Test decode_json with valid JSON response.""" + mock_response = Mock() + mock_response.json.return_value = {"status": "ok", "data": {"key": "value"}} + + result = decode_json(mock_response) + assert result == {"status": "ok", "data": {"key": "value"}} + + +def test_decode_json_failure_with_content() -> None: + """Test decode_json with invalid JSON response includes server content in error message.""" + mock_response = Mock() + mock_response.json.side_effect = json.decoder.JSONDecodeError("Invalid JSON", "document", 0) + mock_response.text = "Internal Server Error: Database connection failed" + mock_response.url = "https://example.com/api/graphql" + + with pytest.raises(JsonDecodeError) as exc_info: + decode_json(mock_response) + + error_message = str(exc_info.value) + assert "Unable to decode response as JSON data from https://example.com/api/graphql" in error_message + assert "Server response: Internal Server Error: Database connection failed" in error_message + + +def test_decode_json_failure_without_content() -> None: + """Test decode_json with invalid JSON response and no content.""" + mock_response = Mock() + mock_response.json.side_effect = json.decoder.JSONDecodeError("Invalid JSON", "document", 0) + mock_response.text = "" + mock_response.url = "https://example.com/api/graphql" + + with pytest.raises(JsonDecodeError) as exc_info: + decode_json(mock_response) + + error_message = str(exc_info.value) + assert "Unable to decode response as JSON data from https://example.com/api/graphql" in error_message + # Should not include server response part when content is empty + assert "Server response:" not in error_message + + +def test_json_decode_error_custom_message() -> None: + """Test JsonDecodeError with custom message does not override custom message.""" + custom_message = "Custom error message" + error = JsonDecodeError(message=custom_message, content="server error", url="https://example.com") + + assert str(error) == custom_message + assert error.content == "server error" + assert error.url == "https://example.com" From 9340fd24ea418cc54d6243a77428bebbe37b170b Mon Sep 17 00:00:00 2001 From: LucasG0 <44552904+LucasG0@users.noreply.github.com> Date: Fri, 19 Sep 2025 16:09:30 +0200 Subject: [PATCH 10/16] Add convert_object_type method (#554) --- changelog/+convert-object-type.added.md | 1 + infrahub_sdk/client.py | 65 +++++++++++ infrahub_sdk/convert_object_type.py | 60 ++++++++++ tests/constants.py | 3 + tests/integration/test_convert_object_type.py | 108 ++++++++++++++++++ 5 files changed, 237 insertions(+) create mode 100644 changelog/+convert-object-type.added.md create mode 100644 
infrahub_sdk/convert_object_type.py
 create mode 100644 tests/constants.py
 create mode 100644 tests/integration/test_convert_object_type.py

diff --git a/changelog/+convert-object-type.added.md b/changelog/+convert-object-type.added.md
new file mode 100644
index 00000000..2a27d473
--- /dev/null
+++ b/changelog/+convert-object-type.added.md
@@ -0,0 +1 @@
+Add `convert_object_type` method to allow converting an object to another type.
\ No newline at end of file
diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py
index 8189c1d5..3d2649e0 100644
--- a/infrahub_sdk/client.py
+++ b/infrahub_sdk/client.py
@@ -33,6 +33,7 @@
 )
 from .config import Config
 from .constants import InfrahubClientMode
+from .convert_object_type import CONVERT_OBJECT_MUTATION, ConversionFieldInput
 from .data import RepositoryBranchInfo, RepositoryData
 from .diff import NodeDiff, diff_tree_node_to_node_diff, get_diff_summary_query
 from .exceptions import (
@@ -1670,6 +1671,38 @@
 
         self.mode = InfrahubClientMode.DEFAULT
 
+    async def convert_object_type(
+        self,
+        node_id: str,
+        target_kind: str,
+        branch: str | None = None,
+        fields_mapping: dict[str, ConversionFieldInput] | None = None,
+    ) -> InfrahubNode:
+        """
+        Convert a given node to another kind on a given branch. `fields_mapping` keys are the target field names
+        and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field
+        in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type
+        for more information.
+        """
+
+        if fields_mapping is None:
+            mapping_dict = {}
+        else:
+            mapping_dict = {field_name: model.model_dump(mode="json") for field_name, model in fields_mapping.items()}
+
+        branch_name = branch or self.default_branch
+        response = await self.execute_graphql(
+            query=CONVERT_OBJECT_MUTATION,
+            variables={
+                "node_id": node_id,
+                "fields_mapping": mapping_dict,
+                "target_kind": target_kind,
+            },
+            branch_name=branch_name,
+            raise_for_error=True,
+        )
+        return await InfrahubNode.from_graphql(client=self, branch=branch_name, data=response["ConvertObjectType"])
+
 
 class InfrahubClientSync(BaseClient):
     schema: InfrahubSchemaSync
@@ -2984,3 +3017,35 @@ def __exit__(
             self.group_context.update_group()
 
         self.mode = InfrahubClientMode.DEFAULT
+
+    def convert_object_type(
+        self,
+        node_id: str,
+        target_kind: str,
+        branch: str | None = None,
+        fields_mapping: dict[str, ConversionFieldInput] | None = None,
+    ) -> InfrahubNodeSync:
+        """
+        Convert a given node to another kind on a given branch. `fields_mapping` keys are the target field names
+        and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field
+        in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type
+        for more information.
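+
+        A minimal sketch of a call (the kind and field names below are illustrative,
+        not taken from a real schema):
+
+            mapping = {
+                "name": ConversionFieldInput(source_field="name"),
+                "age": ConversionFieldInput(data=ConversionFieldValue(attribute_value=30)),
+            }
+            new_node = client.convert_object_type(
+                node_id=node.id, target_kind="ExamplePerson", fields_mapping=mapping
+            )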
+ """ + + if fields_mapping is None: + mapping_dict = {} + else: + mapping_dict = {field_name: model.model_dump(mode="json") for field_name, model in fields_mapping.items()} + + branch_name = branch or self.default_branch + response = self.execute_graphql( + query=CONVERT_OBJECT_MUTATION, + variables={ + "node_id": node_id, + "fields_mapping": mapping_dict, + "target_kind": target_kind, + }, + branch_name=branch_name, + raise_for_error=True, + ) + return InfrahubNodeSync.from_graphql(client=self, branch=branch_name, data=response["ConvertObjectType"]) diff --git a/infrahub_sdk/convert_object_type.py b/infrahub_sdk/convert_object_type.py new file mode 100644 index 00000000..fe7ee4b5 --- /dev/null +++ b/infrahub_sdk/convert_object_type.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +from typing import Any + +from pydantic import BaseModel, model_validator + +CONVERT_OBJECT_MUTATION = """ + mutation($node_id: String!, $target_kind: String!, $fields_mapping: GenericScalar!) { + ConvertObjectType(data: { + node_id: $node_id, + target_kind: $target_kind, + fields_mapping: $fields_mapping + }) { + ok + node + } + } +""" + + +class ConversionFieldValue(BaseModel): # Only one of these fields can be not None + """ + Holds the new value of the destination field during an object conversion. + Use `attribute_value` to specify the new raw value of an attribute. + Use `peer_id` to specify new peer of a cardinality one relationship. + Use `peers_ids` to specify new peers of a cardinality many relationship. + Only one of `attribute_value`, `peer_id` and `peers_ids` can be specified. + """ + + attribute_value: Any | None = None + peer_id: str | None = None + peers_ids: list[str] | None = None + + @model_validator(mode="after") + def check_only_one_field(self) -> ConversionFieldValue: + fields = [self.attribute_value, self.peer_id, self.peers_ids] + set_fields = [f for f in fields if f is not None] + if len(set_fields) != 1: + raise ValueError("Exactly one of attribute_value, peer_id, or peers_ids must be set") + return self + + +class ConversionFieldInput(BaseModel): + """ + Indicates how to fill in the value of the destination field during an object conversion. + Use `source_field` to reuse the value of the corresponding field of the object being converted. + Use `data` to specify the new value for the field. + Only one of `source_field` or `data` can be specified. 
+ """ + + source_field: str | None = None + data: ConversionFieldValue | None = None + + @model_validator(mode="after") + def check_only_one_field(self) -> ConversionFieldInput: + if self.source_field is not None and self.data is not None: + raise ValueError("Only one of source_field or data can be set") + if self.source_field is None and self.data is None: + raise ValueError("Either source_field or data must be set") + return self diff --git a/tests/constants.py b/tests/constants.py new file mode 100644 index 00000000..1c64b631 --- /dev/null +++ b/tests/constants.py @@ -0,0 +1,3 @@ +CLIENT_TYPE_ASYNC = "standard" +CLIENT_TYPE_SYNC = "sync" +CLIENT_TYPES = [CLIENT_TYPE_ASYNC, CLIENT_TYPE_SYNC] diff --git a/tests/integration/test_convert_object_type.py b/tests/integration/test_convert_object_type.py new file mode 100644 index 00000000..7aee141a --- /dev/null +++ b/tests/integration/test_convert_object_type.py @@ -0,0 +1,108 @@ +from __future__ import annotations + +import uuid +from typing import Any + +import pytest + +from infrahub_sdk.convert_object_type import ConversionFieldInput, ConversionFieldValue +from infrahub_sdk.testing.docker import TestInfrahubDockerClient +from tests.constants import CLIENT_TYPE_ASYNC, CLIENT_TYPES + +SCHEMA: dict[str, Any] = { + "version": "1.0", + "generics": [ + { + "name": "PersonGeneric", + "namespace": "Testconv", + "human_friendly_id": ["name__value"], + "attributes": [ + {"name": "name", "kind": "Text", "unique": True}, + ], + }, + ], + "nodes": [ + { + "name": "Person1", + "namespace": "Testconv", + "inherit_from": ["TestconvPersonGeneric"], + }, + { + "name": "Person2", + "namespace": "Testconv", + "inherit_from": ["TestconvPersonGeneric"], + "attributes": [ + {"name": "age", "kind": "Number"}, + ], + "relationships": [ + { + "name": "worst_car", + "peer": "TestconvCar", + "cardinality": "one", + "identifier": "person__mandatory_owner", + }, + { + "name": "fastest_cars", + "peer": "TestconvCar", + "cardinality": "many", + "identifier": "person__fastest_cars", + }, + ], + }, + { + "name": "Car", + "namespace": "Testconv", + "human_friendly_id": ["name__value"], + "attributes": [ + {"name": "name", "kind": "Text"}, + ], + }, + ], +} + + +class TestConvertObjectType(TestInfrahubDockerClient): + @pytest.mark.parametrize("client_type", CLIENT_TYPES) + async def test_convert_object_type(self, client, client_sync, client_type) -> None: + resp = await client.schema.load(schemas=[SCHEMA], wait_until_converged=True) + assert not resp.errors + + person_1 = await client.create(kind="TestconvPerson1", name=f"person_{uuid.uuid4()}") + await person_1.save() + car_1 = await client.create(kind="TestconvCar", name=f"car_{uuid.uuid4()}") + await car_1.save() + + new_age = 25 + fields_mapping = { + "name": ConversionFieldInput(source_field="name"), + "age": ConversionFieldInput(data=ConversionFieldValue(attribute_value=new_age)), + "worst_car": ConversionFieldInput(data=ConversionFieldValue(peer_id=car_1.id)), + "fastest_cars": ConversionFieldInput(data=ConversionFieldValue(peers_ids=[car_1.id])), + } + + if client_type == CLIENT_TYPE_ASYNC: + person_2 = await client.convert_object_type( + node_id=person_1.id, + target_kind="TestconvPerson2", + branch=client.default_branch, + fields_mapping=fields_mapping, + ) + else: + person_2 = client_sync.convert_object_type( + node_id=person_1.id, + target_kind="TestconvPerson2", + branch=client.default_branch, + fields_mapping=fields_mapping, + ) + + assert person_2.get_kind() == "TestconvPerson2" + assert person_2.name.value == 
person_1.name.value + assert person_2.age.value == new_age + + # Fetch relationships of new node + person_2 = await client.get( + kind="TestconvPerson2", id=person_2.id, branch=client.default_branch, prefetch_relationships=True + ) + assert person_2.worst_car.peer.id == car_1.id + await person_2.fastest_cars.fetch() + assert {related_node.peer.id for related_node in person_2.fastest_cars.peers} == {car_1.id}
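
A minimal usage sketch of the `convert_object_type` API added in this patch, using the sync client; the server address, the lookup values, and the kinds (taken from the integration-test schema above) are illustrative only:

from infrahub_sdk import InfrahubClientSync
from infrahub_sdk.convert_object_type import ConversionFieldInput, ConversionFieldValue

client = InfrahubClientSync(address="http://localhost:8000")  # illustrative address

# Hypothetical lookups; any node and peer of the right kinds would work the same way.
person = client.get(kind="TestconvPerson1", name__value="alice")
car = client.get(kind="TestconvCar", name__value="car-1")

converted = client.convert_object_type(
    node_id=person.id,
    target_kind="TestconvPerson2",
    branch="main",
    fields_mapping={
        # Reuse the source node's "name" attribute as-is.
        "name": ConversionFieldInput(source_field="name"),
        # Supply explicit values for fields that have no equivalent on the source kind.
        "age": ConversionFieldInput(data=ConversionFieldValue(attribute_value=30)),
        "worst_car": ConversionFieldInput(data=ConversionFieldValue(peer_id=car.id)),
        "fastest_cars": ConversionFieldInput(data=ConversionFieldValue(peers_ids=[car.id])),
    },
)
assert converted.get_kind() == "TestconvPerson2"
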
From f32152329a80499e2e0ea4b3256a4fd31bc0c613 Mon Sep 17 00:00:00 2001 From: Alex Gittings Date: Mon, 22 Sep 2025 09:47:34 +0100 Subject: [PATCH 11/16] Add support for clearing optional attributes (#548) * add support for clearing optional dropdown attributes and enhance tests for dropdown handling * add changelog entry for clearing optional dropdown attributes * refactor: update branch handling in changelog and allow clearing optional attributes if mutated --- changelog/535.fixed.md | 2 +- changelog/549.fixed.md | 1 + infrahub_sdk/node/attribute.py | 2 ++ tests/unit/sdk/conftest.py | 42 ++++++++++++++++++++++++++++++++++ tests/unit/sdk/test_node.py | 28 +++++++++++++++++++++++ 5 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 changelog/549.fixed.md diff --git a/changelog/535.fixed.md b/changelog/535.fixed.md index fdbd499e..56c8fd43 100644 --- a/changelog/535.fixed.md +++ b/changelog/535.fixed.md @@ -1 +1 @@ -Fix branch handling in `_run_transform` and `execute_graphql_query` functions in Infrahubctl to use environment variables for branch management. \ No newline at end of file +Fix branch handling in `_run_transform` and `execute_graphql_query` functions in Infrahubctl to use environment variables for branch management. \ No newline at end of file diff --git a/changelog/549.fixed.md b/changelog/549.fixed.md new file mode 100644 index 00000000..1a4f975c --- /dev/null +++ b/changelog/549.fixed.md @@ -0,0 +1 @@ +Allow clearing optional attributes by setting them to None if they have been mutated by the user. \ No newline at end of file diff --git a/infrahub_sdk/node/attribute.py b/infrahub_sdk/node/attribute.py index 5ddc5cbe..9c752521 100644 --- a/infrahub_sdk/node/attribute.py +++ b/infrahub_sdk/node/attribute.py @@ -76,6 +76,8 @@ def _generate_input_data(self) -> dict | None: variables: dict[str, Any] = {} if self.value is None: + if self._schema.optional and self.value_has_been_mutated: + data["value"] = None return data if isinstance(self.value, str): diff --git a/tests/unit/sdk/conftest.py b/tests/unit/sdk/conftest.py index 5f0d7c2a..92749412 100644 --- a/tests/unit/sdk/conftest.py +++ b/tests/unit/sdk/conftest.py @@ -177,6 +177,48 @@ async def location_schema() -> NodeSchemaAPI: return NodeSchema(**data).convert_api() # type: ignore +@pytest.fixture +async def location_schema_with_dropdown() -> NodeSchemaAPI: + data = { + "name": "Location", + "namespace": "Builtin", + "default_filter": "name__value", + "attributes": [ + {"name": "name", "kind": "String", "unique": True}, + {"name": "description", "kind": "String", "optional": True}, + {"name": "type", "kind": "String"}, + { + "name": "status", + "kind": "Dropdown", + "optional": True, + "choices": [{"name": "active", "label": "Active"}, {"name": "planning", "label": "Planning"}], + }, + ], + "relationships": [ + { + "name": "tags", + "peer": "BuiltinTag", + "optional": True, + "cardinality": "many", + }, + { + "name": "primary_tag", + "peer": "BuiltinTag", + "optional": True, + "cardinality": "one", + }, + { + "name": "member_of_groups", + "peer": "CoreGroup", + "optional": True, + "cardinality": "many", + "kind": "Group", + }, + ], + } + return NodeSchema(**data).convert_api() # type: ignore + + @pytest.fixture async def schema_with_hfid() -> dict[str, NodeSchemaAPI]: data = { diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py index c5c75052..e4192871 100644 --- a/tests/unit/sdk/test_node.py +++ b/tests/unit/sdk/test_node.py @@ -1370,6 +1370,34 @@ async def test_create_input_data(client, location_schema: NodeSchemaAPI, client_ } +@pytest.mark.parametrize("client_type", client_types) +async def test_create_input_data_with_dropdown(client, location_schema_with_dropdown, client_type) -> None: + """Validate input data including dropdown field""" + data = { + "name": {"value": "JFK1"}, + "description": {"value": "JFK Airport"}, + "type": {"value": "SITE"}, + "status": {"value": "active"}, + } + + if client_type == "standard": + node = InfrahubNode(client=client, schema=location_schema_with_dropdown, data=data) + else: + node = InfrahubNodeSync(client=client, schema=location_schema_with_dropdown, data=data) + + assert node.status.value == "active" + node.status = None + assert node._generate_input_data()["data"] == { + "data": { + "name": {"value": "JFK1"}, + "description": {"value": "JFK Airport"}, + "type": {"value": "SITE"}, + "status": {"value": None}, + "primary_tag": None, + } + } + + @pytest.mark.parametrize("client_type", client_types) async def test_create_input_data__with_relationships_02(client, location_schema, client_type) -> None: + """Validate input data with variables that needs replacements"""
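
The clearing behavior introduced in this patch can be shown with a short sketch against the sync client; the node lookup mirrors the unit-test fixture above, and the address and lookup value are illustrative:

from infrahub_sdk import InfrahubClientSync

client = InfrahubClientSync(address="http://localhost:8000")  # illustrative address
location = client.get(kind="BuiltinLocation", name__value="JFK1")  # hypothetical existing node

# "status" is an optional dropdown in the fixture schema; assigning None marks the
# attribute as mutated, so the update payload now carries {"status": {"value": None}}.
location.status = None
location.save()
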
From 471bfac7c4af2b2ff75fecbeed926e799a1be47c Mon Sep 17 00:00:00 2001 From: "infrahub-github-bot-app[bot]" <190746546+infrahub-github-bot-app[bot]@users.noreply.github.com> Date: Mon, 22 Sep 2025 16:21:26 +0100 Subject: [PATCH 12/16] Add support for clearing optional attributes (#548) (#557) * add support for clearing optional dropdown attributes and enhance tests for dropdown handling * add changelog entry for clearing optional dropdown attributes * refactor: update branch handling in changelog and allow clearing optional attributes if mutated Co-authored-by: Alex Gittings --- changelog/535.fixed.md | 2 +- changelog/549.fixed.md | 1 + infrahub_sdk/node/attribute.py | 2 ++ tests/unit/sdk/conftest.py | 42 ++++++++++++++++++++++++++++++++++ tests/unit/sdk/test_node.py | 28 +++++++++++++++++++++++ 5 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 changelog/549.fixed.md diff --git a/changelog/535.fixed.md b/changelog/535.fixed.md index fdbd499e..56c8fd43 100644 --- a/changelog/535.fixed.md +++ b/changelog/535.fixed.md @@ -1 +1 @@ -Fix branch handling in `_run_transform` and `execute_graphql_query` functions in Infrahubctl to use environment variables for branch management. \ No newline at end of file +Fix branch handling in `_run_transform` and `execute_graphql_query` functions in Infrahubctl to use environment variables for branch management. \ No newline at end of file diff --git a/changelog/549.fixed.md b/changelog/549.fixed.md new file mode 100644 index 00000000..1a4f975c --- /dev/null +++ b/changelog/549.fixed.md @@ -0,0 +1 @@ +Allow clearing optional attributes by setting them to None if they have been mutated by the user. \ No newline at end of file diff --git a/infrahub_sdk/node/attribute.py b/infrahub_sdk/node/attribute.py index 5ddc5cbe..9c752521 100644 --- a/infrahub_sdk/node/attribute.py +++ b/infrahub_sdk/node/attribute.py @@ -76,6 +76,8 @@ def _generate_input_data(self) -> dict | None: variables: dict[str, Any] = {} if self.value is None: + if self._schema.optional and self.value_has_been_mutated: + data["value"] = None return data if isinstance(self.value, str): diff --git a/tests/unit/sdk/conftest.py b/tests/unit/sdk/conftest.py index 5f0d7c2a..92749412 100644 --- a/tests/unit/sdk/conftest.py +++ b/tests/unit/sdk/conftest.py @@ -177,6 +177,48 @@ async def location_schema() -> NodeSchemaAPI: return NodeSchema(**data).convert_api() # type: ignore +@pytest.fixture +async def location_schema_with_dropdown() -> NodeSchemaAPI: + data = { + "name": "Location", + "namespace": "Builtin", + "default_filter": "name__value", + "attributes": [ + {"name": "name", "kind": "String", "unique": True}, + {"name": "description", "kind": "String", "optional": True}, + {"name": "type", "kind": "String"}, + { + "name": "status", + "kind": "Dropdown", + "optional": True, + "choices": [{"name": "active", "label": "Active"}, {"name": "planning", "label": "Planning"}], + }, + ], + "relationships": [ + { + "name": "tags", + "peer": "BuiltinTag", + "optional": True, + "cardinality": "many", + }, + { + "name": "primary_tag", + "peer": "BuiltinTag", + "optional": True, + "cardinality": "one", + }, + { + "name": "member_of_groups", + "peer": "CoreGroup", + "optional": True, + "cardinality": "many", + "kind": "Group", + }, + ], + } + return NodeSchema(**data).convert_api() # type: ignore + + @pytest.fixture async def schema_with_hfid() -> dict[str, NodeSchemaAPI]: data = { diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py index c5c75052..e4192871 100644 --- a/tests/unit/sdk/test_node.py +++ b/tests/unit/sdk/test_node.py @@ -1370,6 +1370,34 @@ async def test_create_input_data(client, location_schema: NodeSchemaAPI, client_ } +@pytest.mark.parametrize("client_type", client_types) +async def test_create_input_data_with_dropdown(client, location_schema_with_dropdown, client_type) -> None: + """Validate input data including dropdown field""" + data = { + "name": {"value":
"JFK1"}, + "description": {"value": "JFK Airport"}, + "type": {"value": "SITE"}, + "status": {"value": "active"}, + } + + if client_type == "standard": + node = InfrahubNode(client=client, schema=location_schema_with_dropdown, data=data) + else: + node = InfrahubNodeSync(client=client, schema=location_schema_with_dropdown, data=data) + + assert node.status.value == "active" + node.status = None + assert node._generate_input_data()["data"] == { + "data": { + "name": {"value": "JFK1"}, + "description": {"value": "JFK Airport"}, + "type": {"value": "SITE"}, + "status": {"value": None}, + "primary_tag": None, + } + } + + @pytest.mark.parametrize("client_type", client_types) async def test_create_input_data__with_relationships_02(client, location_schema, client_type) -> None: """Validate input data with variables that needs replacements""" From 840d98a652292110ffee20257477295e21d0d250 Mon Sep 17 00:00:00 2001 From: Babatunde Olusola Date: Mon, 22 Sep 2025 17:36:52 +0100 Subject: [PATCH 13/16] IHS-147: Fix schema load failure exception (#555) * Fix schema load failure exception * Add towncrier housekeeping message --- changelog/464.housekeeping.md | 1 + infrahub_sdk/ctl/schema.py | 13 +++++++- tests/unit/sdk/test_schema.py | 60 +++++++++++++++++++++++++++++++++++ 3 files changed, 73 insertions(+), 1 deletion(-) create mode 100644 changelog/464.housekeeping.md diff --git a/changelog/464.housekeeping.md b/changelog/464.housekeeping.md new file mode 100644 index 00000000..9478ed71 --- /dev/null +++ b/changelog/464.housekeeping.md @@ -0,0 +1 @@ +Handle error gracefully when loading schema instead of failing with an exception diff --git a/infrahub_sdk/ctl/schema.py b/infrahub_sdk/ctl/schema.py index 8c18b395..0e6ce548 100644 --- a/infrahub_sdk/ctl/schema.py +++ b/infrahub_sdk/ctl/schema.py @@ -77,7 +77,18 @@ def display_schema_load_errors(response: dict[str, Any], schemas_data: list[Sche elif len(loc_path) > 6: loc_type = loc_path[5] - input_label = node[loc_type][loc_path[6]].get("name", None) + error_data = node[loc_type] + attribute = loc_path[6] + + if isinstance(attribute, str): + input_label = None + for data in error_data: + if data.get(attribute) is not None: + input_label = data.get("name", None) + break + else: + input_label = error_data[attribute].get("name", None) + input_str = error.get("input", None) error_message = f"{loc_type[:-1].title()}: {input_label} ({input_str}) | {error['msg']} ({error['type']})" console.print(f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}") diff --git a/tests/unit/sdk/test_schema.py b/tests/unit/sdk/test_schema.py index f5d1ce10..64c2984b 100644 --- a/tests/unit/sdk/test_schema.py +++ b/tests/unit/sdk/test_schema.py @@ -392,3 +392,63 @@ async def test_display_schema_load_errors_details_namespace(mock_get_node) -> No Node: OuTInstance | namespace (OuT) | String should match pattern '^[A-Z]+$' (string_pattern_mismatch) """ assert output == expected_console + + +@mock.patch( + "infrahub_sdk.ctl.schema.get_node", + return_value={ + "name": "TailscaleSSHRule", + "namespace": "Security", + "icon": "mdi:security", + "inherit_from": ["SecurityRule"], + "attributes": [ + { + "name": "check_period", + "kind": "Number", + "optional": True, + "default_value": 720, + "min_value": 0, + "max_value": 10080, + }, + {"name": "accept_env", "kind": "List", "optional": True}, + { + "name": "action", + "optional": True, + "kind": "Dropdown", + "default_value": "allow", + "choices": [ + {"label": "allow", "name": "allow"}, + {"label": "check", 
"name": "check"}, + ], + }, + ], + }, +) +async def test_display_schema_load_errors_details_when_error_is_in_attribute_or_relationship(mock_get_node) -> None: + """Validate error message with details when loading schema and errors are in attribute or relationship.""" + error = { + "detail": [ + { + "type": "extra_forbidden", + "loc": ["body", "schemas", 0, "nodes", 4, "attributes", "min_value"], + "msg": "Extra inputs are not permitted", + "input": 0, + }, + { + "type": "extra_forbidden", + "loc": ["body", "schemas", 0, "nodes", 4, "attributes", "max_value"], + "msg": "Extra inputs are not permitted", + "input": 10080, + }, + ] + } + + with mock.patch("infrahub_sdk.ctl.schema.console", Console(file=StringIO(), width=1000)) as console: + display_schema_load_errors(response=error, schemas_data=[]) + assert mock_get_node.call_count == 2 + output = console.file.getvalue() + expected_console = """Unable to load the schema: + Node: SecurityTailscaleSSHRule | Attribute: check_period (0) | Extra inputs are not permitted (extra_forbidden) + Node: SecurityTailscaleSSHRule | Attribute: check_period (10080) | Extra inputs are not permitted (extra_forbidden) +""" + assert output == expected_console From 6cdd7809db2f53887c71dd33313d1d93ad335b66 Mon Sep 17 00:00:00 2001 From: LucasG0 <44552904+LucasG0@users.noreply.github.com> Date: Thu, 25 Sep 2025 00:56:42 +0200 Subject: [PATCH 14/16] Add to object conversion input (#558) --- infrahub_sdk/convert_object_type.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/infrahub_sdk/convert_object_type.py b/infrahub_sdk/convert_object_type.py index fe7ee4b5..3e30dd2f 100644 --- a/infrahub_sdk/convert_object_type.py +++ b/infrahub_sdk/convert_object_type.py @@ -36,7 +36,7 @@ def check_only_one_field(self) -> ConversionFieldValue: fields = [self.attribute_value, self.peer_id, self.peers_ids] set_fields = [f for f in fields if f is not None] if len(set_fields) != 1: - raise ValueError("Exactly one of attribute_value, peer_id, or peers_ids must be set") + raise ValueError("Exactly one of `attribute_value`, `peer_id`, or `peers_ids` must be set") return self @@ -45,16 +45,17 @@ class ConversionFieldInput(BaseModel): Indicates how to fill in the value of the destination field during an object conversion. Use `source_field` to reuse the value of the corresponding field of the object being converted. Use `data` to specify the new value for the field. - Only one of `source_field` or `data` can be specified. + Use `use_default_value` to set the destination field to its schema default. + Only one of `source_field`, `data`, or `use_default_value` can be specified. 
""" source_field: str | None = None data: ConversionFieldValue | None = None + use_default_value: bool = False @model_validator(mode="after") def check_only_one_field(self) -> ConversionFieldInput: - if self.source_field is not None and self.data is not None: - raise ValueError("Only one of source_field or data can be set") - if self.source_field is None and self.data is None: - raise ValueError("Either source_field or data must be set") + fields_set = [self.source_field is not None, self.data is not None, self.use_default_value is True] + if sum(fields_set) != 1: + raise ValueError("Exactly one of `source_field`, `data` or `use_default_value` must be set") return self From 7858a622430a42ac05d787d07935ededc2ba1819 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Tue, 30 Sep 2025 09:20:46 +0000 Subject: [PATCH 15/16] Add changelog entry for issue #473 JsonDecodeError fix Co-authored-by: BeArchiTek <1334310+BeArchiTek@users.noreply.github.com> --- changelog/473.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/473.fixed.md diff --git a/changelog/473.fixed.md b/changelog/473.fixed.md new file mode 100644 index 00000000..22c5a5af --- /dev/null +++ b/changelog/473.fixed.md @@ -0,0 +1 @@ +JsonDecodeError now includes server response content in error message when JSON decoding fails, providing better debugging information for non-JSON server responses. \ No newline at end of file From 2c167737006f3d90ad997eb94fbb3e3a42afdad7 Mon Sep 17 00:00:00 2001 From: Alex Gittings Date: Tue, 30 Sep 2025 20:19:36 +0100 Subject: [PATCH 16/16] Object file range expansion (#561) * Add range expansion feature for object files and update documentation - Implemented range expansion for string fields in object files, allowing patterns like [1-5] to create multiple objects. - Added validation to ensure expanded lists have the same length. - Updated documentation to include usage examples and details on the new feature. - Added unit tests to verify range expansion functionality and error handling for mismatched lengths. * Refactor range expansion tests to clarify expected behavior for edge cases * Refactor range expansion logic to a standalone function for improved reusability and clarity * Rename variable for clarity in data expansion process within InfrahubObjectFileData class * Remove unnecessary range expansion in InfrahubObjectFileData class to streamline data handling --- changelog/560.added.md | 1 + docs/docs/python-sdk/topics/object_file.mdx | 83 ++++++++++++++ infrahub_sdk/spec/object.py | 47 +++++++- infrahub_sdk/spec/range_expansion.py | 118 ++++++++++++++++++++ tests/unit/sdk/spec/test_object.py | 77 +++++++++++++ tests/unit/sdk/test_range_expansion.py | 106 ++++++++++++++++++ 6 files changed, 428 insertions(+), 4 deletions(-) create mode 100644 changelog/560.added.md create mode 100644 infrahub_sdk/spec/range_expansion.py create mode 100644 tests/unit/sdk/test_range_expansion.py diff --git a/changelog/560.added.md b/changelog/560.added.md new file mode 100644 index 00000000..95fe3b37 --- /dev/null +++ b/changelog/560.added.md @@ -0,0 +1 @@ +Add the ability to perform range expansions in object files. This feature allows users to define patterns in string fields that will be expanded into multiple objects, facilitating bulk object creation and management. The implementation includes validation to ensure that all expanded lists have the same length, preventing inconsistencies. 
From 2c167737006f3d90ad997eb94fbb3e3a42afdad7 Mon Sep 17 00:00:00 2001 From: Alex Gittings Date: Tue, 30 Sep 2025 20:19:36 +0100 Subject: [PATCH 16/16] Object file range expansion (#561) * Add range expansion feature for object files and update documentation - Implemented range expansion for string fields in object files, allowing patterns like [1-5] to create multiple objects. - Added validation to ensure expanded lists have the same length. - Updated documentation to include usage examples and details on the new feature. - Added unit tests to verify range expansion functionality and error handling for mismatched lengths. * Refactor range expansion tests to clarify expected behavior for edge cases * Refactor range expansion logic to a standalone function for improved reusability and clarity * Rename variable for clarity in data expansion process within InfrahubObjectFileData class * Remove unnecessary range expansion in InfrahubObjectFileData class to streamline data handling --- changelog/560.added.md | 1 + docs/docs/python-sdk/topics/object_file.mdx | 83 ++++++++++++++ infrahub_sdk/spec/object.py | 47 +++++++- infrahub_sdk/spec/range_expansion.py | 118 ++++++++++++++++++++ tests/unit/sdk/spec/test_object.py | 77 +++++++++++++ tests/unit/sdk/test_range_expansion.py | 106 ++++++++++++++++++ 6 files changed, 428 insertions(+), 4 deletions(-) create mode 100644 changelog/560.added.md create mode 100644 infrahub_sdk/spec/range_expansion.py create mode 100644 tests/unit/sdk/test_range_expansion.py diff --git a/changelog/560.added.md b/changelog/560.added.md new file mode 100644 index 00000000..95fe3b37 --- /dev/null +++ b/changelog/560.added.md @@ -0,0 +1 @@ +Add the ability to perform range expansions in object files. This feature allows users to define patterns in string fields that will be expanded into multiple objects, facilitating bulk object creation and management. The implementation includes validation to ensure that all expanded lists have the same length, preventing inconsistencies. Documentation has been updated to explain how to use this feature, including examples of valid and invalid configurations. \ No newline at end of file diff --git a/docs/docs/python-sdk/topics/object_file.mdx b/docs/docs/python-sdk/topics/object_file.mdx index aebacb83..c5033eb5 100644 --- a/docs/docs/python-sdk/topics/object_file.mdx +++ b/docs/docs/python-sdk/topics/object_file.mdx @@ -195,3 +195,86 @@ Metadata support is planned for future releases. Currently, the Object file does 2. Keep object files organized by model type or purpose. 3. Validate object files before loading them into production environments. 4. Use comments in your YAML files to document complex relationships or dependencies. + +## Range Expansion in Object Files + +The Infrahub Python SDK supports **range expansion** for string fields in object files. This feature allows you to specify a range pattern (e.g., `[1-5]`) in any string value, and the SDK will automatically expand it into multiple objects during validation and processing. + +### How Range Expansion Works + +- Any string field containing a pattern like `[1-5]`, `[10-15]`, or `[1,3,5]` will be expanded into multiple objects. +- If multiple fields in the same object use range expansion, **all expanded lists must have the same length**. If not, validation will fail. +- The expansion is performed before validation and processing, so all downstream logic works on the expanded data. + +### Examples + +#### Single Field Expansion + +```yaml +spec: + kind: BuiltinLocation + data: + - name: AMS[1-3] + type: Country +``` + +This will expand to: + +```yaml +- name: AMS1 + type: Country +- name: AMS2 + type: Country +- name: AMS3 + type: Country +``` + +#### Multiple Field Expansion (Matching Lengths) + +```yaml +spec: + kind: BuiltinLocation + data: + - name: AMS[1-3] + description: Datacenter [A-C] + type: Country +``` + +This will expand to: + +```yaml +- name: AMS1 + description: Datacenter A + type: Country +- name: AMS2 + description: Datacenter B + type: Country +- name: AMS3 + description: Datacenter C + type: Country +``` + +#### Error: Mismatched Range Lengths + +If you use ranges of different lengths in multiple fields: + +```yaml +spec: + kind: BuiltinLocation + data: + - name: AMS[1-3] + description: "Datacenter [10-15]" + type: Country +``` + +This will **fail validation** with an error like: + +```bash +Range expansion mismatch: fields expanded to different lengths: [3, 6] +``` + +### Notes + +- Range expansion is supported for any string field in the `data` section. +- If no range pattern is present, the field is left unchanged. +- If expansion fails for any field, validation will fail with an error message. diff --git a/infrahub_sdk/spec/object.py b/infrahub_sdk/spec/object.py index 23a11c10..5bd54892 100644 --- a/infrahub_sdk/spec/object.py +++ b/infrahub_sdk/spec/object.py @@ -1,5 +1,7 @@ from __future__ import annotations +import copy +import re from enum import Enum from typing import TYPE_CHECKING, Any @@ -8,6 +10,7 @@ from ..exceptions import ObjectValidationError, ValidationError from ..schema import GenericSchemaAPI, RelationshipKind, RelationshipSchema from ..yaml import InfrahubFile, InfrahubFileKind +from .range_expansion import MATCH_PATTERN, range_expansion if TYPE_CHECKING: from ..client import InfrahubClient @@ -164,6 +167,37 @@ async def get_relationship_info( return info +def expand_data_with_ranges(data: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Expand any item in data whose string values contain a range pattern.
Supports multiple fields, requires equal expansion length.""" + range_pattern = re.compile(MATCH_PATTERN) + expanded = [] + for item in data: + # Find all fields to expand + expand_fields = {} + for key, value in item.items(): + if isinstance(value, str) and range_pattern.search(value): + try: + expand_fields[key] = range_expansion(value) + except Exception: + # If expansion fails, treat as no expansion + expand_fields[key] = [value] + if not expand_fields: + expanded.append(item) + continue + # Check all expanded lists have the same length + lengths = [len(v) for v in expand_fields.values()] + if len(set(lengths)) > 1: + raise ValidationError(f"Range expansion mismatch: fields expanded to different lengths: {lengths}") + n = lengths[0] + # Zip expanded values and produce new items + for i in range(n): + new_item = copy.deepcopy(item) + for key, values in expand_fields.items(): + new_item[key] = values[i] + expanded.append(new_item) + return expanded + + class InfrahubObjectFileData(BaseModel): kind: str data: list[dict[str, Any]] = Field(default_factory=list) @@ -171,7 +205,9 @@ class InfrahubObjectFileData(BaseModel): async def validate_format(self, client: InfrahubClient, branch: str | None = None) -> list[ObjectValidationError]: errors: list[ObjectValidationError] = [] schema = await client.schema.get(kind=self.kind, branch=branch) - for idx, item in enumerate(self.data): + expanded_data = expand_data_with_ranges(self.data) + self.data = expanded_data + for idx, item in enumerate(expanded_data): errors.extend( await self.validate_object( client=client, @@ -186,7 +222,8 @@ async def validate_format(self, client: InfrahubClient, branch: str | None = Non async def process(self, client: InfrahubClient, branch: str | None = None) -> None: schema = await client.schema.get(kind=self.kind, branch=branch) - for idx, item in enumerate(self.data): + expanded_data = expand_data_with_ranges(self.data) + for idx, item in enumerate(expanded_data): await self.create_node( client=client, schema=schema, @@ -311,7 +348,8 @@ async def validate_related_nodes( rel_info.find_matching_relationship(peer_schema=peer_schema) context.update(rel_info.get_context(value="placeholder")) - for idx, peer_data in enumerate(data["data"]): + expanded_data = expand_data_with_ranges(data=data["data"]) + for idx, peer_data in enumerate(expanded_data): context["list_index"] = idx errors.extend( await cls.validate_object( @@ -525,7 +563,8 @@ async def create_related_nodes( rel_info.find_matching_relationship(peer_schema=peer_schema) context.update(rel_info.get_context(value=parent_node.id)) - for idx, peer_data in enumerate(data["data"]): + expanded_data = expand_data_with_ranges(data=data["data"]) + for idx, peer_data in enumerate(expanded_data): context["list_index"] = idx if isinstance(peer_data, dict): node = await cls.create_node( diff --git a/infrahub_sdk/spec/range_expansion.py b/infrahub_sdk/spec/range_expansion.py new file mode 100644 index 00000000..441c589c --- /dev/null +++ b/infrahub_sdk/spec/range_expansion.py @@ -0,0 +1,118 @@ +import itertools +import re + +MATCH_PATTERN = r"(\[[\w,-]+\])" + + +def _escape_brackets(s: str) -> str: + return s.replace("\\[", "__LBRACK__").replace("\\]", "__RBRACK__") + + +def _unescape_brackets(s: str) -> str: + return s.replace("__LBRACK__", "[").replace("__RBRACK__", "]") + + +def _char_range_expand(char_range_str: str) -> list[str]: + """Expands a string of numbers or single-character letters.""" + expanded_values: list[str] = [] + # Special case: if no dash and no comma, and 
multiple characters, error if not all alphanumeric + if "," not in char_range_str and "-" not in char_range_str and len(char_range_str) > 1: + if not char_range_str.isalnum(): + raise ValueError(f"Invalid non-alphanumeric range: [{char_range_str}]") + return list(char_range_str) + + for value in char_range_str.split(","): + if not value: + # Malformed: empty part in comma-separated list + return [f"[{char_range_str}]"] + if "-" in value: + start_char, end_char = value.split("-", 1) + if not start_char or not end_char: + expanded_values.append(f"[{char_range_str}]") + return expanded_values + # Check if it's a numeric range + if start_char.isdigit() and end_char.isdigit(): + start_num = int(start_char) + end_num = int(end_char) + step = 1 if start_num <= end_num else -1 + expanded_values.extend(str(i) for i in range(start_num, end_num + step, step)) + # Check if it's an alphabetical range (single character) + elif len(start_char) == 1 and len(end_char) == 1 and start_char.isalpha() and end_char.isalpha(): + start_ord = ord(start_char) + end_ord = ord(end_char) + step = 1 if start_ord <= end_ord else -1 + is_upper = start_char.isupper() + for i in range(start_ord, end_ord + step, step): + char = chr(i) + expanded_values.append(char.upper() if is_upper else char) + else: + # Mixed or unsupported range type, append as-is + expanded_values.append(value) + else: + # If the value is a single character or valid alphanumeric string, append + if not value.isalnum(): + raise ValueError(f"Invalid non-alphanumeric value: [{value}]") + expanded_values.append(value) + return expanded_values + + +def _extract_constants(pattern: str, re_compiled: re.Pattern) -> tuple[list[int], list[list[str]]]: + cartesian_list = [] + interface_constant = [0] + for match in re_compiled.finditer(pattern): + interface_constant.append(match.start()) + interface_constant.append(match.end()) + cartesian_list.append(_char_range_expand(match.group()[1:-1])) + return interface_constant, cartesian_list + + +def _expand_interfaces(pattern: str, interface_constant: list[int], cartesian_list: list[list[str]]) -> list[str]: + def _pairwise(lst: list[int]) -> list[tuple[int, int]]: + it = iter(lst) + return list(zip(it, it)) + + if interface_constant[-1] < len(pattern): + interface_constant.append(len(pattern)) + interface_constant_out = _pairwise(interface_constant) + expanded_interfaces = [] + for element in itertools.product(*cartesian_list): + current_interface = "" + for count, item in enumerate(interface_constant_out): + current_interface += pattern[item[0] : item[1]] + if count < len(element): + current_interface += element[count] + expanded_interfaces.append(_unescape_brackets(current_interface)) + return expanded_interfaces + + +def range_expansion(interface_pattern: str) -> list[str]: + """Expand string pattern into a list of strings, supporting both + number and single-character alphabet ranges. Heavily inspired by + Netutils interface_range_expansion but adapted to support letters. + + Args: + interface_pattern: The string pattern that will be parsed to create the list of interfaces. + + Returns: + Contains the expanded list of interfaces. 
+ + Examples: + >>> from infrahub_sdk.spec.range_expansion import range_expansion + >>> range_expansion("Device [A-C]") + ['Device A', 'Device B', 'Device C'] + >>> range_expansion("FastEthernet[1-2]/0/[10-15]") + ['FastEthernet1/0/10', 'FastEthernet1/0/11', 'FastEthernet1/0/12', + 'FastEthernet1/0/13', 'FastEthernet1/0/14', 'FastEthernet1/0/15', + 'FastEthernet2/0/10', 'FastEthernet2/0/11', 'FastEthernet2/0/12', + 'FastEthernet2/0/13', 'FastEthernet2/0/14', 'FastEthernet2/0/15'] + >>> range_expansion("GigabitEthernet[a-c]/0/1") + ['GigabitEtherneta/0/1', 'GigabitEthernetb/0/1', 'GigabitEthernetc/0/1'] + >>> range_expansion("Eth[a,c,e]/0/1") + ['Etha/0/1', 'Ethc/0/1', 'Ethe/0/1'] + """ + pattern_escaped = _escape_brackets(interface_pattern) + re_compiled = re.compile(MATCH_PATTERN) + if not re_compiled.search(pattern_escaped): + return [_unescape_brackets(pattern_escaped)] + interface_constant, cartesian_list = _extract_constants(pattern_escaped, re_compiled) + return _expand_interfaces(pattern_escaped, interface_constant, cartesian_list) diff --git a/tests/unit/sdk/spec/test_object.py b/tests/unit/sdk/spec/test_object.py index 29f06391..dbe517ab 100644 --- a/tests/unit/sdk/spec/test_object.py +++ b/tests/unit/sdk/spec/test_object.py @@ -43,6 +43,47 @@ def location_bad_syntax02(root_location: dict) -> dict: return location +@pytest.fixture +def location_expansion(root_location: dict) -> dict: + data = [ + { + "name": "AMS[1-5]", + "type": "Country", + } + ] + location = root_location.copy() + location["spec"]["data"] = data + return location + + +@pytest.fixture +def location_expansion_multiple_ranges(root_location: dict) -> dict: + data = [ + { + "name": "AMS[1-5]", + "type": "Country", + "description": "Amsterdam datacenter [a,e,i,o,u]", + } + ] + location = root_location.copy() + location["spec"]["data"] = data + return location + + +@pytest.fixture +def location_expansion_multiple_ranges_bad_syntax(root_location: dict) -> dict: + data = [ + { + "name": "AMS[1-5]", + "type": "Country", + "description": "Amsterdam datacenter [10-15]", + } + ] + location = root_location.copy() + location["spec"]["data"] = data + return location + + async def test_validate_object(client: InfrahubClient, mock_schema_query_01: HTTPXMock, location_mexico_01) -> None: obj = ObjectFile(location="some/path", content=location_mexico_01) await obj.validate_format(client=client) @@ -70,6 +111,42 @@ async def test_validate_object_bad_syntax02( assert "notvalidattribute" in str(exc.value) +async def test_validate_object_expansion( + client: InfrahubClient, mock_schema_query_01: HTTPXMock, location_expansion +) -> None: + obj = ObjectFile(location="some/path", content=location_expansion) + await obj.validate_format(client=client) + + assert obj.spec.kind == "BuiltinLocation" + assert len(obj.spec.data) == 5 + assert obj.spec.data[0]["name"] == "AMS1" + assert obj.spec.data[4]["name"] == "AMS5" + + +async def test_validate_object_expansion_multiple_ranges( + client: InfrahubClient, mock_schema_query_01: HTTPXMock, location_expansion_multiple_ranges +) -> None: + obj = ObjectFile(location="some/path", content=location_expansion_multiple_ranges) + await obj.validate_format(client=client) + + assert obj.spec.kind == "BuiltinLocation" + assert len(obj.spec.data) == 5 + assert obj.spec.data[0]["name"] == "AMS1" + assert obj.spec.data[0]["description"] == "Amsterdam datacenter a" + assert obj.spec.data[4]["name"] == "AMS5" + assert obj.spec.data[4]["description"] == "Amsterdam datacenter u" + + +async def 
test_validate_object_expansion_multiple_ranges_bad_syntax( + client: InfrahubClient, mock_schema_query_01: HTTPXMock, location_expansion_multiple_ranges_bad_syntax +) -> None: + obj = ObjectFile(location="some/path", content=location_expansion_multiple_ranges_bad_syntax) + with pytest.raises(ValidationError) as exc: + await obj.validate_format(client=client) + + assert "Range expansion mismatch" in str(exc.value) + + get_relationship_info_testdata = [ pytest.param( [ diff --git a/tests/unit/sdk/test_range_expansion.py b/tests/unit/sdk/test_range_expansion.py new file mode 100644 index 00000000..26d817c0 --- /dev/null +++ b/tests/unit/sdk/test_range_expansion.py @@ -0,0 +1,106 @@ +from infrahub_sdk.spec.range_expansion import range_expansion + + +def test_number_range_expansion() -> None: + assert range_expansion("Device[1-3]") == ["Device1", "Device2", "Device3"] + assert range_expansion("FastEthernet[1-2]/0/[10-12]") == [ + "FastEthernet1/0/10", + "FastEthernet1/0/11", + "FastEthernet1/0/12", + "FastEthernet2/0/10", + "FastEthernet2/0/11", + "FastEthernet2/0/12", + ] + + +def test_letter_range_expansion() -> None: + assert range_expansion("Device [A-C]") == ["Device A", "Device B", "Device C"] + assert range_expansion("GigabitEthernet[a-c]/0/1") == [ + "GigabitEtherneta/0/1", + "GigabitEthernetb/0/1", + "GigabitEthernetc/0/1", + ] + assert range_expansion("Eth[a,c,e]/0/1") == [ + "Etha/0/1", + "Ethc/0/1", + "Ethe/0/1", + ] + + +def test_mixed_range_expansion() -> None: + assert range_expansion("Device[1-2,A-C]") == [ + "Device1", + "Device2", + "DeviceA", + "DeviceB", + "DeviceC", + ] + assert range_expansion("Interface[1-2,a-c]/0/[10-11,x,z]") == [ + "Interface1/0/10", + "Interface1/0/11", + "Interface1/0/x", + "Interface1/0/z", + "Interface2/0/10", + "Interface2/0/11", + "Interface2/0/x", + "Interface2/0/z", + "Interfacea/0/10", + "Interfacea/0/11", + "Interfacea/0/x", + "Interfacea/0/z", + "Interfaceb/0/10", + "Interfaceb/0/11", + "Interfaceb/0/x", + "Interfaceb/0/z", + "Interfacec/0/10", + "Interfacec/0/11", + "Interfacec/0/x", + "Interfacec/0/z", + ] + + +def test_single_value_in_brackets() -> None: + assert range_expansion("Device[5]") == ["Device5"] + + +def test_empty_brackets() -> None: + assert range_expansion("Device[]") == ["Device[]"] + + +def test_no_brackets() -> None: + assert range_expansion("Device1") == ["Device1"] + + +def test_malformed_ranges() -> None: + assert range_expansion("Device[1-]") == ["Device[1-]"] + assert range_expansion("Device[-3]") == ["Device[-3]"] + assert range_expansion("Device[a-]") == ["Device[a-]"] + assert range_expansion("Device[1-3,]") == ["Device[1-3,]"] + + +def test_duplicate_and_overlapping_values() -> None: + assert range_expansion("Device[1,1,2]") == ["Device1", "Device1", "Device2"] + + +def test_whitespace_handling() -> None: + assert range_expansion("Device[ 1 - 3 ]") == ["Device[ 1 - 3 ]"] + + +def test_descending_ranges() -> None: + assert range_expansion("Device[3-1]") == ["Device3", "Device2", "Device1"] + + +def test_multiple_bracketed_ranges_in_a_row() -> None: + assert range_expansion("Dev[A-B][1-2]") == ["DevA1", "DevA2", "DevB1", "DevB2"] + + +def test_non_alphanumeric_ranges() -> None: + assert range_expansion("Port[!@#]") == ["Port[!@#]"] + + +def test_unicode_ranges() -> None: + assert range_expansion("Dev[α-γ]") == ["Devα", "Devβ", "Devγ"] # noqa: RUF001 + + +def test_brackets_in_strings() -> None: + assert range_expansion(r"Service Object [Circuit Provider, X]") == ["Service Object [Circuit Provider, X]"]
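
To round out the range-expansion patch, here is a small self-contained sketch of the two helpers as they land in this series; the sample rows are illustrative:

from infrahub_sdk.exceptions import ValidationError
from infrahub_sdk.spec.object import expand_data_with_ranges
from infrahub_sdk.spec.range_expansion import range_expansion

# Plain pattern expansion, as exercised by the unit tests above.
assert range_expansion("AMS[1-3]") == ["AMS1", "AMS2", "AMS3"]

# Object-file rows: every string field carrying a pattern is expanded in lockstep.
rows = expand_data_with_ranges([{"name": "AMS[1-3]", "description": "Datacenter [A-C]", "type": "Country"}])
assert [row["name"] for row in rows] == ["AMS1", "AMS2", "AMS3"]
assert rows[0]["description"] == "Datacenter A"

# Fields expanding to different lengths are rejected, matching the docs above.
try:
    expand_data_with_ranges([{"name": "AMS[1-3]", "description": "Datacenter [10-15]"}])
except ValidationError as exc:
    print(exc)  # Range expansion mismatch: fields expanded to different lengths: [3, 6]
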