Skip to content

Commit f82744b

Browse files
authored
Merge pull request #458 from opsmill/stable
Merge stable into develop
2 parents 4717ffc + 16da1ba commit f82744b

File tree

16 files changed

+162
-37
lines changed

16 files changed

+162
-37
lines changed

CHANGELOG.md

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,28 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang
1111

1212
<!-- towncrier release notes start -->
1313

14+
## [1.13.5](https://github.com/opsmill/infrahub-sdk-python/tree/v1.13.5) - 2025-07-23
15+
16+
### Fixed
17+
18+
- Respect ordering when loading files from a directory
19+
20+
## [1.13.4](https://github.com/opsmill/infrahub-sdk-python/tree/v1.13.4) - 2025-07-22
21+
22+
### Fixed
23+
24+
- Fix processing of relationship during nodes retrieval using the Sync Client, when prefetching related_nodes. ([#461](https://github.com/opsmill/infrahub-sdk-python/issues/461))
25+
- Fix schema loading to ignore non-YAML files in folders. ([#462](https://github.com/opsmill/infrahub-sdk-python/issues/462))
26+
- Fix ignored node variable in filters(). ([#469](https://github.com/opsmill/infrahub-sdk-python/issues/469))
27+
- Fix use of parallel with filters for Infrahub Client Sync.
28+
- Avoid sending an empty list to Infrahub if no valid schemas are found.
29+
30+
## [1.13.3](https://github.com/opsmill/infrahub-sdk-python/tree/v1.13.3) - 2025-06-30
31+
32+
### Fixed
33+
34+
- Update InfrahubNode creation to include __typename, display_label, and kind from a RelatedNode ([#455](https://github.com/opsmill/infrahub-sdk-python/issues/455))
35+
1436
## [1.13.2](https://github.com/opsmill/infrahub-sdk-python/tree/v1.13.2) - 2025-06-27
1537

1638
### Fixed

changelog/+batch.fixed.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Create a new batch while fetching relationships instead of reusing the same one.

changelog/+branch-in-count.fixed.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Update internal calls to `count` to include the branch parameter so that the query is performed on the correct branch

infrahub_sdk/client.py

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -784,7 +784,6 @@ async def filters(
784784
if at:
785785
at = Timestamp(at)
786786

787-
node = InfrahubNode(client=self, schema=schema, branch=branch)
788787
filters = kwargs
789788
pagination_size = self.pagination_size
790789

@@ -825,12 +824,12 @@ async def process_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]:
825824
nodes = []
826825
related_nodes = []
827826
batch_process = await self.create_batch()
828-
count = await self.count(kind=schema.kind, partial_match=partial_match, **filters)
827+
count = await self.count(kind=schema.kind, branch=branch, partial_match=partial_match, **filters)
829828
total_pages = (count + pagination_size - 1) // pagination_size
830829

831830
for page_number in range(1, total_pages + 1):
832831
page_offset = (page_number - 1) * pagination_size
833-
batch_process.add(task=process_page, node=node, page_offset=page_offset, page_number=page_number)
832+
batch_process.add(task=process_page, page_offset=page_offset, page_number=page_number)
834833

835834
async for _, response in batch_process.execute():
836835
nodes.extend(response[1]["nodes"])
@@ -847,7 +846,7 @@ async def process_non_batch() -> tuple[list[InfrahubNode], list[InfrahubNode]]:
847846

848847
while has_remaining_items:
849848
page_offset = (page_number - 1) * pagination_size
850-
response, process_result = await process_page(page_offset, page_number)
849+
response, process_result = await process_page(page_offset=page_offset, page_number=page_number)
851850

852851
nodes.extend(process_result["nodes"])
853852
related_nodes.extend(process_result["related_nodes"])
@@ -1946,9 +1945,9 @@ def filters(
19461945
"""
19471946
branch = branch or self.default_branch
19481947
schema = self.schema.get(kind=kind, branch=branch)
1949-
node = InfrahubNodeSync(client=self, schema=schema, branch=branch)
19501948
if at:
19511949
at = Timestamp(at)
1950+
19521951
filters = kwargs
19531952
pagination_size = self.pagination_size
19541953

@@ -1990,12 +1989,12 @@ def process_batch() -> tuple[list[InfrahubNodeSync], list[InfrahubNodeSync]]:
19901989
related_nodes = []
19911990
batch_process = self.create_batch()
19921991

1993-
count = self.count(kind=schema.kind, partial_match=partial_match, **filters)
1992+
count = self.count(kind=schema.kind, branch=branch, partial_match=partial_match, **filters)
19941993
total_pages = (count + pagination_size - 1) // pagination_size
19951994

19961995
for page_number in range(1, total_pages + 1):
19971996
page_offset = (page_number - 1) * pagination_size
1998-
batch_process.add(task=process_page, node=node, page_offset=page_offset, page_number=page_number)
1997+
batch_process.add(task=process_page, page_offset=page_offset, page_number=page_number)
19991998

20001999
for _, response in batch_process.execute():
20012000
nodes.extend(response[1]["nodes"])
@@ -2012,7 +2011,7 @@ def process_non_batch() -> tuple[list[InfrahubNodeSync], list[InfrahubNodeSync]]
20122011

20132012
while has_remaining_items:
20142013
page_offset = (page_number - 1) * pagination_size
2015-
response, process_result = process_page(page_offset, page_number)
2014+
response, process_result = process_page(page_offset=page_offset, page_number=page_number)
20162015

20172016
nodes.extend(process_result["nodes"])
20182017
related_nodes.extend(process_result["related_nodes"])

infrahub_sdk/ctl/utils.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -187,6 +187,9 @@ def load_yamlfile_from_disk_and_exit(
187187
has_error = False
188188
try:
189189
data_files = file_type.load_from_disk(paths=paths)
190+
if not data_files:
191+
console.print("[red]No valid files found to load.")
192+
raise typer.Exit(1)
190193
except FileNotValidError as exc:
191194
console.print(f"[red]{exc.message}")
192195
raise typer.Exit(1) from exc

infrahub_sdk/node/node.py

Lines changed: 28 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -402,10 +402,10 @@ def generate_query_data_init(
402402
if order:
403403
data["@filters"]["order"] = order
404404

405-
if offset:
405+
if offset is not None:
406406
data["@filters"]["offset"] = offset
407407

408-
if limit:
408+
if limit is not None:
409409
data["@filters"]["limit"] = limit
410410

411411
if include and exclude:
@@ -507,11 +507,17 @@ def _init_relationships(self, data: dict | RelatedNode | None = None) -> None:
507507

508508
if rel_schema.cardinality == "one":
509509
if isinstance(rel_data, RelatedNode):
510-
peer_id_data: dict[str, Any] = {}
511-
if rel_data.id:
512-
peer_id_data["id"] = rel_data.id
513-
if rel_data.hfid:
514-
peer_id_data["hfid"] = rel_data.hfid
510+
peer_id_data: dict[str, Any] = {
511+
key: value
512+
for key, value in (
513+
("id", rel_data.id),
514+
("hfid", rel_data.hfid),
515+
("__typename", rel_data.typename),
516+
("kind", rel_data.kind),
517+
("display_label", rel_data.display_label),
518+
)
519+
if value is not None
520+
}
515521
if peer_id_data:
516522
rel_data = peer_id_data
517523
else:
@@ -1090,11 +1096,17 @@ def _init_relationships(self, data: dict | None = None) -> None:
10901096

10911097
if rel_schema.cardinality == "one":
10921098
if isinstance(rel_data, RelatedNodeSync):
1093-
peer_id_data: dict[str, Any] = {}
1094-
if rel_data.id:
1095-
peer_id_data["id"] = rel_data.id
1096-
if rel_data.hfid:
1097-
peer_id_data["hfid"] = rel_data.hfid
1099+
peer_id_data: dict[str, Any] = {
1100+
key: value
1101+
for key, value in (
1102+
("id", rel_data.id),
1103+
("hfid", rel_data.hfid),
1104+
("__typename", rel_data.typename),
1105+
("kind", rel_data.kind),
1106+
("display_label", rel_data.display_label),
1107+
)
1108+
if value is not None
1109+
}
10981110
if peer_id_data:
10991111
rel_data = peer_id_data
11001112
else:
@@ -1481,15 +1493,15 @@ def _process_relationships(
14811493
for rel_name in self._relationships:
14821494
rel = getattr(self, rel_name)
14831495
if rel and isinstance(rel, RelatedNodeSync):
1484-
relation = node_data["node"].get(rel_name)
1485-
if relation.get("node", None):
1496+
relation = node_data["node"].get(rel_name, None)
1497+
if relation and relation.get("node", None):
14861498
related_node = InfrahubNodeSync.from_graphql(
14871499
client=self._client, branch=branch, data=relation, timeout=timeout
14881500
)
14891501
related_nodes.append(related_node)
14901502
elif rel and isinstance(rel, RelationshipManagerSync):
1491-
peers = node_data["node"].get(rel_name)
1492-
if peers:
1503+
peers = node_data["node"].get(rel_name, None)
1504+
if peers and peers["edges"]:
14931505
for peer in peers["edges"]:
14941506
related_node = InfrahubNodeSync.from_graphql(
14951507
client=self._client, branch=branch, data=peer, timeout=timeout

infrahub_sdk/node/related_node.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@ def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict,
3939
self._hfid: list[str] | None = None
4040
self._display_label: str | None = None
4141
self._typename: str | None = None
42+
self._kind: str | None = None
4243

4344
if isinstance(data, (CoreNodeBase)):
4445
self._peer = data
@@ -118,6 +119,12 @@ def typename(self) -> str | None:
118119
return self._peer.typename
119120
return self._typename
120121

122+
@property
123+
def kind(self) -> str | None:
124+
if self._peer:
125+
return self._peer.get_kind()
126+
return self._kind
127+
121128
def _generate_input_data(self, allocate_from_pool: bool = False) -> dict[str, Any]:
122129
data: dict[str, Any] = {}
123130

infrahub_sdk/node/relationship.py

Lines changed: 43 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,15 @@
11
from __future__ import annotations
22

3+
from collections import defaultdict
34
from collections.abc import Iterable
45
from typing import TYPE_CHECKING, Any
56

7+
from ..batch import InfrahubBatch
68
from ..exceptions import (
9+
Error,
710
UninitializedError,
811
)
12+
from ..types import Order
913
from .constants import PROPERTIES_FLAG, PROPERTIES_OBJECT
1014
from .related_node import RelatedNode, RelatedNodeSync
1115

@@ -156,8 +160,26 @@ async def fetch(self) -> None:
156160
self.peers = rm.peers
157161
self.initialized = True
158162

163+
ids_per_kind_map = defaultdict(list)
159164
for peer in self.peers:
160-
await peer.fetch() # type: ignore[misc]
165+
if not peer.id or not peer.typename:
166+
raise Error("Unable to fetch the peer, id and/or typename are not defined")
167+
ids_per_kind_map[peer.typename].append(peer.id)
168+
169+
batch = InfrahubBatch(max_concurrent_execution=self.client.max_concurrent_execution)
170+
for kind, ids in ids_per_kind_map.items():
171+
batch.add(
172+
task=self.client.filters,
173+
kind=kind,
174+
ids=ids,
175+
populate_store=True,
176+
branch=self.branch,
177+
parallel=True,
178+
order=Order(disable=True),
179+
)
180+
181+
async for _ in batch.execute():
182+
pass
161183

162184
def add(self, data: str | RelatedNode | dict) -> None:
163185
"""Add a new peer to this relationship."""
@@ -261,8 +283,27 @@ def fetch(self) -> None:
261283
self.peers = rm.peers
262284
self.initialized = True
263285

286+
ids_per_kind_map = defaultdict(list)
264287
for peer in self.peers:
265-
peer.fetch()
288+
if not peer.id or not peer.typename:
289+
raise Error("Unable to fetch the peer, id and/or typename are not defined")
290+
ids_per_kind_map[peer.typename].append(peer.id)
291+
292+
# Unlike Async, no need to create a new batch from scratch because we are not using a semaphore
293+
batch = self.client.create_batch()
294+
for kind, ids in ids_per_kind_map.items():
295+
batch.add(
296+
task=self.client.filters,
297+
kind=kind,
298+
ids=ids,
299+
populate_store=True,
300+
branch=self.branch,
301+
parallel=True,
302+
order=Order(disable=True),
303+
)
304+
305+
for _ in batch.execute():
306+
pass
266307

267308
def add(self, data: str | RelatedNodeSync | dict) -> None:
268309
"""Add a new peer to this relationship."""

infrahub_sdk/yaml.py

Lines changed: 13 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -120,16 +120,22 @@ def load_file_from_disk(cls, path: Path) -> list[Self]:
120120
@classmethod
121121
def load_from_disk(cls, paths: list[Path]) -> list[Self]:
122122
yaml_files: list[Self] = []
123+
file_extensions = {".yaml", ".yml", ".json"} # FIXME: .json is not a YAML file, should be removed
124+
123125
for file_path in paths:
124-
if file_path.is_file() and file_path.suffix in [".yaml", ".yml", ".json"]:
125-
yaml_files.extend(cls.load_file_from_disk(path=file_path))
126+
if not file_path.exists():
127+
# Check if the provided path exists, relevant for the first call coming from the user
128+
raise FileNotValidError(name=str(file_path), message=f"{file_path} does not exist!")
129+
if file_path.is_file():
130+
if file_path.suffix in file_extensions:
131+
yaml_files.extend(cls.load_file_from_disk(path=file_path))
132+
# else: silently skip files with irrelevant extensions (e.g. .md, .py...)
126133
elif file_path.is_dir():
134+
# Introduce recursion to handle sub-folders
127135
sub_paths = [Path(sub_file_path) for sub_file_path in file_path.glob("*")]
128-
sub_files = cls.load_from_disk(paths=sub_paths)
129-
sorted_sub_files = sorted(sub_files, key=lambda x: x.location)
130-
yaml_files.extend(sorted_sub_files)
131-
else:
132-
raise FileNotValidError(name=str(file_path), message=f"{file_path} does not exist!")
136+
sub_paths = sorted(sub_paths, key=lambda p: p.name)
137+
yaml_files.extend(cls.load_from_disk(paths=sub_paths))
138+
# else: skip non-file, non-dir (e.g., symlink...)
133139

134140
return yaml_files
135141

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "infrahub-sdk"
3-
version = "1.13.2"
3+
version = "1.13.5"
44
description = "Python Client to interact with Infrahub"
55
authors = ["OpsMill <[email protected]>"]
66
readme = "README.md"

0 commit comments

Comments
 (0)