Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
123 changes: 31 additions & 92 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,16 @@
![Python Versions](https://img.shields.io/pypi/pyversions/pydantic-resolve)
[![CI](https://github.com/allmonday/pydantic_resolve/actions/workflows/ci.yml/badge.svg)](https://github.com/allmonday/pydantic_resolve/actions/workflows/ci.yml)

Pydantic-resolve is a framework for composing complex data structures with an intuitive, declarative, resolver-based way, and then let the data easy to understand and adjust.
Pydantic-resolve is a framework for composing complex data structures in an intuitive, declarative, resolver-based way, making the data easy to understand and adjust.

It provides three major functions to facilitate the acquisition and modification of multi-layered data.

- pluggable resolve methods and post methods, which define how to fetch and modify nodes.
- transporting field data from ancestor nodes to their descendant nodes, through multiple layers.
- collecting data from any descendant nodes to their ancestor nodes, through multiple layers.

It supports:

- pydantic v1
- pydantic v2
- dataclass `from pydantic.dataclasses import dataclass`
Expand All @@ -30,12 +32,12 @@ from biz_services import UserLoader, StoryTaskLoader

class Task(BaseTask):
user: Optional[BaseUser] = None
def resolve_user(self, loader=LoaderDepend(UserLoader)):
def resolve_user(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id) if self.assignee_id else None

class Story(BaseStory):
tasks: list[Task] = []
def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
tasks: list[Task] = []
def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
# this loader returns BaseTask,
        # Task inherits from BaseTask so that it can be initialized from it, then fetch the user.
return loader.load(self.id)
Expand All @@ -47,6 +49,7 @@ data = await Resolver().resolve(stories)
then it will transform flat stories into complicated stories with rich details:

BaseStory

```json
[
{ "id": 1, "name": "story - 1" },
Expand All @@ -55,6 +58,7 @@ BaseStory
```

Story

```json
[
{
Expand Down Expand Up @@ -110,7 +114,7 @@ Building complex data structures requires only 3 systematic steps, let's take

Establish entity relationships as foundational data models (stable, serves as architectural blueprint)

<img width="639" alt="image" src="https://github.com/user-attachments/assets/2656f72e-1af5-467a-96f9-cab95760b720" />
<img width="630px" alt="image" src="https://github.com/user-attachments/assets/2656f72e-1af5-467a-96f9-cab95760b720" />

```python
from pydantic import BaseModel
Expand Down Expand Up @@ -156,27 +160,27 @@ DataLoader implementations support flexible data sources, from database queries

Based on a specific business logic, create domain-specific data structures through selective schemas and relationship dataloader (stable, reusable across use cases)

<img width="709" alt="image" src="https://github.com/user-attachments/assets/ffc74e60-0670-475c-85ab-cb0d03460813" />
<img width="630px" alt="image" src="https://github.com/user-attachments/assets/ffc74e60-0670-475c-85ab-cb0d03460813" />

```python
from pydantic_resolve import LoaderDepend
from pydantic_resolve import Loader

class Task(BaseTask):
user: Optional[BaseUser] = None
def resolve_user(self, loader=LoaderDepend(UserLoader)):
def resolve_user(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id) if self.assignee_id else None

class Story(BaseStory):
tasks: list[Task] = []
def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
return loader.load(self.id)

assignee: Optional[BaseUser] = None
def resolve_assignee(self, loader=LoaderDepend(UserLoader)):
def resolve_assignee(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id) if self.assignee_id else None

reporter: Optional[BaseUser] = None
def resolve_reporter(self, loader=LoaderDepend(UserLoader)):
def resolve_reporter(self, loader=Loader(UserLoader)):
return loader.load(self.report_to) if self.report_to else None
```

Expand All @@ -190,7 +194,7 @@ class Story(BaseModel):
report_to: int

tasks: list[BaseTask] = []
def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
return loader.load(self.id)

```
Expand All @@ -205,29 +209,29 @@ Leverage post_field methods for ancestor data access, node transfers, and in-pla

#### Case 1: Aggregate or collect items

<img width="701" alt="image" src="https://github.com/user-attachments/assets/2e3b1345-9e5e-489b-a81d-dc220b9d6334" />
<img width="630px" alt="image" src="https://github.com/user-attachments/assets/2e3b1345-9e5e-489b-a81d-dc220b9d6334" />

```python
from pydantic_resolve import LoaderDepend, Collector
from pydantic_resolve import Loader, Collector

class Task(BaseTask):
__pydantic_resolve_collect__ = {'user': 'related_users'} # Propagate user to collector: 'related_users'

user: Optional[BaseUser] = None
def resolve_user(self, loader=LoaderDepend(UserLoader)):
def resolve_user(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id)

class Story(BaseStory):
tasks: list[Task] = []
def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
return loader.load(self.id)

assignee: Optional[BaseUser] = None
def resolve_assignee(self, loader=LoaderDepend(UserLoader)):
def resolve_assignee(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id)

reporter: Optional[BaseUser] = None
def resolve_reporter(self, loader=LoaderDepend(UserLoader)):
def resolve_reporter(self, loader=Loader(UserLoader)):
return loader.load(self.report_to)

# ---------- Post-processing ------------
Expand All @@ -238,20 +242,20 @@ class Story(BaseStory):

#### Case 2: Compute extra fields

<img width="687" alt="image" src="https://github.com/user-attachments/assets/fd5897d6-1c6a-49ec-aab0-495070054b83" />
<img width="630px" alt="image" src="https://github.com/user-attachments/assets/fd5897d6-1c6a-49ec-aab0-495070054b83" />

```python
class Story(BaseStory):
tasks: list[Task] = []
def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
return loader.load(self.id)

assignee: Optional[BaseUser] = None
def resolve_assignee(self, loader=LoaderDepend(UserLoader)):
def resolve_assignee(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id)

reporter: Optional[BaseUser] = None
def resolve_reporter(self, loader=LoaderDepend(UserLoader)):
def resolve_reporter(self, loader=Loader(UserLoader)):
return loader.load(self.report_to)

# ---------- Post-processing ------------
Expand All @@ -263,11 +267,11 @@ class Story(BaseStory):
### Case 3: Propagate ancestor data through ancestor_context

```python
from pydantic_resolve import LoaderDepend
from pydantic_resolve import Loader

class Task(BaseTask):
user: Optional[BaseUser] = None
def resolve_user(self, loader=LoaderDepend(UserLoader)):
def resolve_user(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id)

# ---------- Post-processing ------------
Expand All @@ -278,15 +282,15 @@ class Story(BaseStory):
__pydantic_resolve_expose__ = {'name': 'story_name'}

tasks: list[Task] = []
def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
return loader.load(self.id)

assignee: Optional[BaseUser] = None
def resolve_assignee(self, loader=LoaderDepend(UserLoader)):
def resolve_assignee(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id)

reporter: Optional[BaseUser] = None
def resolve_reporter(self, loader=LoaderDepend(UserLoader)):
def resolve_reporter(self, loader=Loader(UserLoader)):
return loader.load(self.report_to)
```

Expand Down Expand Up @@ -345,71 +349,6 @@ python -m http.server

Current test coverage: 97%

## Benchmark

`ab -c 50 -n 1000` based on FastAPI.

strawberry-graphql (including cost of parsing query statements)

```
Server Software: uvicorn
Server Hostname: localhost
Server Port: 8000

Document Path: /graphql
Document Length: 5303 bytes

Concurrency Level: 50
Time taken for tests: 3.630 seconds
Complete requests: 1000
Failed requests: 0
Total transferred: 5430000 bytes
Total body sent: 395000
HTML transferred: 5303000 bytes
Requests per second: 275.49 [#/sec] (mean)
Time per request: 181.498 [ms] (mean)
Time per request: 3.630 [ms] (mean, across all concurrent requests)
Transfer rate: 1460.82 [Kbytes/sec] received
106.27 kb/s sent
1567.09 kb/s total

Connection Times (ms)
min mean[+/-sd] median max
Connect: 0 0 0.2 0 1
Processing: 31 178 14.3 178 272
Waiting: 30 176 14.3 176 270
Total: 31 178 14.4 179 273
```

pydantic-resolve

```
Server Software: uvicorn
Server Hostname: localhost
Server Port: 8000

Document Path: /sprints
Document Length: 4621 bytes

Concurrency Level: 50
Time taken for tests: 2.194 seconds
Complete requests: 1000
Failed requests: 0
Total transferred: 4748000 bytes
HTML transferred: 4621000 bytes
Requests per second: 455.79 [#/sec] (mean)
Time per request: 109.700 [ms] (mean)
Time per request: 2.194 [ms] (mean, across all concurrent requests)
Transfer rate: 2113.36 [Kbytes/sec] received

Connection Times (ms)
min mean[+/-sd] median max
Connect: 0 0 0.3 0 1
Processing: 30 107 10.9 106 138
Waiting: 28 105 10.7 104 138
Total: 30 107 11.0 106 140
```

## Community

[Discord](https://discord.com/channels/1197929379951558797/1197929379951558800)
2 changes: 2 additions & 0 deletions docs/api.md
Original file line number Diff line number Diff line change
Expand Up @@ -337,6 +337,8 @@ In `post_default_handler`, you can additionally collect data from the return val

In pydantic-resolve, you need to use `LoaderDepend` to manage `DataLoader`.

> New in v1.12.5: a shorter alias `Loader` is also available.

It supports declaring multiple `DataLoader` instances in one method.

```python
Expand Down
2 changes: 2 additions & 0 deletions docs/api.zh.md
Original file line number Diff line number Diff line change
Expand Up @@ -329,6 +329,8 @@ DataLoader 可以将并发的多个异步查询合并为一个。

在 pydantic-resolve 中需要使用 LoaderDepend 来管理 DataLoader。

> 从 v1.12.5 开始, 你也可以使用 `Loader`, 两者是等价的。

支持一个方法中申明多个 DataLoader。

```python
Expand Down
12 changes: 11 additions & 1 deletion docs/changelog.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,17 @@

## v1.12

### v1.12.3 (2025.7.12)
### v1.12.5 (2025.7.24)

feature:
- add short name `Loader` for `LoaderDepend`
- set Loader return type as DataLoader

non-functional:
- add more tests
- rename internal variable names

### v1.12.4 (2025.7.12)

update python versions in pyproject.toml

Expand Down
4 changes: 2 additions & 2 deletions pydantic_resolve/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,15 @@
GlobalLoaderFieldOverlappedError,
MissingCollector)
from .resolver import Resolver
from .utils.depend import LoaderDepend
from .utils.depend import LoaderDepend, Loader
from .utils.openapi import (
model_config)


__all__ = [
'Resolver',
'LoaderDepend',

'Loader', # short
'Collector',
'ICollector',

Expand Down
16 changes: 8 additions & 8 deletions pydantic_resolve/analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ def _scan_resolve_method(method, field: str, request_type: Optional[Type]) -> Re
info: DataLoaderType = {
'param': name,
'kls': param.default.dependency, # for later initialization
'path': class_util.get_kls_full_path(param.default.dependency),
'path': class_util.get_kls_full_name(param.default.dependency),
'request_type': request_type
}
result['dataloaders'].append(info)
Expand Down Expand Up @@ -146,7 +146,7 @@ def _scan_post_method(method, field: str, request_type: Optional[Type]) -> PostM
loader_info: DataLoaderType = {
'param': name,
'kls': param.default.dependency, # for later initialization
'path': class_util.get_kls_full_path(param.default.dependency),
'path': class_util.get_kls_full_name(param.default.dependency),
'request_type': request_type
}
result['dataloaders'].append(loader_info)
Expand Down Expand Up @@ -233,7 +233,7 @@ def _create_instance(loader):
global_loader_param,
loader_params.get(loader_kls, {}))

for field, has_default in class_util.get_class_field_without_default_value(loader_kls):
for field, has_default in class_util.get_fields_default_value_not_provided(loader_kls):
try:
if has_default and field not in param_config:
continue
Expand All @@ -249,7 +249,7 @@ def _create_instance(loader):

def _get_all_fields(kls):
if class_util.safe_issubclass(kls, BaseModel):
return list(class_util.get_keys(kls))
return list(class_util.get_pydantic_field_keys(kls))

elif is_dataclass(kls):
return [f.name for f in dc_fields(kls)]
Expand Down Expand Up @@ -320,11 +320,11 @@ def _get_request_type_for_loader(object_field_pairs, field_name: str):

def _get_all_fields_and_object_fields(kls):
if class_util.safe_issubclass(kls, BaseModel):
all_fields = set(class_util.get_keys(kls))
object_fields = list(class_util.get_pydantic_attrs(kls)) # dive and recursively analysis
all_fields = set(class_util.get_pydantic_field_keys(kls))
object_fields = list(class_util.get_pydantic_fields(kls)) # dive and recursively analysis
elif is_dataclass(kls):
all_fields = set([f.name for f in dc_fields(kls)])
object_fields = list(class_util.get_dataclass_attrs(kls))
object_fields = list(class_util.get_dataclass_fields(kls))
else:
raise AttributeError('invalid type: should be pydantic object or dataclass object') #noqa
return all_fields, object_fields, { x:y for x, y in object_fields}
Expand Down Expand Up @@ -413,7 +413,7 @@ def _populate_ancestors(parents):
metadata[kls_name]['should_traverse'] = True

def walker(kls, ancestors: List[Tuple[str, str]]):
kls_name = class_util.get_kls_full_path(kls)
kls_name = class_util.get_kls_full_name(kls)
hit = metadata.get(kls_name)
if hit:
# if populated by previous node, or self has_config
Expand Down
Loading