diff --git a/README.md b/README.md
index f620638..8a81e5f 100644
--- a/README.md
+++ b/README.md
@@ -3,14 +3,16 @@

[](https://github.com/allmonday/pydantic_resolve/actions/workflows/ci.yml)
-Pydantic-resolve is a framework for composing complex data structures with an intuitive, declarative, resolver-based way, and then let the data easy to understand and adjust.
+Pydantic-resolve is a framework for composing complex data structures in an intuitive, declarative, resolver-based way, which keeps the resulting data easy to understand and adjust.
It provides three major functions that simplify fetching and modifying multi-layered data.
+
- pluggable resolve methods and post methods, which define how to fetch and modify nodes.
- transporting field data from ancestor nodes to their descendant nodes, across multiple layers.
- collecting data from any descendant nodes into their ancestor nodes, across multiple layers.
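To make these three roles concrete, here is a minimal, self-contained sketch (the names are illustrative, not from this repo): a resolve method fetches child nodes, and a post method adjusts the parent once resolving is done.

```python
import asyncio
from typing import List
from pydantic import BaseModel
from pydantic_resolve import Resolver

class Item(BaseModel):
    name: str

class Container(BaseModel):
    items: List[Item] = []
    def resolve_items(self):   # fetch: runs first, may be async or use a DataLoader
        return [Item(name='a'), Item(name='b')]

    total: int = 0
    def post_total(self):      # modify: runs after the resolve phase finishes
        return len(self.items)

async def main():
    data = await Resolver().resolve(Container())
    print(data.total)  # 2

asyncio.run(main())
```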
It supports:
+
- pydantic v1
- pydantic v2
- dataclass `from pydantic.dataclasses import dataclass`
@@ -30,12 +32,12 @@ from biz_services import UserLoader, StoryTaskLoader
class Task(BaseTask):
user: Optional[BaseUser] = None
- def resolve_user(self, loader=LoaderDepend(UserLoader)):
+ def resolve_user(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id) if self.assignee_id else None
class Story(BaseStory):
- tasks: list[Task] = []
- def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
+ tasks: list[Task] = []
+ def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
# this loader returns BaseTask;
# Task inherits from BaseTask, so it can be initialized from the loaded data and then fetch the user.
return loader.load(self.id)
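`UserLoader` and `StoryTaskLoader` come from the project's own `biz_services` module and are not shown here. For orientation, a batch loader of this kind is usually a thin `aiodataloader.DataLoader` subclass along these lines (a sketch; the query helper `fetch_tasks_by_story_ids` is hypothetical):

```python
from collections import defaultdict
from aiodataloader import DataLoader

class StoryTaskLoader(DataLoader):
    async def batch_load_fn(self, story_ids):
        # one batched query instead of N per-story queries
        tasks = await fetch_tasks_by_story_ids(story_ids)  # hypothetical helper
        grouped = defaultdict(list)
        for task in tasks:
            grouped[task.story_id].append(task)
        # aiodataloader expects one result per key, in key order
        return [grouped[story_id] for story_id in story_ids]
```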
@@ -47,6 +49,7 @@ data = await Resolver().resolve(stories)
Then it transforms the flat stories into nested stories with rich details:
BaseStory
+
```json
[
{ "id": 1, "name": "story - 1" },
@@ -55,6 +58,7 @@ BaseStory
```
Story
+
```json
[
{
@@ -110,7 +114,7 @@ Building complex data structures requires only 3 systematic steps, let's take
Establish entity relationships as foundational data models (stable; serves as the architectural blueprint)
-
+
```python
from pydantic import BaseModel
@@ -156,27 +160,27 @@ DataLoader implementations support flexible data sources, from database queries
Based on specific business logic, create domain-specific data structures by selecting schema fields and reusing relationship DataLoaders (stable, reusable across use cases)
-
+
```python
-from pydantic_resolve import LoaderDepend
+from pydantic_resolve import Loader
class Task(BaseTask):
user: Optional[BaseUser] = None
- def resolve_user(self, loader=LoaderDepend(UserLoader)):
+ def resolve_user(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id) if self.assignee_id else None
class Story(BaseStory):
tasks: list[Task] = []
- def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
+ def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
return loader.load(self.id)
assignee: Optional[BaseUser] = None
- def resolve_assignee(self, loader=LoaderDepend(UserLoader)):
+ def resolve_assignee(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id) if self.assignee_id else None
reporter: Optional[BaseUser] = None
- def resolve_reporter(self, loader=LoaderDepend(UserLoader)):
+ def resolve_reporter(self, loader=Loader(UserLoader)):
return loader.load(self.report_to) if self.report_to else None
```
@@ -190,7 +194,7 @@ class Story(BaseModel):
report_to: int
tasks: list[BaseTask] = []
- def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
+ def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
return loader.load(self.id)
```
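When hand-picking fields like this, the `ensure_subset` decorator (defined in `pydantic_resolve/utils/class_util.py`, which appears later in this diff) can guard a trimmed schema against drifting from its base model. A sketch of how it is typically applied, importing from the module path (the usage pattern is inferred from the checks in that file):

```python
from pydantic import BaseModel
from pydantic_resolve.utils.class_util import ensure_subset

class BaseStory(BaseModel):
    id: int
    name: str
    report_to: int

@ensure_subset(BaseStory)
class Story(BaseModel):
    id: int          # must also exist on BaseStory
    report_to: int
    # a required field that BaseStory lacks would raise AttributeError
```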
@@ -205,29 +209,29 @@ Leverage post_field methods for ancestor data access, node transfers, and in-pla
#### Case 1: Aggregate or collect items
-
+
```python
-from pydantic_resolve import LoaderDepend, Collector
+from pydantic_resolve import Loader, Collector
class Task(BaseTask):
__pydantic_resolve_collect__ = {'user': 'related_users'} # Propagate user to collector: 'related_users'
user: Optional[BaseUser] = None
- def resolve_user(self, loader=LoaderDepend(UserLoader)):
+ def resolve_user(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id)
class Story(BaseStory):
tasks: list[Task] = []
- def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
+ def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
return loader.load(self.id)
assignee: Optional[BaseUser] = None
- def resolve_assignee(self, loader=LoaderDepend(UserLoader)):
+ def resolve_assignee(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id)
reporter: Optional[BaseUser] = None
- def resolve_reporter(self, loader=LoaderDepend(UserLoader)):
+ def resolve_reporter(self, loader=Loader(UserLoader)):
return loader.load(self.report_to)
# ---------- Post-processing ------------
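The collecting side of this example falls outside the hunk. Based on the `Collector` import and the `__pydantic_resolve_collect__` declaration above, it typically looks like this sketch (the `related_users` field is illustrative):

```python
class Story(BaseStory):
    # ... resolve methods as above ...

    related_users: list[BaseUser] = []
    def post_related_users(self, collector=Collector(alias='related_users')):
        # gathers every `user` that descendant Task nodes sent to 'related_users'
        return collector.values()
```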
@@ -238,20 +242,20 @@ class Story(BaseStory):
#### Case 2: Compute extra fields
-
+
```python
class Story(BaseStory):
tasks: list[Task] = []
- def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
+ def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
return loader.load(self.id)
assignee: Optional[BaseUser] = None
- def resolve_assignee(self, loader=LoaderDepend(UserLoader)):
+ def resolve_assignee(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id)
reporter: Optional[BaseUser] = None
- def resolve_reporter(self, loader=LoaderDepend(UserLoader)):
+ def resolve_reporter(self, loader=Loader(UserLoader)):
return loader.load(self.report_to)
# ---------- Post-processing ------------
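The computed fields themselves also fall outside the hunk; a computed field in this style is a plain post method that reads already-resolved siblings, for example (illustrative):

```python
class Story(BaseStory):
    # ... resolve methods as above ...

    task_count: int = 0
    def post_task_count(self):
        return len(self.tasks)  # runs after resolve_tasks has populated tasks
```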
@@ -263,11 +267,11 @@ class Story(BaseStory):
#### Case 3: Propagate ancestor data through ancestor_context
```python
-from pydantic_resolve import LoaderDepend
+from pydantic_resolve import Loader
class Task(BaseTask):
user: Optional[BaseUser] = None
- def resolve_user(self, loader=LoaderDepend(UserLoader)):
+ def resolve_user(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id)
# ---------- Post-processing ------------
@@ -278,15 +282,15 @@ class Story(BaseStory):
__pydantic_resolve_expose__ = {'name': 'story_name'}
tasks: list[Task] = []
- def resolve_tasks(self, loader=LoaderDepend(StoryTaskLoader)):
+ def resolve_tasks(self, loader=Loader(StoryTaskLoader)):
return loader.load(self.id)
assignee: Optional[BaseUser] = None
- def resolve_assignee(self, loader=LoaderDepend(UserLoader)):
+ def resolve_assignee(self, loader=Loader(UserLoader)):
return loader.load(self.assignee_id)
reporter: Optional[BaseUser] = None
- def resolve_reporter(self, loader=LoaderDepend(UserLoader)):
+ def resolve_reporter(self, loader=Loader(UserLoader)):
return loader.load(self.report_to)
```
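On the Task side, the exposed value is read back through the reserved `ancestor_context` parameter of a resolve or post method; a sketch (the `full_name` field is illustrative):

```python
class Task(BaseTask):
    full_name: str = ''
    def post_full_name(self, ancestor_context):
        # 'story_name' is the alias exposed by Story via __pydantic_resolve_expose__
        return f"{ancestor_context['story_name']} - {self.name}"
```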
@@ -345,71 +349,6 @@ python -m http.server
Current test coverage: 97%
-## Benchmark
-
-`ab -c 50 -n 1000` based on FastAPI.
-
-strawberry-graphql (including cost of parsing query statements)
-
-```
-Server Software: uvicorn
-Server Hostname: localhost
-Server Port: 8000
-
-Document Path: /graphql
-Document Length: 5303 bytes
-
-Concurrency Level: 50
-Time taken for tests: 3.630 seconds
-Complete requests: 1000
-Failed requests: 0
-Total transferred: 5430000 bytes
-Total body sent: 395000
-HTML transferred: 5303000 bytes
-Requests per second: 275.49 [#/sec] (mean)
-Time per request: 181.498 [ms] (mean)
-Time per request: 3.630 [ms] (mean, across all concurrent requests)
-Transfer rate: 1460.82 [Kbytes/sec] received
- 106.27 kb/s sent
- 1567.09 kb/s total
-
-Connection Times (ms)
- min mean[+/-sd] median max
-Connect: 0 0 0.2 0 1
-Processing: 31 178 14.3 178 272
-Waiting: 30 176 14.3 176 270
-Total: 31 178 14.4 179 273
-```
-
-pydantic-resolve
-
-```
-Server Software: uvicorn
-Server Hostname: localhost
-Server Port: 8000
-
-Document Path: /sprints
-Document Length: 4621 bytes
-
-Concurrency Level: 50
-Time taken for tests: 2.194 seconds
-Complete requests: 1000
-Failed requests: 0
-Total transferred: 4748000 bytes
-HTML transferred: 4621000 bytes
-Requests per second: 455.79 [#/sec] (mean)
-Time per request: 109.700 [ms] (mean)
-Time per request: 2.194 [ms] (mean, across all concurrent requests)
-Transfer rate: 2113.36 [Kbytes/sec] received
-
-Connection Times (ms)
-min mean[+/-sd] median max
-Connect: 0 0 0.3 0 1
-Processing: 30 107 10.9 106 138
-Waiting: 28 105 10.7 104 138
-Total: 30 107 11.0 106 140
-```
-
## Community
[Discord](https://discord.com/channels/1197929379951558797/1197929379951558800)
diff --git a/docs/api.md b/docs/api.md
index 4eb3a56..bedf856 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -337,6 +337,8 @@ In `post_default_handler`, you can additionally collect data from the return val
In pydantic-resolve, you need to use `LoaderDepend` to manage `DataLoader`.
+> New in v1.12.5: the shorter alias `Loader` is also available.
+
It supports declaring multiple `DataLoader` instances in one method.
```python
diff --git a/docs/api.zh.md b/docs/api.zh.md
index 14acf87..d5895cb 100644
--- a/docs/api.zh.md
+++ b/docs/api.zh.md
@@ -329,6 +329,8 @@ DataLoader 可以将并发的多个异步查询合并为一个。
In pydantic-resolve, you need to use LoaderDepend to manage DataLoader.
+> Starting from v1.12.5, you can also use `Loader`; the two are equivalent.
+
Declaring multiple DataLoaders in one method is supported.
```python
diff --git a/docs/changelog.md b/docs/changelog.md
index 0b3a3e6..7a8da7f 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -2,7 +2,17 @@
## v1.12
-### v1.12.3 (2025.7.12)
+### v1.12.5 (2025.7.24)
+
+feature:
+- add short name `Loader` for `LoaderDepend`
+- annotate the return type of `Loader` as `DataLoader`
+
+non-functional:
+- add more tests
+- rename internal variable names
+
+### v1.12.4 (2025.7.12)
update python versions in pyproject.toml
diff --git a/pydantic_resolve/__init__.py b/pydantic_resolve/__init__.py
index de446b6..730e003 100644
--- a/pydantic_resolve/__init__.py
+++ b/pydantic_resolve/__init__.py
@@ -9,7 +9,7 @@
GlobalLoaderFieldOverlappedError,
MissingCollector)
from .resolver import Resolver
-from .utils.depend import LoaderDepend
+from .utils.depend import LoaderDepend, Loader
from .utils.openapi import (
model_config)
@@ -17,7 +17,7 @@
__all__ = [
'Resolver',
'LoaderDepend',
-
+ 'Loader',  # short alias of LoaderDepend
'Collector',
'ICollector',
diff --git a/pydantic_resolve/analysis.py b/pydantic_resolve/analysis.py
index cf4c5bf..662aa9d 100644
--- a/pydantic_resolve/analysis.py
+++ b/pydantic_resolve/analysis.py
@@ -108,7 +108,7 @@ def _scan_resolve_method(method, field: str, request_type: Optional[Type]) -> Re
info: DataLoaderType = {
'param': name,
'kls': param.default.dependency, # for later initialization
- 'path': class_util.get_kls_full_path(param.default.dependency),
+ 'path': class_util.get_kls_full_name(param.default.dependency),
'request_type': request_type
}
result['dataloaders'].append(info)
@@ -146,7 +146,7 @@ def _scan_post_method(method, field: str, request_type: Optional[Type]) -> PostM
loader_info: DataLoaderType = {
'param': name,
'kls': param.default.dependency, # for later initialization
- 'path': class_util.get_kls_full_path(param.default.dependency),
+ 'path': class_util.get_kls_full_name(param.default.dependency),
'request_type': request_type
}
result['dataloaders'].append(loader_info)
@@ -233,7 +233,7 @@ def _create_instance(loader):
global_loader_param,
loader_params.get(loader_kls, {}))
- for field, has_default in class_util.get_class_field_without_default_value(loader_kls):
+ for field, has_default in class_util.get_fields_default_value_not_provided(loader_kls):
try:
if has_default and field not in param_config:
continue
@@ -249,7 +249,7 @@ def _create_instance(loader):
def _get_all_fields(kls):
if class_util.safe_issubclass(kls, BaseModel):
- return list(class_util.get_keys(kls))
+ return list(class_util.get_pydantic_field_keys(kls))
elif is_dataclass(kls):
return [f.name for f in dc_fields(kls)]
@@ -320,11 +320,11 @@ def _get_request_type_for_loader(object_field_pairs, field_name: str):
def _get_all_fields_and_object_fields(kls):
if class_util.safe_issubclass(kls, BaseModel):
- all_fields = set(class_util.get_keys(kls))
- object_fields = list(class_util.get_pydantic_attrs(kls)) # dive and recursively analysis
+ all_fields = set(class_util.get_pydantic_field_keys(kls))
+ object_fields = list(class_util.get_pydantic_fields(kls)) # dive in and analyze recursively
elif is_dataclass(kls):
all_fields = set([f.name for f in dc_fields(kls)])
- object_fields = list(class_util.get_dataclass_attrs(kls))
+ object_fields = list(class_util.get_dataclass_fields(kls))
else:
raise AttributeError('invalid type: should be pydantic object or dataclass object') #noqa
return all_fields, object_fields, { x:y for x, y in object_fields}
@@ -413,7 +413,7 @@ def _populate_ancestors(parents):
metadata[kls_name]['should_traverse'] = True
def walker(kls, ancestors: List[Tuple[str, str]]):
- kls_name = class_util.get_kls_full_path(kls)
+ kls_name = class_util.get_kls_full_name(kls)
hit = metadata.get(kls_name)
if hit:
# if populated by previous node, or self has_config
diff --git a/pydantic_resolve/resolver.py b/pydantic_resolve/resolver.py
index ca70478..bec0497 100644
--- a/pydantic_resolve/resolver.py
+++ b/pydantic_resolve/resolver.py
@@ -299,7 +299,7 @@ async def _traverse(self, node: T, parent: object) -> T:
return node
kls = node.__class__
- kls_path = class_util.get_kls_full_path(kls)
+ kls_path = class_util.get_kls_full_name(kls)
self._prepare_collectors(node, kls)
self._prepare_expose_fields(node)
@@ -357,7 +357,7 @@ async def _traverse(self, node: T, parent: object) -> T:
async def resolve(self, node: T) -> T:
if isinstance(node, list) and node == []: return node
- root_class = class_util.get_class(node)
+ root_class = class_util.get_class_of_object(node)
metadata = analysis.scan_and_store_metadata(root_class)
self.metadata = analysis.convert_metadata_key_as_kls(metadata)
diff --git a/pydantic_resolve/utils/class_util.py b/pydantic_resolve/utils/class_util.py
index 13d65af..bd41633 100644
--- a/pydantic_resolve/utils/class_util.py
+++ b/pydantic_resolve/utils/class_util.py
@@ -9,72 +9,19 @@
from pydantic import BaseModel
-def get_class_field_without_default_value(cls: Type) -> List[Tuple[str, bool]]: # field name, has default value
- """
- return class field which do not have a default value.
-
- class MyClass:
- a: int
- b: int = 1
-
- print(hasattr(MyClass, 'a')) # False
- print(hasattr(MyClass, 'b')) # True
- """
- anno = cls.__dict__.get('__annotations__') or {}
- return [(k, hasattr(cls, k)) for k in anno.keys()]
-
-
-def safe_issubclass(kls, classinfo):
- try:
- return issubclass(kls, classinfo)
- except TypeError:
- return False
-
-
+# ----------------------- rebuild -----------------------
def rebuild_v1(kls):
kls.update_forward_refs()
+
def rebuild_v2(kls):
kls.model_rebuild()
-rebuild = rebuild_v2 if PYDANTIC_V2 else rebuild_v1
-
-def update_forward_refs(kls):
- def update_pydantic_forward_refs(kls: Type[BaseModel]):
- """
- recursively update refs.
- """
- if getattr(kls, const.PYDANTIC_FORWARD_REF_UPDATED, False):
- return
-
- rebuild(kls)
-
- setattr(kls, const.PYDANTIC_FORWARD_REF_UPDATED, True)
-
- values = get_values(kls)
-
- for field in values:
- shelled_type = shelling_type(get_type(field))
-
- update_forward_refs(shelled_type)
-
- def update_dataclass_forward_refs(kls):
- if not getattr(kls, const.DATACLASS_FORWARD_REF_UPDATED, False):
- anno = get_type_hints(kls)
- kls.__annotations__ = anno
- setattr(kls, const.DATACLASS_FORWARD_REF_UPDATED, True)
-
- for _, v in kls.__annotations__.items():
- shelled_type = shelling_type(v)
- update_forward_refs(shelled_type)
-
- if safe_issubclass(kls, BaseModel):
- update_pydantic_forward_refs(kls)
- if is_dataclass(kls):
- update_dataclass_forward_refs(kls)
+rebuild = rebuild_v2 if PYDANTIC_V2 else rebuild_v1
+# ---------------------- ensure_subset ------------------
def ensure_subset_v1(base):
"""
used with pydantic class or dataclass to make sure a class's field is
@@ -103,7 +50,7 @@ def inner():
def inner():
base_fields = {f.name: f.type for f in fields(base)}
for f in fields(kls):
- has_default = dataclass_has_default(f)
+ has_default = is_dataclass_field_has_default_value(f)
if not has_default:
if f.name not in base_fields:
raise AttributeError(f'{f.name} not existed in {base.__name__}.')
@@ -161,7 +108,7 @@ def inner():
def inner():
base_fields = {f.name: f.type for f in fields(base)}
for f in fields(kls):
- has_default = dataclass_has_default(f)
+ has_default = is_dataclass_field_has_default_value(f)
if not has_default:
if f.name not in base_fields:
raise AttributeError(f'{f.name} not existed in {base.__name__}.')
@@ -177,45 +124,50 @@ def inner():
ensure_subset = ensure_subset_v2 if PYDANTIC_V2 else ensure_subset_v1
-def get_kls_full_path(kls):
- return f'{kls.__module__}.{kls.__qualname__}'
-
-
-def _get_items_v1(kls):
+def _get_pydantic_field_items_v1(kls):
return kls.__fields__.items()
-def _get_items_v2(kls):
+def _get_pydantic_field_items_v2(kls):
return kls.model_fields.items()
-get_items = _get_items_v2 if PYDANTIC_V2 else _get_items_v1
+get_pydantic_field_items = _get_pydantic_field_items_v2 if PYDANTIC_V2 else _get_pydantic_field_items_v1
-def _get_keys_v1(kls) -> str:
+def _get_pydantic_field_keys_v1(kls):
return kls.__fields__.keys()
-def _get_keys_v2(kls) -> str:
+def _get_pydantic_field_keys_v2(kls):
return kls.model_fields.keys()
-get_keys = _get_keys_v2 if PYDANTIC_V2 else _get_keys_v1
+get_pydantic_field_keys = _get_pydantic_field_keys_v2 if PYDANTIC_V2 else _get_pydantic_field_keys_v1
-def _get_values_v1(kls):
+def _get_pydantic_field_values_v1(kls):
return kls.__fields__.values()
-def _get_values_v2(kls):
+def _get_pydantic_field_values_v2(kls):
return kls.model_fields.values()
-get_values = _get_values_v2 if PYDANTIC_V2 else _get_values_v1
+get_pydantic_field_values = _get_pydantic_field_values_v2 if PYDANTIC_V2 else _get_pydantic_field_values_v1
+
+
+def _is_pydantic_field_required_v1(field):
+ return field.required
+
+def _is_pydantic_field_required_v2(field):
+ return field.is_required()
+
+is_pydantic_field_required_field = _is_pydantic_field_required_v2 if PYDANTIC_V2 else _is_pydantic_field_required_v1
-def get_pydantic_attrs(kls):
- items = class_util.get_items(kls)
+def get_pydantic_fields(kls):
+ items = class_util.get_pydantic_field_items(kls)
for name, v in items:
t = get_type(v)
@@ -225,31 +177,85 @@ def get_pydantic_attrs(kls):
yield (name, shelled_type) # type_ is the most inner type
-def get_dataclass_attrs(kls):
+def get_dataclass_fields(kls):
for name, v in kls.__annotations__.items():
shelled_type = shelling_type(v)
if is_acceptable_kls(shelled_type):
yield (name, shelled_type)
-def get_class(target):
+def get_class_of_object(target):
if isinstance(target, list):
return target[0].__class__
else:
return target.__class__
-def _is_required_v1(field):
- return field.required
-
-def _is_required_v2(field):
- return field.is_required()
-
-is_required_field = _is_required_v2 if PYDANTIC_V2 else _is_required_v1
-
-def dataclass_has_default(field):
+def is_dataclass_field_has_default_value(field):
if field.default is not MISSING or field.default_factory is not MISSING:
return True
typ = field.type
- return _is_optional(typ)
\ No newline at end of file
+ return _is_optional(typ)
+
+
+def update_forward_refs(kls):
+ def update_pydantic_forward_refs(kls: Type[BaseModel]):
+ """
+ recursively update refs.
+ """
+ if getattr(kls, const.PYDANTIC_FORWARD_REF_UPDATED, False):
+ return
+
+ rebuild(kls)
+
+ setattr(kls, const.PYDANTIC_FORWARD_REF_UPDATED, True)
+
+ values = get_pydantic_field_values(kls)
+
+ for field in values:
+ shelled_type = shelling_type(get_type(field))
+
+ update_forward_refs(shelled_type)
+
+ def update_dataclass_forward_refs(kls):
+ if not getattr(kls, const.DATACLASS_FORWARD_REF_UPDATED, False):
+ anno = get_type_hints(kls)
+ kls.__annotations__ = anno
+ setattr(kls, const.DATACLASS_FORWARD_REF_UPDATED, True)
+
+ for _, v in kls.__annotations__.items():
+ shelled_type = shelling_type(v)
+ update_forward_refs(shelled_type)
+
+ if safe_issubclass(kls, BaseModel):
+ update_pydantic_forward_refs(kls)
+
+ if is_dataclass(kls):
+ update_dataclass_forward_refs(kls)
+
+
+def get_kls_full_name(kls):
+ return f'{kls.__module__}.{kls.__qualname__}'
+
+
+def get_fields_default_value_not_provided(cls: Type) -> List[Tuple[str, bool]]: # field name, has default value
+ """
+ Return (field name, has default value) pairs for the class's annotated fields.
+
+ class MyClass:
+ a: int
+ b: int = 1
+
+ print(hasattr(MyClass, 'a')) # False
+ print(hasattr(MyClass, 'b')) # True
+ """
+ anno = cls.__dict__.get('__annotations__') or {}
+ return [(k, hasattr(cls, k)) for k in anno.keys()]
+
+
+def safe_issubclass(kls, classinfo):
+ try:
+ return issubclass(kls, classinfo)
+ except TypeError:
+ return False
\ No newline at end of file
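`safe_issubclass` exists because plain `issubclass` raises `TypeError` when its first argument is not a class, which happens for generic annotations encountered while traversing models; a quick illustration:

```python
from typing import List
from pydantic import BaseModel
from pydantic_resolve.utils.class_util import safe_issubclass

# issubclass(List[int], BaseModel) raises "TypeError: issubclass() arg 1 must be a class"
safe_issubclass(List[int], BaseModel)  # False, instead of raising
safe_issubclass(dict, BaseModel)       # False
```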
diff --git a/pydantic_resolve/utils/conversion.py b/pydantic_resolve/utils/conversion.py
index d226a29..8a97119 100644
--- a/pydantic_resolve/utils/conversion.py
+++ b/pydantic_resolve/utils/conversion.py
@@ -186,6 +186,15 @@ def mapper(func_or_class: Union[Callable, Type]):
func_or_class:
is func: run func
is class: try auto_mapping
+
+ @dataclass
+ class K:
+ id: int
+
+ field: str = ''
+ @mapper(lambda x: x.name)
+ def resolve_field(self, loader=Loader(field_batch_load_fn)):
+ return loader.load(self.id)
"""
def inner(inner_fn):
diff --git a/pydantic_resolve/utils/depend.py b/pydantic_resolve/utils/depend.py
index 4ca87b1..409c747 100644
--- a/pydantic_resolve/utils/depend.py
+++ b/pydantic_resolve/utils/depend.py
@@ -1,4 +1,5 @@
from typing import Any, Callable, Optional
+from aiodataloader import DataLoader
class Depends:
def __init__(
@@ -10,5 +11,8 @@ def __init__(
def LoaderDepend( # noqa: N802
dependency: Optional[Callable[..., Any]] = None,
-) -> Any:
- return Depends(dependency=dependency)
\ No newline at end of file
+) -> DataLoader:
+ # actually returns a Depends marker; the DataLoader annotation gives editors load()/load_many() hints
+ return Depends(dependency=dependency)
+
+
+Loader = LoaderDepend
\ No newline at end of file
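Since `Loader` is a plain assignment to `LoaderDepend`, the two names are interchangeable everywhere, and the `DataLoader` return annotation is what gives editors `load()`/`load_many()` completions even though the function actually returns a `Depends` marker. A quick check:

```python
from pydantic_resolve import Loader, LoaderDepend

assert Loader is LoaderDepend  # same object, just a shorter spelling

# existing resolver code keeps working unchanged, e.g.:
# def resolve_books(self, loader=Loader(BookLoader)):
#     return loader.load(self.id)
```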
diff --git a/pydantic_resolve/utils/openapi.py b/pydantic_resolve/utils/openapi.py
index 554f6cd..5e2f49f 100644
--- a/pydantic_resolve/utils/openapi.py
+++ b/pydantic_resolve/utils/openapi.py
@@ -4,16 +4,16 @@
from pydantic import BaseModel
import pydantic_resolve.constant as const
from pydantic_resolve.compat import PYDANTIC_V2
-from pydantic_resolve.utils.class_util import safe_issubclass, is_required_field, get_items
+from pydantic_resolve.utils.class_util import safe_issubclass, is_pydantic_field_required_field, get_pydantic_field_items
def _get_required_fields(kls: BaseModel):
required_fields = []
- items = get_items(kls)
+ items = get_pydantic_field_items(kls)
for fname, field in items:
- if is_required_field(field):
+ if is_pydantic_field_required_field(field):
required_fields.append(fname)
diff --git a/pyproject.toml b/pyproject.toml
index df67f67..64daafd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "pydantic-resolve"
-version = "1.12.4"
+version = "1.12.5"
description = "It just provide a pair of pre & post methods around pydantic fields, the rest is up to your imagination"
authors = ["tangkikodo "]
readme = "README.md"
diff --git a/tests/pydantic_v1/core/test_field.py b/tests/pydantic_v1/core/test_field.py
index 5f7b1c8..1f7b407 100644
--- a/tests/pydantic_v1/core/test_field.py
+++ b/tests/pydantic_v1/core/test_field.py
@@ -1,6 +1,6 @@
# from __future__ import annotations
from pydantic import BaseModel
-from pydantic_resolve.utils.class_util import get_class
+from pydantic_resolve.utils.class_util import get_class_of_object
def test_get_class():
class Student(BaseModel):
@@ -9,5 +9,5 @@ class Student(BaseModel):
stu = Student()
stus = [Student(), Student()]
- assert get_class(stu) == Student
- assert get_class(stus) == Student
+ assert get_class_of_object(stu) == Student
+ assert get_class_of_object(stus) == Student
diff --git a/tests/pydantic_v1/resolver/test_8_loader_depend.py b/tests/pydantic_v1/resolver/test_8_loader_depend.py
index fcc4c3e..8881299 100644
--- a/tests/pydantic_v1/resolver/test_8_loader_depend.py
+++ b/tests/pydantic_v1/resolver/test_8_loader_depend.py
@@ -1,7 +1,7 @@
from typing import List
import pytest
from pydantic import BaseModel
-from pydantic_resolve import Resolver, LoaderDepend
+from pydantic_resolve import Resolver, Loader
from aiodataloader import DataLoader
@pytest.mark.asyncio
@@ -35,7 +35,7 @@ class Student(BaseModel):
name: str
books: List[Book] = []
- def resolve_books(self, loader=LoaderDepend(BookLoader)):
+ def resolve_books(self, loader=Loader(BookLoader)):
return loader.load(self.id)
students = [Student(id=1, name="jack"), Student(id=2, name="mike"), Student(id=3, name="wiki")]
diff --git a/tests/pydantic_v1/utils/test_utils.py b/tests/pydantic_v1/utils/test_utils.py
index 1c0065c..98cccf7 100644
--- a/tests/pydantic_v1/utils/test_utils.py
+++ b/tests/pydantic_v1/utils/test_utils.py
@@ -25,10 +25,10 @@ class D(C):
class E(C):
world: str
- assert list(pydantic_resolve.utils.class_util.get_class_field_without_default_value(B)) == [('hello', True)]
- assert list(pydantic_resolve.utils.class_util.get_class_field_without_default_value(C)) == [('hello', False)]
- assert list(pydantic_resolve.utils.class_util.get_class_field_without_default_value(D)) == []
- assert list(pydantic_resolve.utils.class_util.get_class_field_without_default_value(E)) == [('world', False)]
+ assert list(pydantic_resolve.utils.class_util.get_fields_default_value_not_provided(B)) == [('hello', True)]
+ assert list(pydantic_resolve.utils.class_util.get_fields_default_value_not_provided(C)) == [('hello', False)]
+ assert list(pydantic_resolve.utils.class_util.get_fields_default_value_not_provided(D)) == []
+ assert list(pydantic_resolve.utils.class_util.get_fields_default_value_not_provided(E)) == [('world', False)]
class User(BaseModel):
diff --git a/tests/pydantic_v2/core/test_field.py b/tests/pydantic_v2/core/test_field.py
index 5f7b1c8..1f7b407 100644
--- a/tests/pydantic_v2/core/test_field.py
+++ b/tests/pydantic_v2/core/test_field.py
@@ -1,6 +1,6 @@
# from __future__ import annotations
from pydantic import BaseModel
-from pydantic_resolve.utils.class_util import get_class
+from pydantic_resolve.utils.class_util import get_class_of_object
def test_get_class():
class Student(BaseModel):
@@ -9,5 +9,5 @@ class Student(BaseModel):
stu = Student()
stus = [Student(), Student()]
- assert get_class(stu) == Student
- assert get_class(stus) == Student
+ assert get_class_of_object(stu) == Student
+ assert get_class_of_object(stus) == Student
diff --git a/tests/pydantic_v2/resolver/test_43_type_adapter_conver_from_pydantic_object.py b/tests/pydantic_v2/resolver/test_43_type_adapter_conver_from_pydantic_object.py
index 25ab297..75019fa 100644
--- a/tests/pydantic_v2/resolver/test_43_type_adapter_conver_from_pydantic_object.py
+++ b/tests/pydantic_v2/resolver/test_43_type_adapter_conver_from_pydantic_object.py
@@ -4,7 +4,7 @@
from pydantic_resolve import Resolver
@pytest.mark.asyncio
-async def test_1():
+async def test_function_param():
class Base(BaseModel):
name: str
id: int
@@ -24,9 +24,33 @@ def resolve_items(self):
c = await Resolver(enable_from_attribute_in_type_adapter=True).resolve(c)
assert c.items[0].name == 'name-1'
+@pytest.mark.asyncio
+async def test_global_env():
+ import os
+ os.environ['PYDANTIC_RESOLVE_ENABLE_FROM_ATTRIBUTE'] = 'true'
+
+ class Base(BaseModel):
+ name: str
+ id: int
+
+ class Child(BaseModel):
+ name: str
+
+ class Container(BaseModel):
+ items: List[Child] = []
+ def resolve_items(self):
+ return [Base(name='name-1', id=1), Base(name='name-2', id=2)]
+
+ c = Container()
+
+ c = await Resolver().resolve(c)
+ assert c.items[0].name == 'name-1'
+
+ # delete env
+ os.environ.pop('PYDANTIC_RESOLVE_ENABLE_FROM_ATTRIBUTE')
@pytest.mark.asyncio
-async def test_2():
+async def test_list():
class Child(BaseModel):
name: str
@@ -42,7 +66,7 @@ def resolve_items(self):
@pytest.mark.asyncio
-async def test_3():
+async def test_object():
class Child(BaseModel):
name: str
@@ -56,3 +80,22 @@ def resolve_item(self):
assert c.item and c.item.name == 'name-1'
+
+@pytest.mark.asyncio
+async def test_base_to_child():
+ class Base(BaseModel):
+ name: str
+
+ class Child(Base):
+ id: Optional[int] = None
+
+ class Container(BaseModel):
+ items: List[Child] = []
+ def resolve_items(self):
+ return [Base(name='name-1'), Base(name='name-2')]
+
+ c = Container()
+ c = await Resolver(enable_from_attribute_in_type_adapter=True).resolve(c)
+ assert c.items[0].name == 'name-1'
+
+
diff --git a/tests/pydantic_v2/utils/test_utils.py b/tests/pydantic_v2/utils/test_utils.py
index 40764ed..0104954 100644
--- a/tests/pydantic_v2/utils/test_utils.py
+++ b/tests/pydantic_v2/utils/test_utils.py
@@ -25,10 +25,10 @@ class D(C):
class E(C):
world: str
- assert list(pydantic_resolve.utils.class_util.get_class_field_without_default_value(B)) == [('hello', True)]
- assert list(pydantic_resolve.utils.class_util.get_class_field_without_default_value(C)) == [('hello', False)]
- assert list(pydantic_resolve.utils.class_util.get_class_field_without_default_value(D)) == []
- assert list(pydantic_resolve.utils.class_util.get_class_field_without_default_value(E)) == [('world', False)]
+ assert list(pydantic_resolve.utils.class_util.get_fields_default_value_not_provided(B)) == [('hello', True)]
+ assert list(pydantic_resolve.utils.class_util.get_fields_default_value_not_provided(C)) == [('hello', False)]
+ assert list(pydantic_resolve.utils.class_util.get_fields_default_value_not_provided(D)) == []
+ assert list(pydantic_resolve.utils.class_util.get_fields_default_value_not_provided(E)) == [('world', False)]
class User(BaseModel):
diff --git a/tox.ini b/tox.ini
index 6acca94..9fa24e3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -6,66 +6,95 @@ envlist =
py39pyd2
py310pyd1
py310pyd2
-setenv = VIRTUALENV_DISCOVERY=pyenv
+ py311pyd1
+ py311pyd2
+ py312pyd1
+ py312pyd2
+setenv = VIRTUALENV_DISCOVERY=builtin
[testenv]
allowlist_externals = poetry
-
-[testenv:py38-duration]
-basepython = python3.8
-commands_pre =
- poetry install --no-root --sync
-commands =
- poetry run pytest --durations=20 tests/pydantic_v1
+# install into tox's venv (not Poetry's) to enable caching/reuse between runs
+setenv =
+ POETRY_VIRTUALENVS_CREATE = false
+ POETRY_NO_INTERACTION = 1
+ PIP_DISABLE_PIP_VERSION_CHECK = 1
[testenv:py38pyd1]
basepython = python3.8
commands_pre =
- poetry install --no-root --sync
+ poetry install --sync
commands =
- poetry run pytest tests/pydantic_v1 tests/common
+ pytest tests/pydantic_v1 tests/common
[testenv:py38pyd2]
commands_pre =
- poetry install --no-root --sync
+ poetry install --sync
pip install pydantic==2.*
basepython = python3.8
commands =
- poetry run pytest tests/pydantic_v2 tests/common
+ pytest tests/pydantic_v2 tests/common
[testenv:py39pyd1]
basepython = python3.9
commands_pre =
- poetry install --no-root --sync
+ poetry install --sync
commands =
- poetry run pytest tests/pydantic_v1 tests/common
+ pytest tests/pydantic_v1 tests/common
[testenv:py39pyd2]
commands_pre =
- poetry install --no-root --sync
+ poetry install --sync
pip install pydantic==2.*
basepython = python3.9
commands =
- poetry run pytest tests/pydantic_v2 tests/common
+ pytest tests/pydantic_v2 tests/common
[testenv:py310pyd1]
basepython = python3.10
commands_pre =
- poetry install --no-root --sync
+ poetry install --sync
commands =
- ; poetry run pytest tests/pydantic_v1
- poetry run coverage run --data-file=./cov/.result1 -m pytest tests/pydantic_v1 tests/common
- ; poetry run coverage xml -o ./cov/coverage.xml
-
+ coverage run --data-file=./cov/.result1 -m pytest tests/pydantic_v1 tests/common
[testenv:py310pyd2]
basepython = python3.10
commands_pre =
- poetry install --no-root --sync
+ poetry install --sync
+ pip install pydantic==2.*
+commands =
+ coverage run --data-file=./cov/.result2 -m pytest tests/pydantic_v2 tests/common
+
+[testenv:py311pyd1]
+basepython = python3.11
+commands_pre =
+ poetry install --sync
+commands =
+ pytest tests/pydantic_v1 tests/common
+
+[testenv:py311pyd2]
+commands_pre =
+ poetry install --sync
+ pip install pydantic==2.*
+basepython = python3.11
+commands =
+ pytest tests/pydantic_v2 tests/common
+
+[testenv:py312pyd1]
+basepython = python3.12
+commands_pre =
+ poetry install --sync
+commands =
+ pytest tests/pydantic_v1 tests/common
+
+[testenv:py312pyd2]
+commands_pre =
+ poetry install --sync
pip install pydantic==2.*
+basepython = python3.12
commands =
- ; poetry run pytest tests/pydantic_v2
- poetry run coverage run --data-file=./cov/.result2 -m pytest tests/pydantic_v2 tests/common
+ pytest tests/pydantic_v2 tests/common
+
[testenv:coverage]
description = Combine coverage data and generate report