Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion bengal/health/validators/tracks.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def validate(
results = []

# Check if tracks data exists
if not hasattr(site.data, "tracks") or not site.data.tracks:
if "tracks" not in site.data or not site.data.tracks:
results.append(
CheckResult.info(
"No tracks defined",
Expand Down
13 changes: 7 additions & 6 deletions bengal/orchestration/menu.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@

from bengal.orchestration.utils.i18n import get_i18n_config
from bengal.utils.observability.logger import get_logger
from bengal.utils.primitives.dotdict import DotDict
from bengal.utils.primitives.hashing import hash_str

logger = get_logger(__name__)
Expand Down Expand Up @@ -275,9 +276,9 @@ def _compute_menu_cache_key(
# If data-driven, include the data keys
if isinstance(dropdown_cfg, str) and dropdown_cfg.startswith("data:"):
data_key = dropdown_cfg[5:]
if hasattr(self.site.data, data_key):
data = getattr(self.site.data, data_key)
if isinstance(data, dict):
if data_key in self.site.data:
data = self.site.data[data_key]
if isinstance(data, dict | DotDict):
dropdown_configs[-1]["data_keys"] = sorted(data.keys())

# Include bundles config
Expand Down Expand Up @@ -806,11 +807,11 @@ def _add_data_children(
seen_*: Deduplication sets
"""
# Get data from site.data
if not hasattr(self.site.data, data_key):
if data_key not in self.site.data:
return

data = getattr(self.site.data, data_key)
if not data or not isinstance(data, dict):
data = self.site.data[data_key]
if not data or not isinstance(data, dict | DotDict):
return

section_url = getattr(section, "_path", None) or f"/{section.name}/"
Expand Down
31 changes: 15 additions & 16 deletions bengal/utils/concurrency/async_compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,29 +31,28 @@
from __future__ import annotations

import asyncio
from functools import cache
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
from collections.abc import Coroutine

# Lazy uvloop detection - only check when first needed
# This avoids ~200ms import overhead when uvloop is installed but not used
_uvloop_checked = False
_uvloop_module = None


@cache
def _get_uvloop():
"""Lazily import uvloop on first use."""
global _uvloop_checked, _uvloop_module
if not _uvloop_checked:
_uvloop_checked = True
try:
import uvloop

_uvloop_module = uvloop
except ImportError:
_uvloop_module = None
return _uvloop_module
"""
Lazily import uvloop on first use.

Cached via ``functools.cache`` so the import attempt happens exactly
once across all threads (avoids the ~200ms uvloop import overhead on
every call, and avoids a check-then-act race under Python 3.14t
free-threading).
"""
try:
import uvloop
except ImportError:
return None
return uvloop


def run_async[T](coro: Coroutine[Any, Any, T]) -> T:
Expand Down
25 changes: 4 additions & 21 deletions bengal/utils/concurrency/retry.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,28 +110,19 @@ def retry_with_backoff[T](
... )

"""
last_error: Exception | None = None

for attempt in range(retries + 1):
try:
return func()
except exceptions as e:
last_error = e

if attempt < retries:
delay = calculate_backoff(attempt, base_delay, max_delay, jitter)

if on_retry:
on_retry(attempt, e)

time.sleep(delay)
else:
raise

# Should never reach here (loop always runs at least once and re-raises)
if last_error is None:
raise RuntimeError("retry_with_backoff: no attempts executed")
raise last_error
# range(retries + 1) yields >= 1 iteration for retries >= 0; loop always returns or raises.
raise AssertionError("unreachable: retry_with_backoff loop did not terminate")


async def async_retry_with_backoff[T](
Expand Down Expand Up @@ -169,24 +160,16 @@ async def async_retry_with_backoff[T](
... )

"""
last_error: Exception | None = None

for attempt in range(retries + 1):
try:
return await coro_func()
except exceptions as e:
last_error = e

if attempt < retries:
delay = calculate_backoff(attempt, base_delay, max_delay, jitter)

if on_retry:
on_retry(attempt, e)

await asyncio.sleep(delay)
else:
raise

if last_error is None:
raise RuntimeError("async_retry_with_backoff: no attempts executed")
raise last_error
# range(retries + 1) yields >= 1 iteration for retries >= 0; loop always returns or raises.
raise AssertionError("unreachable: async_retry_with_backoff loop did not terminate")
12 changes: 8 additions & 4 deletions bengal/utils/concurrency/thread_local.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,10 @@ def _check_factory_signature(self) -> bool:
params = list(sig.parameters.values())
return len(params) > 0

def _cache_key(self, key: str | None) -> str:
"""Build the per-thread attribute name for a logical key."""
return f"_cache_{self._name}_{key or 'default'}"

def get(self, key: str | None = None) -> T:
"""
Get or create cached instance for current thread.
Expand All @@ -97,7 +101,7 @@ def get(self, key: str | None = None) -> T:
Returns:
Cached or newly created instance
"""
cache_key = f"_cache_{self._name}_{key or 'default'}"
cache_key = self._cache_key(key)

if not hasattr(self._local, cache_key):
instance = self._factory(key) if self._factory_accepts_key and key else self._factory()
Expand All @@ -112,14 +116,14 @@ def clear(self, key: str | None = None) -> None:
Args:
key: Specific key to clear, or None to clear default
"""
cache_key = f"_cache_{self._name}_{key or 'default'}"
cache_key = self._cache_key(key)
if hasattr(self._local, cache_key):
delattr(self._local, cache_key)

def clear_all(self) -> None:
    """Clear all cached instances for the current thread.

    Only attributes carrying this cache's name prefix are removed, so
    other ThreadLocalCache instances sharing the same thread-local
    object are left untouched.
    """
    # Collect matching per-thread cache attribute names, then delete them.
    prefix = f"_cache_{self._name}_"
    to_delete = [attr for attr in dir(self._local) if attr.startswith(prefix)]
    for attr in to_delete:
        delattr(self._local, attr)

Expand Down
24 changes: 12 additions & 12 deletions bengal/utils/primitives/dates.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,17 @@
# Type alias for date-like values
type DateLike = datetime | date_type | str | None

# Fallback formats tried, in order, by parse_date() when the caller
# supplies no explicit format list; the first successful strptime()
# match wins, so the unambiguous ISO-style patterns are listed before
# the day-first ones.
# NOTE(review): day-first patterns ("%d-%m-%Y") come before any
# month-first equivalent, so an ambiguous "01-02-2025" resolves as
# Feb 1 — confirm this matches the intended locale convention.
_DEFAULT_DATE_FORMATS: tuple[str, ...] = (
    "%Y-%m-%d",  # 2025-10-09
    "%Y/%m/%d",  # 2025/10/09
    "%d-%m-%Y",  # 09-10-2025
    "%d/%m/%Y",  # 09/10/2025
    "%B %d, %Y",  # October 09, 2025
    "%b %d, %Y",  # Oct 09, 2025
    "%Y-%m-%d %H:%M:%S",  # 2025-10-09 14:30:00
    "%Y/%m/%d %H:%M:%S",  # 2025/10/09 14:30:00
)


def parse_date(
value: DateLike, formats: list[str] | None = None, on_error: str = "return_none"
Expand Down Expand Up @@ -76,18 +87,7 @@ def parse_date(
continue

# Try common formats
default_formats = [
"%Y-%m-%d", # 2025-10-09
"%Y/%m/%d", # 2025/10/09
"%d-%m-%Y", # 09-10-2025
"%d/%m/%Y", # 09/10/2025
"%B %d, %Y", # October 09, 2025
"%b %d, %Y", # Oct 09, 2025
"%Y-%m-%d %H:%M:%S", # 2025-10-09 14:30:00
"%Y/%m/%d %H:%M:%S", # 2025/10/09 14:30:00
]

for fmt in default_formats:
for fmt in _DEFAULT_DATE_FORMATS:
try:
return datetime.strptime(value, fmt)
except ValueError:
Expand Down
52 changes: 28 additions & 24 deletions bengal/utils/primitives/dotdict.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,15 @@
from collections.abc import ItemsView, Iterator, KeysView, ValuesView


def _wrap_dict_value(cache: dict[str, Any], key: str, value: Any) -> Any:
"""Lazily wrap a dict value as DotDict, caching for identity stability."""
if isinstance(value, dict) and not isinstance(value, DotDict):
if key not in cache:
cache[key] = DotDict(value)
return cache[key]
return value


class DotDict:
"""
Dictionary wrapper that allows dot notation access without method name conflicts.
Expand All @@ -37,7 +46,9 @@ class DotDict:
- Recursive wrapping of nested dicts (with caching for performance)
- Dict-like interface (but not inheriting from dict)
- No method name collisions
- Returns '' for missing keys (consistent with ParamsContext)
- Returns '' for missing keys (consistent with ParamsContext) — note
this means ``hasattr(d, key)`` is always True; use ``key in d`` or
``d.get(key)`` to test membership.

Usage:
>>> # Create from dict
Expand Down Expand Up @@ -85,15 +96,17 @@ def __getattribute__(self, key: str) -> Any:
This ensures that if a data field has the same name as a method
(like 'items', 'keys', 'values'), the data field is returned.

For Jinja2 compatibility, if a key doesn't exist in data and isn't
a real attribute, we return None instead of raising AttributeError.
This allows templates to safely check `if obj.field` without errors.
For Jinja2 compatibility, missing keys return ``""`` (empty string),
not ``None`` and not ``AttributeError``. This is intentional and
load-bearing — see ``themes/default/templates/SAFE_PATTERNS.md`` —
but a side effect is that ``hasattr(d, key)`` is **always True** on
a DotDict. Use ``key in d`` or ``d.get(key)`` to test for membership.

Args:
key: The attribute name

Returns:
The data value if it exists, the attribute, or None
The data value if it exists, a real attribute, or ``""`` on miss.
"""
# Special case: internal attributes need normal access
if key in ("_data", "_cache", "__class__", "__dict__"):
Expand All @@ -102,23 +115,13 @@ def __getattribute__(self, key: str) -> Any:
# Check if key exists in data first
data = object.__getattribute__(self, "_data")
if key in data:
value = data[key]
# Recursively wrap nested dicts (with caching)
if isinstance(value, dict) and not isinstance(value, DotDict):
cache = object.__getattribute__(self, "_cache")
# Check cache first
if key not in cache:
cache[key] = DotDict(value)
return cache[key]
return value
return _wrap_dict_value(object.__getattribute__(self, "_cache"), key, data[key])

# Try to get as a real attribute (methods like .get(), .keys())
try:
return object.__getattribute__(self, key)
except AttributeError:
# Key doesn't exist in data or as attribute
# Return empty string for Jinja2 compatibility (consistent with ParamsContext)
# Empty string is falsy, so `{% if obj.field %}` still works as expected
# Miss — empty string preserves Jinja2 falsy-chain ergonomics.
return ""

def __setattr__(self, key: str, value: Any) -> None:
Expand All @@ -144,13 +147,7 @@ def __delattr__(self, key: str) -> None:
# Dict interface methods
def __getitem__(self, key: str) -> Any:
    """
    Bracket notation access with caching.

    Nested plain dicts are lazily wrapped as DotDict via the shared
    _wrap_dict_value helper, which caches wrappers in self._cache so
    repeated lookups return the same object. Unlike attribute access,
    a missing key raises KeyError (from the self._data lookup).
    """
    return _wrap_dict_value(self._cache, key, self._data[key])

def __setitem__(self, key: str, value: Any) -> None:
"""Bracket notation assignment. Invalidates cache for the key."""
Expand Down Expand Up @@ -206,6 +203,13 @@ def from_dict(cls, data: dict[str, Any]) -> DotDict:
"""
Create DotDict from a regular dict, recursively wrapping nested dicts.

The eager recursion (and especially the dict-in-list wrap below) is
required: ``__getattribute__`` only lazily wraps top-level dict
*values*, not dicts nested inside lists. Templates iterate
list-of-dicts (e.g. ``site.data.tracks``) and access ``.attr`` on
each item, which only works if those items are pre-wrapped here.
See ``plan/foundation-leaf-hygiene.md`` Sprint 0 Q2 for details.

Args:
data: Source dictionary

Expand Down
25 changes: 11 additions & 14 deletions bengal/utils/primitives/lru_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,22 +156,19 @@ def get_or_set(
with self._lock:
if not self._enabled:
self._misses += 1
if pass_key:
return factory(key)
return factory()
return factory(key) if pass_key else factory()

if key in self._cache:
# Check TTL
if self._ttl is not None:
ts = self._timestamps.get(key, 0)
if time.monotonic() - ts > self._ttl:
del self._cache[key]
del self._timestamps[key]
# Fall through to compute
else:
self._cache.move_to_end(key)
self._hits += 1
return self._cache[key]
# Expired entries are evicted and treated as a miss.
expired = (
self._ttl is not None
and time.monotonic() - self._timestamps.get(key, 0) > self._ttl
)
if expired:
del self._cache[key]
del self._timestamps[key]
else:
# Cache hit
self._cache.move_to_end(key)
self._hits += 1
return self._cache[key]
Expand Down
Loading
Loading