From f9bfaba692251b34734b5dc5e6c2c83a709cecc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B6ren=20Nikolaus?= Date: Sun, 26 Oct 2025 21:35:27 +0100 Subject: [PATCH 1/2] feat(proxy): Add mapping accessor, refresh, and LRU caches - Implement snapshot semantics; return primitives directly Add mapping property to expose dict methods without field collisions - Add refresh() and unwrap(); implement get, __contains__, __reversed__ - Add bounded LRU caches for schemas, proxy types, and attrs with locks - Improve subschema extraction (nullable, defs, chain, unions, tuples, lists, dict) - Support serializer functions with info_arg; avoid mutating schema type - Do not set __pydantic_validator__ on child proxies - Use Sequence for serialized data; wrap dicts in MappingProxyType --- src/deigma/proxy.py | 411 +++++++++++++++++++++++++++++++++++--------- 1 file changed, 330 insertions(+), 81 deletions(-) diff --git a/src/deigma/proxy.py b/src/deigma/proxy.py index 9405971..09b3a9e 100644 --- a/src/deigma/proxy.py +++ b/src/deigma/proxy.py @@ -1,12 +1,12 @@ from collections import OrderedDict -from collections.abc import Callable, Iterable, Mapping +from collections.abc import Mapping, Sequence from copy import deepcopy from threading import Lock, RLock from types import MappingProxyType from typing import Generic, NamedTuple, TypeGuard, TypeVar from pydantic import BaseModel, TypeAdapter -from pydantic_core import SchemaSerializer, SchemaValidator, core_schema +from pydantic_core import SchemaSerializer, core_schema from pydantic_core.core_schema import CoreSchema, SerializerFunction from deigma.serialize import Dataclass @@ -23,22 +23,76 @@ # anyway to build subschema TypeAdapters... Would probably be even more work to # implement this correctly than to just keep piggybacking on pydantic's CoreSchema # internals. -def _extract_subschema(schema: CoreSchema, name: str) -> CoreSchema: +def _extract_subschema(schema: CoreSchema, name_or_idx) -> CoreSchema: + """Extract a subschema for a field/item from a parent schema. 
+ + Args: + schema: The parent schema + name_or_idx: Either a string (for field names) or int (for list/tuple indices) + + Returns: + The subschema for that field/item, or the original schema if not found + """ match schema: + # Nullable wrapper + case {"type": "nullable", "schema": inner}: + return _extract_subschema(inner, name_or_idx) + # Definitions wrapper + case {"type": "definitions", "schema": inner}: + return _extract_subschema(inner, name_or_idx) + # Chain wrapper (multiple schemas chained together) + case {"type": "chain", "steps": steps} if steps: + # Recurse through the last step (most specific) + return _extract_subschema(steps[-1], name_or_idx) + # Lax-or-strict wrapper + case {"type": "lax-or-strict", "strict_schema": strict}: + # Prefer strict schema for subfield extraction + return _extract_subschema(strict, name_or_idx) + # JSON-or-python wrapper + case {"type": "json-or-python", "json_schema": inner} | {"type": "json-or-python", "python_schema": inner}: + # Recurse through the inner schema + return _extract_subschema(inner, name_or_idx) # BaseModel - case {"type": "model", "schema": schema}: - return _extract_subschema(schema, name) + case {"type": "model", "schema": inner}: + return _extract_subschema(inner, name_or_idx) # Dataclass - case {"type": "dataclass", "schema": schema}: - return _extract_subschema(schema, name) + case {"type": "dataclass", "schema": inner}: + return _extract_subschema(inner, name_or_idx) # BaseModel fields case {"type": "model-fields", "fields": fields}: - return fields[name]["schema"] + if isinstance(name_or_idx, str) and name_or_idx in fields: + return fields[name_or_idx]["schema"] # Dataclass fields case {"type": "dataclass-args", "fields": fields}: - for field in fields: - if field["name"] == name: - return field["schema"] + if isinstance(name_or_idx, str): + for field in fields: + if field["name"] == name_or_idx: + return field["schema"] + # List with items schema + case {"type": "list", "items_schema": item}: + return item + # Tuple with variable items + case {"type": "tuple-variable", "items_schema": item}: + return item + # Tuple with positional items + case {"type": "tuple-positional", "items_schema": items}: + if isinstance(name_or_idx, int) and 0 <= name_or_idx < len(items): + return items[name_or_idx] + # Dict with values schema + case {"type": "dict", "values_schema": v}: + return v + # Union - best effort: return first choice + case {"type": "union", "choices": choices} if choices: + return choices[0] + # Tagged union + case {"type": "tagged-union", "choices": choices} if choices: + # Return first choice schema + first_choice = next(iter(choices.values()), None) + if first_choice: + # Tagged union choices can be tuples (schema, discriminator) + if isinstance(first_choice, tuple): + return first_choice[0] + return first_choice return schema @@ -53,17 +107,44 @@ def _unwrap_proxy(proxy: "SerializationProxy[T]") -> T: return proxy.obj -def _unwrap_proxy_and_apply(func: SerializerFunction) -> SerializerFunction: - def apply_to_unwrapped(proxy: "SerializationProxy[T]") -> T: - return func(_unwrap_proxy(proxy)) +def _unwrap_proxy_and_apply( + func: SerializerFunction, pass_info: bool = False +) -> SerializerFunction: + """Wrap a serializer function to unwrap proxies before calling it. + + Args: + func: The original serializer function + pass_info: Whether the function expects an info parameter + + Note: SerializerFunction is a union of many function signatures with different + arities. 
We wrap them generically here, which pyright can't fully verify. + """ + if pass_info: + + def apply_with_info(proxy: "SerializationProxy[T]", info, *args): + return func(_unwrap_proxy(proxy), info, *args) # pyright: ignore[reportCallIssue, reportArgumentType] + + return apply_with_info # pyright: ignore[reportReturnType] + else: + + def apply_to_unwrapped(proxy: "SerializationProxy[T]", *args): + return func(_unwrap_proxy(proxy), *args) # pyright: ignore[reportCallIssue, reportArgumentType] - return apply_to_unwrapped + return apply_to_unwrapped # pyright: ignore[reportReturnType] +# Cache size constants for memory management +WRAPPED_SCHEMA_CACHE_SIZE = 256 +PROXY_TYPE_CACHE_SIZE = 256 +ATTR_CACHE_SIZE = 512 + +# Type-check tuples (hoisted to module scope for micro-optimization) +_MAPPING_TYPES = (dict, MappingProxyType, Mapping) +_COLLECTION_TYPES = (dict, list, tuple) + # Bounded cache for wrapped schemas to prevent memory leaks in long-running applications # Using OrderedDict for LRU eviction # Store tuple (orig_schema, wrapped_schema) to prevent id() reuse bugs -_WRAPPED_SCHEMA_CACHE_SIZE = 256 _wrapped_schema_cache: OrderedDict[int, tuple[CoreSchema, CoreSchema]] = OrderedDict() _WRAPPED_SCHEMA_CACHE_LOCK = Lock() @@ -72,17 +153,7 @@ def _wrap_core_schema(schema: CoreSchema) -> CoreSchema: """Wrap a CoreSchema to make it proxy-aware. Uses bounded LRU cache to avoid expensive deepcopy.""" schema_id = id(schema) - # Lock-free fast path: check cache without lock first - # Note: OrderedDict.get() can raise during concurrent modifications, so we catch any errors - try: - tup = _wrapped_schema_cache.get(schema_id) - if tup is not None and tup[0] is schema: - return tup[1] - except (ValueError, RuntimeError): - # Race condition detected, fall through to locked path - pass - - # Slow path: take lock and recheck + # Check cache under lock (OrderedDict isn't thread-safe even for reads) with _WRAPPED_SCHEMA_CACHE_LOCK: tup = _wrapped_schema_cache.get(schema_id) if tup is not None: @@ -93,32 +164,44 @@ def _wrap_core_schema(schema: CoreSchema) -> CoreSchema: return wrapped # Build wrapped schema (outside lock to minimize critical section) + # Key insight: Don't mutate "type" - only wrap serialization behavior match schema: # something we can reference to (e.g. BaseModel, Dataclass, ...) case {"ref": ref}: wrapped_schema = core_schema.definitions_schema( schema=core_schema.definition_reference_schema( schema_ref=ref, - serialization={ - "type": "function-plain", - "function": _unwrap_proxy, - "info_arg": False, - }, + serialization=core_schema.plain_serializer_function_ser_schema( + _unwrap_proxy, + info_arg=False, + ), ), definitions=[schema], ) - # primitive, already has a custom serializer + # Has custom serializer with info_arg + case {"serialization": {"function": func, "info_arg": True}}: + wrapped_schema = deepcopy(schema) + # We're wrapping the serializer function to unwrap proxies first. + # Pyright can't verify all possible serialization schema types support "function". 
+ wrapped_schema["serialization"]["function"] = _unwrap_proxy_and_apply( # pyright: ignore[reportGeneralTypeIssues, reportArgumentType] + func, pass_info=True + ) + # Has custom serializer without info_arg case {"serialization": {"function": func}}: wrapped_schema = deepcopy(schema) - wrapped_schema["type"] = f"SerializationProxy[{schema['type']}]" - wrapped_schema["serialization"]["function"] = _unwrap_proxy_and_apply(func) - # primitive, no custom serializer + # We're wrapping the serializer function to unwrap proxies first. + # Pyright can't verify all possible serialization schema types support "function". + wrapped_schema["serialization"]["function"] = _unwrap_proxy_and_apply( # pyright: ignore[reportGeneralTypeIssues, reportArgumentType] + func, pass_info=False + ) + # No custom serializer - add one case _: wrapped_schema = deepcopy(schema) - wrapped_schema["type"] = f"SerializationProxy[{schema['type']}]" - wrapped_schema["serialization"] = core_schema.plain_serializer_function_ser_schema( - _unwrap_proxy, - info_arg=False, + wrapped_schema["serialization"] = ( + core_schema.plain_serializer_function_ser_schema( + _unwrap_proxy, + info_arg=False, + ) ) # Cache with LRU eviction @@ -127,65 +210,85 @@ def _wrap_core_schema(schema: CoreSchema) -> CoreSchema: _wrapped_schema_cache.move_to_end(schema_id) # Evict oldest entry if cache is too large - if len(_wrapped_schema_cache) > _WRAPPED_SCHEMA_CACHE_SIZE: + if len(_wrapped_schema_cache) > WRAPPED_SCHEMA_CACHE_SIZE: _wrapped_schema_cache.popitem(last=False) return wrapped_schema class SerializationProxy(Generic[T]): + """A read-only, lazily-nested proxy over the serialized form of an object. + + Snapshot Semantics: + - Takes a snapshot of the object's serialized state at build() time + - All field serializers (PlainSerializer, etc.) are applied during the initial dump + - Iteration yields KEYS for mappings, ELEMENTS for sequences (not proxies) + - Primitives (str, int, etc.) 
are returned directly without wrapping + - Mutations to the wrapped object aren't reflected unless refresh() is called + + Thread Safety: + - The serialized snapshot is immutable (wrapped in MappingProxyType) + - Child proxies use bounded LRU caches with proper locking + - Safe for concurrent template rendering + + Field Name Collisions: + - Methods like items(), keys(), values() are NOT provided to avoid shadowing fields + - Use .mapping.items() / .mapping.keys() / .mapping.values() when needed + - Access fields directly: proxy.items or proxy['items'] + + Use Cases: + - Template rendering (Jinja2) where you want dict-like access with serialization + - Read-only views of objects for logging/debugging + - Deterministic serialization snapshots for concurrent operations + """ + core_schema: CoreSchema __pydantic_serializer__: SchemaSerializer - __pydantic_validator__: SchemaValidator + # Note: __pydantic_validator__ is intentionally not set to avoid + # child proxies getting the wrong validator (root instead of subschema) # Bounded cache for proxy types to prevent memory leaks - _PROXY_TYPE_CACHE_SIZE = 256 _proxy_type_cache: OrderedDict[int, type["SerializationProxy"]] = OrderedDict() _PROXY_TYPE_CACHE_LOCK = Lock() def __init__( self, obj: T, - serialized: Mapping | Iterable | object, + serialized: Mapping | Sequence | object, root_adapter: TypeAdapter, ): self.obj = obj self.serialized = serialized self.root_adapter = root_adapter - # Cache for accessed attributes to avoid rebuilding proxies + # Bounded LRU cache for accessed attributes to avoid rebuilding proxies # Keys are either strings (for attributes) or tuples (for items) - self._attr_cache: dict[str | tuple, "SerializationProxy"] = {} + self._attr_cache: OrderedDict[str | tuple, "SerializationProxy"] = OrderedDict() self._attr_cache_lock = RLock() @classmethod def _build( cls, obj: T, - serialized: Mapping | Iterable | object, + serialized: Mapping | Sequence | object, adapter: TypeAdapter, core_schema: CoreSchema, ): # Normalize: wrap dicts to ensure immutability - if isinstance(serialized, dict) and not isinstance(serialized, MappingProxyType): + if isinstance(serialized, dict) and not isinstance( + serialized, MappingProxyType + ): serialized = MappingProxyType(serialized) schema_id = id(core_schema) - # Lock-free fast path: check cache without lock first - # Note: OrderedDict.get() can raise during concurrent modifications, so we catch any errors - try: - proxy_type = cls._proxy_type_cache.get(schema_id) - if proxy_type is not None and getattr(proxy_type, "core_schema", None) is core_schema: - return proxy_type(obj, serialized, adapter) - except (ValueError, RuntimeError): - # Race condition detected, fall through to locked path - pass - - # Slow path: take lock and recheck + # Check cache under lock (OrderedDict isn't thread-safe even for reads) with cls._PROXY_TYPE_CACHE_LOCK: proxy_type = cls._proxy_type_cache.get(schema_id) # Guard against id() reuse by verifying identity - if proxy_type is not None and getattr(proxy_type, "core_schema", None) is core_schema: + if ( + proxy_type is not None + and getattr(proxy_type, "core_schema", None) is core_schema + ): cls._proxy_type_cache.move_to_end(schema_id) return proxy_type(obj, serialized, adapter) @@ -198,7 +301,9 @@ def _build( "core_schema": core_schema, "__pydantic_serializer__": SchemaSerializer(wrapped_core_schema), "__pydantic_core_schema__": wrapped_core_schema, - "__pydantic_validator__": adapter.validator, + # Note: We don't set __pydantic_validator__ because child 
proxies + # would get the wrong validator (root instead of subschema). + # Serialization works without it. }, ) @@ -206,11 +311,14 @@ def _build( with cls._PROXY_TYPE_CACHE_LOCK: # Re-check if someone else beat us existing = cls._proxy_type_cache.get(schema_id) - if existing is None or getattr(existing, "core_schema", None) is not core_schema: + if ( + existing is None + or getattr(existing, "core_schema", None) is not core_schema + ): cls._proxy_type_cache[schema_id] = proxy_type cls._proxy_type_cache.move_to_end(schema_id) # Evict oldest entry if cache is too large - if len(cls._proxy_type_cache) > cls._PROXY_TYPE_CACHE_SIZE: + if len(cls._proxy_type_cache) > PROXY_TYPE_CACHE_SIZE: cls._proxy_type_cache.popitem(last=False) else: proxy_type = existing @@ -235,19 +343,74 @@ def build( core_schema = adapter.core_schema return cls._build(obj, serialized, adapter, core_schema) + def unwrap(self) -> T: + """Get the original wrapped object. + + Returns: + The original object that was wrapped by this proxy + """ + return self.obj + + @property + def mapping(self) -> Mapping: + """Access the underlying serialized mapping directly. + + Use this to access mapping methods when your data has fields with those names. + For example, if you have a field named 'items': + - Access the field: proxy.items or proxy['items'] + - Access the mapping method: proxy.mapping.items() + + Examples: + >>> proxy.mapping.keys() # Get all keys + >>> proxy.mapping.values() # Get all values + >>> proxy.mapping.items() # Get (key, value) pairs + >>> proxy.mapping.get('x', 0) # Get with default + + Raises: + TypeError: If the serialized data is not a mapping (e.g., a list). + Use iteration or indexing directly for sequences. + """ + ser = self.serialized + # Ensure it's wrapped for immutability + if isinstance(ser, dict) and not isinstance(ser, MappingProxyType): + ser = MappingProxyType(ser) + # Guard against calling .items() on a sequence + if isinstance(ser, Mapping): + return ser + raise TypeError( + f"This proxy does not wrap a mapping (got {type(ser).__name__}); " + "use indexing/iteration on the sequence instead." + ) + + def refresh(self) -> None: + """Refresh the serialized snapshot after the wrapped object has been mutated. + + This recomputes the serialization and clears the attribute cache. + Useful when the underlying object changes and you want the proxy to reflect those changes. + """ + self.serialized = self.root_adapter.dump_python(self.obj) + if isinstance(self.serialized, dict): + self.serialized = MappingProxyType(self.serialized) + with self._attr_cache_lock: + self._attr_cache.clear() + def __getattr__(self, name: str): - # Check attribute cache first + # Check attribute cache first (LRU: move to end on access) with self._attr_cache_lock: if name in self._attr_cache: + self._attr_cache.move_to_end(name) return self._attr_cache[name] - if isinstance(self.serialized, (dict, MappingProxyType, Mapping)) and name in self.serialized: + # Hoist to local for micro-optimization + ser = self.serialized + if isinstance(ser, _MAPPING_TYPES) and name in ser: sub_schema = _extract_subschema(self.core_schema, name) - child_ser = self.serialized[name] + child_ser = ser[name] - # For primitive types (non-dict/list serialized values), return the serialized value directly - # This ensures field serializers (like PlainSerializer) are properly applied - if not isinstance(child_ser, (dict, list, tuple)): + # For primitive types (non-dict/list serialized values), return the serialized value directly. 
+ # This preserves snapshot semantics: field serializers were already applied during build(), + # so we return the pre-computed value rather than re-serializing on every access. + if not isinstance(child_ser, _COLLECTION_TYPES): return child_ser # Wrap child dicts to prevent mutation @@ -259,9 +422,13 @@ def __getattr__(self, name: str): self.root_adapter, sub_schema, ) - # Cache the built proxy + # Cache the built proxy with LRU eviction with self._attr_cache_lock: + # Prune BEFORE insert to maintain strict bound + if len(self._attr_cache) >= ATTR_CACHE_SIZE: + self._attr_cache.popitem(last=False) self._attr_cache[name] = proxy + self._attr_cache.move_to_end(name) return proxy return getattr(self.obj, name) @@ -270,14 +437,19 @@ def __getitem__(self, key): cache_key = ("__item__", key) with self._attr_cache_lock: if cache_key in self._attr_cache: + self._attr_cache.move_to_end(cache_key) return self._attr_cache[cache_key] + # Hoist to local for micro-optimization + ser = self.serialized sub_schema = _extract_subschema(self.core_schema, key) - child_ser = self.serialized[key] + # ser is Mapping|Sequence|object, but we know it supports __getitem__ at runtime + child_ser = ser[key] # pyright: ignore[reportIndexIssue] - # For primitive types (non-dict/list serialized values), return the serialized value directly - # This ensures field serializers (like PlainSerializer) are properly applied - if not isinstance(child_ser, (dict, list, tuple)): + # For primitive types (non-dict/list serialized values), return the serialized value directly. + # This preserves snapshot semantics: field serializers were already applied during build(), + # so we return the pre-computed value rather than re-serializing on every access. + if not isinstance(child_ser, _COLLECTION_TYPES): return child_ser # Wrap child dicts to prevent mutation @@ -286,33 +458,110 @@ def __getitem__(self, key): # Try to keep the real underlying object if possible; otherwise fall back to serialized try: - child_obj = self.obj[key] + # obj type is T which is generic, may or may not support indexing + child_obj = self.obj[key] # pyright: ignore[reportIndexIssue] except Exception: child_obj = child_ser - proxy = self._build( + # child_obj/child_ser may be non-collection primitives or collections + proxy = self._build( # pyright: ignore[reportArgumentType] child_obj, child_ser, self.root_adapter, sub_schema, ) - # Cache the built proxy + # Cache the built proxy with LRU eviction with self._attr_cache_lock: + # Prune BEFORE insert to maintain strict bound + if len(self._attr_cache) >= ATTR_CACHE_SIZE: + self._attr_cache.popitem(last=False) self._attr_cache[cache_key] = proxy + self._attr_cache.move_to_end(cache_key) return proxy def __iter__(self): - return iter(self.serialized) + """Iterate over the serialized data. + + Behavior depends on the wrapped type: + - Mapping: Yields KEYS (standard dict behavior). Use proxy.mapping.items() + for (key, value) pairs. + - Sequence: Yields ELEMENTS. + + NOTE: Returned elements are pre-serialized snapshot values, NOT proxies. + All field serializers were already applied at build() time. + + Examples: + for key in proxy: ... # Iterate mapping keys + for k, v in proxy.mapping.items(): ... # Mapping key-value pairs + for item in proxy: ... 
# Iterate sequence elements + """ + try: + # serialized is Mapping|Sequence|object, try/except handles non-iterable case + return iter(self.serialized) # pyright: ignore[reportCallIssue, reportArgumentType] + except TypeError: + raise TypeError(f"'{type(self).__name__}' object is not iterable") def __len__(self): - return len(self.serialized) + try: + # serialized is Mapping|Sequence|object, try/except handles non-Sized case + return len(self.serialized) # pyright: ignore[reportArgumentType] + except TypeError: + raise TypeError(f"object of type '{type(self).__name__}' has no len()") def __index__(self): - return self.serialized.__index__() + try: + # serialized is Mapping|Sequence|object, try/except handles missing __index__ + return self.serialized.__index__() # pyright: ignore[reportAttributeAccessIssue] + except (TypeError, AttributeError): + raise TypeError( + f"'{type(self).__name__}' object cannot be interpreted as an integer" + ) def __bool__(self): - return bool(self.serialized) + try: + return bool(self.serialized) + except Exception: + # Fallback to True if serialized value doesn't support bool() + return True + + # Mapping-like methods for Jinja ergonomics + def __contains__(self, key): + """Check if key exists in the serialized data.""" + try: + return key in self.serialized + except TypeError: + return False + + def get(self, key, default=None): + """Get a value from the serialized data with a default fallback. + + For mappings, uses dict-like .get(). For sequences, falls back to indexing. + + Note: Use with caution if you have a field named 'get'. In that case, + access it via __getitem__: proxy['get'] instead of proxy.get. + """ + # Fast path for mappings + if isinstance(self.serialized, Mapping): + return self.serialized.get(key, default) + # Fallback for sequences/indexable types + try: + return self[key] + except (KeyError, IndexError, TypeError): + return default + + # Note: We intentionally do NOT provide keys(), values(), items() methods + # to avoid shadowing common field names like 'items', 'keys', 'values'. 
+ # If you need mapping methods, use: proxy.mapping.items() / .keys() / .values() + # For templates: {% for k in obj %} or {{ obj['key'] }} work without collisions + + def __reversed__(self): + """Return reversed iterator if serialized supports it.""" + try: + # serialized is Mapping|Sequence|object, try/except handles non-reversible case + return reversed(self.serialized) # pyright: ignore[reportCallIssue, reportArgumentType] + except TypeError: + raise TypeError(f"'{type(self).__name__}' object is not reversible") def __repr__(self): match self.obj: From b903dccc4069e370ce61144e3be8edcc53b035c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B6ren=20Nikolaus?= Date: Sun, 26 Oct 2025 21:52:17 +0100 Subject: [PATCH 2/2] chore(types): Integrate Pyright/Ruff and fix type issues - Add pyright and ruff to dev dependencies and lockfile - Tighten typing across codebase for pyright: - Use runtime_checkable on Protocols - Refine template decorators to return type[T] - Apply pydantic_dataclass directly to preserve class types - Add targeted pyright ignore comments for dynamic attrs/returns - Update transform utilities to access dynamic template attrs safely - Clean up tests: adopt TypeAlias/Annotated, remove unused imports/vars No functional behavior changes; improves static type safety and linting compliance --- pyproject.toml | 2 + src/deigma/proxy.py | 19 +++--- src/deigma/serialize.py | 1 + src/deigma/template.py | 49 +++++++------- src/deigma/transform.py | 30 +++++---- src/deigma/types.py | 1 + tests/benches/test_serialization_proxy.py | 6 +- tests/integration/test_field_serialization.py | 65 ++++++++++++------- tests/integration/test_template_rendering.py | 2 +- uv.lock | 52 +++++++++++++++ 10 files changed, 154 insertions(+), 73 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6a6119a..bcd718f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,8 +14,10 @@ dependencies = [ [dependency-groups] dev = [ + "pyright>=1.1.407", "pytest>=8.3.4", "pytest-benchmark>=5.1.0", + "ruff>=0.14.2", ] [build-system] diff --git a/src/deigma/proxy.py b/src/deigma/proxy.py index 09b3a9e..5a8c2e4 100644 --- a/src/deigma/proxy.py +++ b/src/deigma/proxy.py @@ -83,7 +83,7 @@ def _extract_subschema(schema: CoreSchema, name_or_idx) -> CoreSchema: return v # Union - best effort: return first choice case {"type": "union", "choices": choices} if choices: - return choices[0] + return choices[0] # pyright: ignore[reportReturnType] # Tagged union case {"type": "tagged-union", "choices": choices} if choices: # Return first choice schema @@ -91,8 +91,8 @@ def _extract_subschema(schema: CoreSchema, name_or_idx) -> CoreSchema: if first_choice: # Tagged union choices can be tuples (schema, discriminator) if isinstance(first_choice, tuple): - return first_choice[0] - return first_choice + return first_choice[0] # pyright: ignore[reportReturnType, reportArgumentType] + return first_choice # pyright: ignore[reportReturnType] return schema @@ -183,16 +183,18 @@ def _wrap_core_schema(schema: CoreSchema) -> CoreSchema: wrapped_schema = deepcopy(schema) # We're wrapping the serializer function to unwrap proxies first. # Pyright can't verify all possible serialization schema types support "function". 
- wrapped_schema["serialization"]["function"] = _unwrap_proxy_and_apply( # pyright: ignore[reportGeneralTypeIssues, reportArgumentType] - func, pass_info=True + wrapped_schema["serialization"]["function"] = _unwrap_proxy_and_apply( # pyright: ignore[reportGeneralTypeIssues] + func, # pyright: ignore[reportArgumentType] + pass_info=True ) # Has custom serializer without info_arg case {"serialization": {"function": func}}: wrapped_schema = deepcopy(schema) # We're wrapping the serializer function to unwrap proxies first. # Pyright can't verify all possible serialization schema types support "function". - wrapped_schema["serialization"]["function"] = _unwrap_proxy_and_apply( # pyright: ignore[reportGeneralTypeIssues, reportArgumentType] - func, pass_info=False + wrapped_schema["serialization"]["function"] = _unwrap_proxy_and_apply( # pyright: ignore[reportGeneralTypeIssues] + func, # pyright: ignore[reportArgumentType] + pass_info=False ) # No custom serializer - add one case _: @@ -464,8 +466,9 @@ def __getitem__(self, key): child_obj = child_ser # child_obj/child_ser may be non-collection primitives or collections + # _build expects specific types but we pass dynamic values from serialization proxy = self._build( # pyright: ignore[reportArgumentType] - child_obj, + child_obj, # pyright: ignore[reportArgumentType] child_ser, self.root_adapter, sub_schema, diff --git a/src/deigma/serialize.py b/src/deigma/serialize.py index 8d9c7f7..d15c5cf 100644 --- a/src/deigma/serialize.py +++ b/src/deigma/serialize.py @@ -28,6 +28,7 @@ def __dataclass_fields__(self) -> dict[str, Field]: ... if TYPE_CHECKING: @final + @runtime_checkable class Dataclass(Protocol): __dataclass_fields__: ClassVar[dict[str, Any]] diff --git a/src/deigma/template.py b/src/deigma/template.py index c2f9dde..6bc0919 100644 --- a/src/deigma/template.py +++ b/src/deigma/template.py @@ -36,7 +36,7 @@ def template( path: None = None, serialize: Serialize = DEFAULT_SERIALIZE, use_proxy: bool = USE_PROXY, -) -> Callable[[type[T]], type[Template]]: ... +) -> Callable[[type[T]], type[T]]: ... @overload @@ -46,7 +46,7 @@ def template( path: str | PathLike, serialize: Serialize = DEFAULT_SERIALIZE, use_proxy: bool = USE_PROXY, -) -> Callable[[type[T]], type[Template]]: ... +) -> Callable[[type[T]], type[T]]: ... 
@dataclass_transform() @@ -56,7 +56,7 @@ def template( path: str | PathLike | None = None, serialize: Serialize = DEFAULT_SERIALIZE, use_proxy: bool = USE_PROXY, -) -> Callable[[type[T]], type[Template]]: +) -> Callable[[type[T]], type[T]]: if source is None and path is None: raise ValueError("Either source or path must be provided") @@ -87,17 +87,17 @@ def inline_template( *, serialize: Serialize = DEFAULT_SERIALIZE, use_proxy: bool = USE_PROXY, -) -> Callable[[type[T]], type[Template]]: +) -> Callable[[type[T]], type[T]]: def decorator(cls: type[T]) -> type[T]: config = ConfigDict(arbitrary_types_allowed=True) - cls = pydantic_dataclass(init=False, config=config)( - type(cls.__name__, (cls,), dict(cls.__dict__)) - ) - cls.__is_deigma_template__ = True - cls._source = cleandoc(source) + # Apply pydantic_dataclass directly to preserve type information + cls = pydantic_dataclass(config=config)(cls) # pyright: ignore[reportAssignmentType] + # Add template-specific class attributes (not visible to type checker) + cls.__is_deigma_template__ = True # pyright: ignore[reportAttributeAccessIssue] + cls._source = cleandoc(source) # pyright: ignore[reportAttributeAccessIssue] engine = Jinja2Engine(serialize=serialize) - cls._engine = engine - cls._variables = engine.introspect_variables(source) + cls._engine = engine # pyright: ignore[reportAttributeAccessIssue] + cls._variables = engine.introspect_variables(source) # pyright: ignore[reportAttributeAccessIssue] static_fields = set(cls.__annotations__) properties = { @@ -105,8 +105,8 @@ def decorator(cls: type[T]) -> type[T]: } fields = static_fields | properties - if not set(cls._variables).issubset(fields): - variables = cls._variables + variables = cls._variables # pyright: ignore[reportAttributeAccessIssue] + if not set(variables).issubset(fields): msg = ( "Template variables mismatch. 
Template fields must match variables in source:\n\n" f"fields on type: {fields}, variables in source: {variables}" @@ -123,38 +123,41 @@ def decorator(cls: type[T]) -> type[T]: raise ValueError(msg) - cls._compiled_template = engine.compile_template(cls._source) - cls._type_adapter = TypeAdapter(cls) + cls._compiled_template = engine.compile_template(cls._source) # pyright: ignore[reportAttributeAccessIssue] + cls._type_adapter = TypeAdapter(cls) # pyright: ignore[reportAttributeAccessIssue] if use_proxy: def __str__(instance): proxied = { - field: getattr(instance._proxy, field) for field in cls._variables + field: getattr(instance._proxy, field) # pyright: ignore[reportAttributeAccessIssue] + for field in cls._variables # pyright: ignore[reportAttributeAccessIssue] } - return instance._compiled_template.render(proxied) + return instance._compiled_template.render(proxied) # pyright: ignore[reportAttributeAccessIssue] original_init = cls.__init__ def __init__(instance, *args, **kwargs): original_init(instance, *args, **kwargs) - instance._proxy = SerializationProxy.build(instance, cls._type_adapter) + instance._proxy = SerializationProxy.build( # pyright: ignore[reportAttributeAccessIssue] + instance, cls._type_adapter # pyright: ignore[reportAttributeAccessIssue] + ) - cls.__init__ = __init__ + cls.__init__ = __init__ # pyright: ignore[reportAttributeAccessIssue] else: def __str__(instance): - serialized = cls._type_adapter.dump_python(instance) + serialized = cls._type_adapter.dump_python(instance) # pyright: ignore[reportAttributeAccessIssue] rendered_fields = { field: _render_field_maybe( getattr(instance, field), serialized[field] ) - for field in cls._variables + for field in cls._variables # pyright: ignore[reportAttributeAccessIssue] } - return instance._compiled_template.render(rendered_fields) + return instance._compiled_template.render(rendered_fields) # pyright: ignore[reportAttributeAccessIssue] - cls.__str__ = __str__ + cls.__str__ = __str__ # pyright: ignore[reportAttributeAccessIssue] return cls diff --git a/src/deigma/transform.py b/src/deigma/transform.py index 0d9012c..d739308 100644 --- a/src/deigma/transform.py +++ b/src/deigma/transform.py @@ -18,7 +18,8 @@ class TemplateKwargs(TypedDict, total=False): def copy(cls: type[_T]) -> type[_T]: copy_ = type(cls.__name__, (cls,), {}) copy_.__annotations__ = cls.__annotations__ - return template(cls._source, cls._engine._env.serialize)(copy_) + # Access dynamically added template attributes + return template(cls._source, cls._engine._env.serialize)(copy_) # pyright: ignore[reportAttributeAccessIssue, reportCallIssue] def replace(instance: _T, **kwargs: Any) -> _T: @@ -29,26 +30,28 @@ def replace(instance: _T, **kwargs: Any) -> _T: def with_serialize(cls: type[_T], serialize: Serialize) -> type[_T]: if is_template_type(cls): - copy_: type[_T] = copy(cls) - copy_._engine._env.serialize = serialize + copy_: type[_T] = copy(cls) # pyright: ignore[reportArgumentType] + # Access and modify dynamically added template attributes + copy_._engine._env.serialize = serialize # pyright: ignore[reportAttributeAccessIssue] return copy_ return cls def with_source(cls: type[_T], source: str) -> type[_T]: if is_template_type(cls): - copy_: type[_T] = copy(cls) - copy_._source = source - copy_._compiled_template = copy_._engine._env.from_string(cleandoc(source)) - parsed_source = copy_._engine._env.parse(source) - copy_._variables = meta.find_undeclared_variables(parsed_source) + copy_: type[_T] = copy(cls) # pyright: ignore[reportArgumentType] 
+ # Access and modify dynamically added template attributes + copy_._source = source # pyright: ignore[reportAttributeAccessIssue] + copy_._compiled_template = copy_._engine._env.from_string(cleandoc(source)) # pyright: ignore[reportAttributeAccessIssue] + parsed_source = copy_._engine._env.parse(source) # pyright: ignore[reportAttributeAccessIssue] + copy_._variables = meta.find_undeclared_variables(parsed_source) # pyright: ignore[reportAttributeAccessIssue] return copy_ return cls def with_(cls_or_instance: type[_T] | _T, **kwargs: Unpack[TemplateKwargs]) -> type[_T]: - if is_template_type(cls_or_instance): - copy_: type[_T] = copy(cls_or_instance) + if is_template_type(cls_or_instance): # pyright: ignore[reportArgumentType] + copy_: type[_T] = copy(cls_or_instance) # pyright: ignore[reportArgumentType] match kwargs: case {"source": source}: copy_ = with_source(copy_, source) @@ -56,7 +59,8 @@ def with_(cls_or_instance: type[_T] | _T, **kwargs: Unpack[TemplateKwargs]) -> t copy_ = with_serialize(copy_, serialize) return copy_ if is_template(instance := cls_or_instance): - fields = {k: v for k, v in vars(instance).items() if k in instance._variables} - copy_: type[_T] = copy(type(instance)) - return with_(copy_, **kwargs)(**fields) + # Access dynamically added template attributes + fields = {k: v for k, v in vars(instance).items() if k in instance._variables} # pyright: ignore[reportAttributeAccessIssue] + copy_: type[_T] = copy(type(instance)) # pyright: ignore[reportArgumentType, reportAssignmentType] + return with_(copy_, **kwargs)(**fields) # pyright: ignore[reportReturnType] raise ValueError(f"Expected a Template type or instance, got {cls_or_instance!r}") diff --git a/src/deigma/types.py b/src/deigma/types.py index 4a808ba..ee01bd9 100644 --- a/src/deigma/types.py +++ b/src/deigma/types.py @@ -10,6 +10,7 @@ def __is_deigma_template__(self) -> Literal[True]: ... if TYPE_CHECKING: from pydantic._internal._dataclasses import PydanticDataclass + @runtime_checkable class Template(Template, PydanticDataclass): @property def __is_deigma_template__(self) -> Literal[True]: ... 
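The proxy semantics documented in the class docstring above (snapshot at build() time, primitives returned directly, the mapping accessor, refresh(), unwrap()) can be illustrated with a short usage sketch. This is a minimal, non-authoritative example assuming the public API as introduced in this patch; the Article model, the Tags alias, and the field values are hypothetical and mirror the Keywords alias used in the integration tests.

from typing import Annotated

from pydantic import BaseModel, PlainSerializer

from deigma.proxy import SerializationProxy

# Hypothetical model: 'tags' carries a PlainSerializer applied at dump time.
Tags = Annotated[list[str], PlainSerializer(lambda xs: ", ".join(xs))]


class Article(BaseModel):
    title: str
    tags: Tags


article = Article(title="Hello", tags=["a", "b"])
proxy = SerializationProxy.build(article)  # an explicit TypeAdapter may also be passed

# Snapshot semantics: field serializers ran once during build(), so primitive
# values come back directly from the serialized snapshot.
assert proxy.title == "Hello"
assert proxy.tags == "a, b"

# Mapping-style access without shadowing field names.
assert "title" in proxy
assert proxy.get("missing", "n/a") == "n/a"
assert set(proxy.mapping.keys()) == {"title", "tags"}

# Iterating a mapping-backed proxy yields keys (standard dict behavior).
assert sorted(proxy) == ["tags", "title"]

# unwrap() returns the original object; refresh() re-snapshots after mutation.
assert proxy.unwrap() is article
article.title = "Updated"
proxy.refresh()
assert proxy.title == "Updated"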
diff --git a/tests/benches/test_serialization_proxy.py b/tests/benches/test_serialization_proxy.py index 04745b9..5da6d7e 100644 --- a/tests/benches/test_serialization_proxy.py +++ b/tests/benches/test_serialization_proxy.py @@ -10,7 +10,7 @@ from typing import Any import pytest -from pydantic import BaseModel, Field, TypeAdapter, field_serializer +from pydantic import BaseModel, TypeAdapter, field_serializer from deigma.proxy import SerializationProxy @@ -406,7 +406,7 @@ def complete_workflow(): proxy = SerializationProxy.build(nested_model) _ = proxy.id _ = proxy.data.name - items_len = len(proxy.items) + _ = len(proxy.items) count = 0 for item in proxy.items: count += 1 @@ -423,7 +423,7 @@ def test_benchmark_direct_complete_workflow(benchmark, nested_model: NestedModel def complete_workflow(): _ = nested_model.id _ = nested_model.data.name - items_len = len(nested_model.items) + _ = len(nested_model.items) count = 0 for item in nested_model.items: count += 1 diff --git a/tests/integration/test_field_serialization.py b/tests/integration/test_field_serialization.py index 0015e8a..f1ac38f 100644 --- a/tests/integration/test_field_serialization.py +++ b/tests/integration/test_field_serialization.py @@ -6,31 +6,55 @@ """ from dataclasses import dataclass -from typing import Annotated, TypedDict +from typing import Annotated, TypeAlias, TypedDict import pytest -from pydantic import PlainSerializer, field_serializer +from pydantic import PlainSerializer, WrapSerializer, field_serializer from deigma import template -# Fixtures for reusable test models -@pytest.fixture -def user_inline_type(): - """TypedDict User for inline serialization tests.""" +# Module-level type definitions for use in tests +class User(TypedDict): + """TypedDict for user data.""" + first_name: str + last_name: str + + +UserInlineType: TypeAlias = Annotated[ + User, PlainSerializer(lambda user: f"{user['first_name']} {user['last_name']}") +] + +UserWrapType: TypeAlias = Annotated[ + User, + WrapSerializer( + lambda user, nxt: f"User: {nxt(user)['first_name']} {nxt(user)['last_name']}", + return_type=str, + ), +] + +Keywords: TypeAlias = Annotated[list[str], PlainSerializer(lambda xs: ", ".join(xs))] + +SQLKeywordName: TypeAlias = Annotated[str, PlainSerializer(lambda keyword: keyword.upper())] + +PhoneNumber: TypeAlias = Annotated[ + str, + PlainSerializer( + lambda phone: f"({phone[:3]}) {phone[3:6]}-{phone[6:]}", return_type=str + ), +] - class User(TypedDict): - first_name: str - last_name: str +# Fixtures for backward compatibility +@pytest.fixture +def user_inline_type() -> type[User]: + """Fixture returning User type.""" return User # Field Serializer with Decorator -def test_field_serializer_decorator(user_inline_type): +def test_field_serializer_decorator(): """Test field serialization using @field_serializer.""" - User = user_inline_type - @template("{{ user }}") class UserTemplate: user: User @@ -44,16 +68,11 @@ def inline_user(self, user: User) -> str: # Plain Serializer Annotation -def test_plain_serializer_annotation(user_inline_type): +def test_plain_serializer_annotation(): """Test field serialization using PlainSerializer annotation.""" - User = user_inline_type - UserInline = Annotated[ - User, PlainSerializer(lambda user: f"{user['first_name']} {user['last_name']}") - ] - @template("{{ user }}") class UserTemplate: - user: UserInline + user: UserInlineType result = str(UserTemplate(user={"first_name": "Li", "last_name": "Si"})) assert result.strip() == "Li Si" @@ -62,8 +81,6 @@ class UserTemplate: # SQL 
Keyword Example from README def test_sql_keyword_example(): """Test the SQL keyword example from README - tests field serializers in loops.""" - SQLKeywordName = Annotated[str, PlainSerializer(lambda keyword: keyword.upper())] - @dataclass class SQLKeyword: name: SQLKeywordName @@ -96,8 +113,6 @@ class SQLKeywordListingTemplate: def test_sql_keyword_literal_rendering(): """Test field serializers applied when rendering compound object natively.""" - SQLKeywordName = Annotated[str, PlainSerializer(lambda keyword: keyword.upper())] - @dataclass class SQLKeyword: name: SQLKeywordName @@ -269,11 +284,11 @@ def counting_serializer(value: str) -> str: counter.count += 1 return value.upper() - Name = Annotated[str, PlainSerializer(counting_serializer)] + Name = Annotated[str, PlainSerializer(counting_serializer)] # pyright: ignore[reportInvalidTypeForm] @dataclass class Person: - name: Name + name: Name # pyright: ignore[reportInvalidTypeForm] title: str @template( diff --git a/tests/integration/test_template_rendering.py b/tests/integration/test_template_rendering.py index fd372e6..10d9396 100644 --- a/tests/integration/test_template_rendering.py +++ b/tests/integration/test_template_rendering.py @@ -11,7 +11,7 @@ from pydantic import Field from deigma import replace, template -from deigma.serialize import serialize_json, serialize_str +from deigma.serialize import serialize_json # Fixtures diff --git a/uv.lock b/uv.lock index b960593..ab5e885 100644 --- a/uv.lock +++ b/uv.lock @@ -31,8 +31,10 @@ dependencies = [ [package.dev-dependencies] dev = [ + { name = "pyright" }, { name = "pytest" }, { name = "pytest-benchmark" }, + { name = "ruff" }, ] [package.metadata] @@ -43,8 +45,10 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ + { name = "pyright", specifier = ">=1.1.407" }, { name = "pytest", specifier = ">=8.3.4" }, { name = "pytest-benchmark", specifier = ">=5.1.0" }, + { name = "ruff", specifier = ">=0.14.2" }, ] [[package]] @@ -116,6 +120,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, ] +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + [[package]] name = "packaging" version = "24.2" @@ -210,6 +223,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186, upload-time = "2024-12-18T11:29:37.649Z" }, ] +[[package]] +name = "pyright" +version = "1.1.407" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = 
"typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/1b/0aa08ee42948b61745ac5b5b5ccaec4669e8884b53d31c8ec20b2fcd6b6f/pyright-1.1.407.tar.gz", hash = "sha256:099674dba5c10489832d4a4b2d302636152a9a42d317986c38474c76fe562262", size = 4122872, upload-time = "2025-10-24T23:17:15.145Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/93/b69052907d032b00c40cb656d21438ec00b3a471733de137a3f65a49a0a0/pyright-1.1.407-py3-none-any.whl", hash = "sha256:6dd419f54fcc13f03b52285796d65e639786373f433e243f8b94cf93a7444d21", size = 5997008, upload-time = "2025-10-24T23:17:13.159Z" }, +] + [[package]] name = "pytest" version = "8.3.5" @@ -238,6 +264,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9e/d6/b41653199ea09d5969d4e385df9bbfd9a100f28ca7e824ce7c0a016e3053/pytest_benchmark-5.1.0-py3-none-any.whl", hash = "sha256:922de2dfa3033c227c96da942d1878191afa135a29485fb942e85dff1c592c89", size = 44259, upload-time = "2024-10-30T11:51:45.94Z" }, ] +[[package]] +name = "ruff" +version = "0.14.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/34/8218a19b2055b80601e8fd201ec723c74c7fe1ca06d525a43ed07b6d8e85/ruff-0.14.2.tar.gz", hash = "sha256:98da787668f239313d9c902ca7c523fe11b8ec3f39345553a51b25abc4629c96", size = 5539663, upload-time = "2025-10-23T19:37:00.956Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/dd/23eb2db5ad9acae7c845700493b72d3ae214dce0b226f27df89216110f2b/ruff-0.14.2-py3-none-linux_armv6l.whl", hash = "sha256:7cbe4e593505bdec5884c2d0a4d791a90301bc23e49a6b1eb642dd85ef9c64f1", size = 12533390, upload-time = "2025-10-23T19:36:18.044Z" }, + { url = "https://files.pythonhosted.org/packages/5a/8c/5f9acff43ddcf3f85130d0146d0477e28ccecc495f9f684f8f7119b74c0d/ruff-0.14.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8d54b561729cee92f8d89c316ad7a3f9705533f5903b042399b6ae0ddfc62e11", size = 12887187, upload-time = "2025-10-23T19:36:22.664Z" }, + { url = "https://files.pythonhosted.org/packages/99/fa/047646491479074029665022e9f3dc6f0515797f40a4b6014ea8474c539d/ruff-0.14.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5c8753dfa44ebb2cde10ce5b4d2ef55a41fb9d9b16732a2c5df64620dbda44a3", size = 11925177, upload-time = "2025-10-23T19:36:24.778Z" }, + { url = "https://files.pythonhosted.org/packages/15/8b/c44cf7fe6e59ab24a9d939493a11030b503bdc2a16622cede8b7b1df0114/ruff-0.14.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d0bbeffb8d9f4fccf7b5198d566d0bad99a9cb622f1fc3467af96cb8773c9e3", size = 12358285, upload-time = "2025-10-23T19:36:26.979Z" }, + { url = "https://files.pythonhosted.org/packages/45/01/47701b26254267ef40369aea3acb62a7b23e921c27372d127e0f3af48092/ruff-0.14.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7047f0c5a713a401e43a88d36843d9c83a19c584e63d664474675620aaa634a8", size = 12303832, upload-time = "2025-10-23T19:36:29.192Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5c/ae7244ca4fbdf2bee9d6405dcd5bc6ae51ee1df66eb7a9884b77b8af856d/ruff-0.14.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bf8d2f9aa1602599217d82e8e0af7fd33e5878c4d98f37906b7c93f46f9a839", size = 13036995, upload-time = "2025-10-23T19:36:31.861Z" }, + { url = "https://files.pythonhosted.org/packages/27/4c/0860a79ce6fd4c709ac01173f76f929d53f59748d0dcdd662519835dae43/ruff-0.14.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:1c505b389e19c57a317cf4b42db824e2fca96ffb3d86766c1c9f8b96d32048a7", size = 14512649, upload-time = "2025-10-23T19:36:33.915Z" }, + { url = "https://files.pythonhosted.org/packages/7f/7f/d365de998069720a3abfc250ddd876fc4b81a403a766c74ff9bde15b5378/ruff-0.14.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a307fc45ebd887b3f26b36d9326bb70bf69b01561950cdcc6c0bdf7bb8e0f7cc", size = 14088182, upload-time = "2025-10-23T19:36:36.983Z" }, + { url = "https://files.pythonhosted.org/packages/6c/ea/d8e3e6b209162000a7be1faa41b0a0c16a133010311edc3329753cc6596a/ruff-0.14.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61ae91a32c853172f832c2f40bd05fd69f491db7289fb85a9b941ebdd549781a", size = 13599516, upload-time = "2025-10-23T19:36:39.208Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ea/c7810322086db68989fb20a8d5221dd3b79e49e396b01badca07b433ab45/ruff-0.14.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1967e40286f63ee23c615e8e7e98098dedc7301568bd88991f6e544d8ae096", size = 13272690, upload-time = "2025-10-23T19:36:41.453Z" }, + { url = "https://files.pythonhosted.org/packages/a9/39/10b05acf8c45786ef501d454e00937e1b97964f846bf28883d1f9619928a/ruff-0.14.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:2877f02119cdebf52a632d743a2e302dea422bfae152ebe2f193d3285a3a65df", size = 13496497, upload-time = "2025-10-23T19:36:43.61Z" }, + { url = "https://files.pythonhosted.org/packages/59/a1/1f25f8301e13751c30895092485fada29076e5e14264bdacc37202e85d24/ruff-0.14.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e681c5bc777de5af898decdcb6ba3321d0d466f4cb43c3e7cc2c3b4e7b843a05", size = 12266116, upload-time = "2025-10-23T19:36:45.625Z" }, + { url = "https://files.pythonhosted.org/packages/5c/fa/0029bfc9ce16ae78164e6923ef392e5f173b793b26cc39aa1d8b366cf9dc/ruff-0.14.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e21be42d72e224736f0c992cdb9959a2fa53c7e943b97ef5d081e13170e3ffc5", size = 12281345, upload-time = "2025-10-23T19:36:47.618Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ab/ece7baa3c0f29b7683be868c024f0838770c16607bea6852e46b202f1ff6/ruff-0.14.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b8264016f6f209fac16262882dbebf3f8be1629777cf0f37e7aff071b3e9b92e", size = 12629296, upload-time = "2025-10-23T19:36:49.789Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7f/638f54b43f3d4e48c6a68062794e5b367ddac778051806b9e235dfb7aa81/ruff-0.14.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5ca36b4cb4db3067a3b24444463ceea5565ea78b95fe9a07ca7cb7fd16948770", size = 13371610, upload-time = "2025-10-23T19:36:51.882Z" }, + { url = "https://files.pythonhosted.org/packages/8d/35/3654a973ebe5b32e1fd4a08ed2d46755af7267da7ac710d97420d7b8657d/ruff-0.14.2-py3-none-win32.whl", hash = "sha256:41775927d287685e08f48d8eb3f765625ab0b7042cc9377e20e64f4eb0056ee9", size = 12415318, upload-time = "2025-10-23T19:36:53.961Z" }, + { url = "https://files.pythonhosted.org/packages/71/30/3758bcf9e0b6a4193a6f51abf84254aba00887dfa8c20aba18aa366c5f57/ruff-0.14.2-py3-none-win_amd64.whl", hash = "sha256:0df3424aa5c3c08b34ed8ce099df1021e3adaca6e90229273496b839e5a7e1af", size = 13565279, upload-time = "2025-10-23T19:36:56.578Z" }, + { url = "https://files.pythonhosted.org/packages/2e/5d/aa883766f8ef9ffbe6aa24f7192fb71632f31a30e77eb39aa2b0dc4290ac/ruff-0.14.2-py3-none-win_arm64.whl", hash = "sha256:ea9d635e83ba21569fbacda7e78afbfeb94911c9434aff06192d9bc23fd5495a", size = 12554956, upload-time = 
"2025-10-23T19:36:58.714Z" }, +] + [[package]] name = "typing-extensions" version = "4.12.2"