
Commit 27b6c95

Merge branch 'litestar-org:main' into feat/postgres-extension-dialects
2 parents 3529d86 + 1c9b19d commit 27b6c95

20 files changed: 226 additions & 253 deletions


docs/changelog.rst

Lines changed: 4 additions & 4 deletions
@@ -87,7 +87,7 @@ Database Event Channels
 - Introduced the ``events`` extension migrations (``ext_events_0001``) which create the durable queue table plus composite index.
 - Added the first native backend (AsyncPG LISTEN/NOTIFY) enabled via ``driver_features["events_backend"] = "listen_notify"``; the API automatically falls back to the queue backend for other adapters.
 - Introduced experimental Oracle Advanced Queuing support (sync adapters) via ``driver_features["events_backend"] = "advanced_queue"`` with automatic fallback when AQ is unavailable.
-- Documented configuration patterns (queue table naming, lease/retention windows, Oracle ``INMEMORY`` toggle, Postgres native mode) in :doc:`/guides/events/database-event-channels`.
+- Documented configuration patterns (queue table naming, lease/retention windows, Oracle ``INMEMORY`` toggle, Postgres native mode) for database event channels.
 - Event telemetry now tracks ``events.publish``, ``events.publish.native``, ``events.deliver``, ``events.ack``, ``events.nack``, ``events.shutdown`` and listener lifecycle, so Prometheus/Otel exporters see event workloads alongside query metrics.
 - Added adapter-specific runtime hints (asyncmy, duckdb, bigquery/adbc) plus a ``poll_interval`` extension option so operators can tune leases and cadence per database.
 - Publishing, dequeue, ack, nack, and shutdown operations now emit ``sqlspec.events.*`` spans whenever ``extension_config["otel"]`` is enabled, giving full trace coverage without extra plumbing.
@@ -144,7 +144,7 @@ Simple search and replace in your codebase:
 - Reduces cognitive load when switching between adapters
 - Clearer API for new users

-**See:** :doc:`/guides/migration/connection-config` for detailed migration guide with before/after examples for all adapters.
+**See** the connection configuration section in :doc:`usage/configuration` for detailed migration guidance with before/after examples for all adapters.

 Query Stack Documentation Suite
 --------------------------------
@@ -308,7 +308,7 @@ Example conversion:
 **Documentation:**

 - Complete CLI reference: :doc:`usage/cli`
-- Workflow guide: :ref:`hybrid-versioning-guide`
+- Workflow guide for hybrid versioning
 - CI integration examples for GitHub Actions and GitLab CI

 Shell Completion Support
@@ -377,7 +377,7 @@ Extension migrations now receive automatic version prefixes and configuration ha
     "include_extensions": ["adk"]  # Simple string list
 }

-**Configuration Guide**: See :doc:`/migration_guides/extension_config`
+**Configuration Guide**: See :doc:`usage/migrations` for extension configuration details.

 Features
 --------
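The event-channel bullets above reference ``driver_features`` and ``extension_config`` knobs. A hypothetical configuration sketch of that pattern follows; ``AsyncpgConfig`` exists in sqlspec, but the ``pool_config`` shape and the ``events``/``otel`` option keys here are assumptions drawn from the changelog text, not a verified API:

from sqlspec.adapters.asyncpg import AsyncpgConfig

# Sketch only: option keys below are inferred from the changelog bullets.
config = AsyncpgConfig(
    pool_config={"dsn": "postgresql://localhost/app"},  # assumed shape
    driver_features={"events_backend": "listen_notify"},  # native LISTEN/NOTIFY backend
    extension_config={
        "events": {"poll_interval": 1.0},  # polling cadence for the queue fallback (assumed key)
        "otel": {},  # enables sqlspec.events.* spans per the telemetry bullet
    },
)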

docs/conf.py

Lines changed: 0 additions & 4 deletions
@@ -77,7 +77,6 @@
     "sphinxcontrib.mermaid",
     "numpydoc",
     "sphinx_iconify",
-    "sphinx_docsearch",
     "sphinx_datatables",
     "jupyter_sphinx",
     "nbsphinx",
@@ -167,9 +166,6 @@
 # https://sphinx-copybutton.readthedocs.io/en/latest/use.html#strip-and-configure-input-prompts-for-code-cells
 copybutton_prompt_text = "$ "

-docsearch_app_id = os.getenv("DOCSEARCH_APP_ID", "disabled")
-docsearch_api_key = os.getenv("DOCSEARCH_SEARCH_API_KEY", "disabled")
-docsearch_index_name = os.getenv("DOCSEARCH_INDEX_NAME", "disabled")
 nbsphinx_requirejs_path = ""
 jupyter_sphinx_require_url = ""

pyproject.toml

Lines changed: 0 additions & 1 deletion
@@ -99,7 +99,6 @@ doc = [
     "sphinx-autodoc-typehints",
     "numpydoc",
     "sphinx-iconify",
-    "sphinx-docsearch",
     "jupyter-sphinx",
     "nbsphinx",
 ]

sqlspec/adapters/adbc/core.py

Lines changed: 2 additions & 2 deletions
@@ -70,7 +70,7 @@
     "resolve_rowcount",
 )

-_COLUMN_NAME_CACHE_MAX_SIZE: int = 256
+COLUMN_CACHE_MAX_SIZE: int = 256

 DIALECT_PATTERNS: "dict[str, tuple[str, ...]]" = {
     "postgres": ("postgres", "postgresql"),
@@ -722,7 +722,7 @@ def resolve_column_names(description: "list[Any] | None", cache: "dict[int, tupl
         return cached[1]

     column_names = [col[0] for col in description]
-    if len(cache) >= _COLUMN_NAME_CACHE_MAX_SIZE:
+    if len(cache) >= COLUMN_CACHE_MAX_SIZE:
         cache.pop(next(iter(cache)))
     cache[cache_key] = (description, column_names)
     return column_names
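The renamed ``COLUMN_CACHE_MAX_SIZE`` bounds a plain dict used as a FIFO cache: Python dicts preserve insertion order, so ``cache.pop(next(iter(cache)))`` evicts the oldest entry with no LRU bookkeeping. A minimal self-contained sketch of the pattern shared by this and the following adapters; the ``id()``-based key and the identity guard are assumptions inferred from the ``dict[int, tuple[...]]`` signature:

from typing import Any

COLUMN_CACHE_MAX_SIZE: int = 256

def resolve_column_names(description: "list[Any]", cache: "dict[int, tuple[Any, list[str]]]") -> "list[str]":
    cache_key = id(description)  # assumed key: identity of the driver's description object
    cached = cache.get(cache_key)
    if cached is not None and cached[0] is description:  # guard against id() reuse
        return cached[1]
    column_names = [col[0] for col in description]
    if len(cache) >= COLUMN_CACHE_MAX_SIZE:
        cache.pop(next(iter(cache)))  # dicts iterate in insertion order: drop the oldest entry
    cache[cache_key] = (description, column_names)
    return column_names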

sqlspec/adapters/bigquery/core.py

Lines changed: 2 additions & 2 deletions
@@ -72,7 +72,7 @@
 HTTP_BAD_REQUEST = 400
 HTTP_FORBIDDEN = 403
 HTTP_SERVER_ERROR = 500
-_COLUMN_NAME_CACHE_MAX_SIZE = 256
+COLUMN_CACHE_MAX_SIZE = 256


 def _identity(value: Any) -> Any:
@@ -546,7 +546,7 @@ def resolve_column_names(schema: Any | None, cache: "dict[int, tuple[Any, list[s
         return cached[1]

     column_names = [field.name for field in schema]
-    if len(cache) >= _COLUMN_NAME_CACHE_MAX_SIZE:
+    if len(cache) >= COLUMN_CACHE_MAX_SIZE:
         cache.pop(next(iter(cache)))
     cache[cache_key] = (schema, column_names)
     return column_names

sqlspec/adapters/oracledb/core.py

Lines changed: 6 additions & 6 deletions
@@ -66,8 +66,8 @@
 _VERSION_COMPONENTS: int = 3
 TYPE_CONVERTER = OracleOutputConverter()
 _LOB_TYPE_NAME_MARKERS: "tuple[str, ...]" = ("LOB", "BFILE")
-_FAST_SCALAR_TYPES: "tuple[type[Any], ...]" = (bool, int, float, str, bytes, bytearray, type(None))
-_ROW_METADATA_CACHE_MAX_SIZE: int = 256
+_SCALAR_PASSTHROUGH_TYPES: "tuple[type[Any], ...]" = (bool, int, float, str, bytes, bytearray, type(None))
+ROW_CACHE_MAX_SIZE: int = 256

 # Oracle ORA error code ranges for category detection
 ORA_CHECK_CONSTRAINT = 2290
@@ -415,7 +415,7 @@ def resolve_row_metadata(
     normalized_column_names = normalize_column_names(column_names, driver_features)
     requires_lob_coercion = _description_requires_lob_coercion(description)

-    if len(cache) >= _ROW_METADATA_CACHE_MAX_SIZE:
+    if len(cache) >= ROW_CACHE_MAX_SIZE:
         cache.pop(next(iter(cache)))
     cache[cache_key] = (description, normalized_column_names, requires_lob_coercion)
     return normalized_column_names, requires_lob_coercion
@@ -425,7 +425,7 @@ def _row_requires_lob_coercion(row: "tuple[Any, ...]") -> bool:
     """Return True when a row contains readable values that need LOB coercion."""
     for value in row:
         value_type = type(value)
-        if value_type in _FAST_SCALAR_TYPES:
+        if value_type in _SCALAR_PASSTHROUGH_TYPES:
             continue
         if is_readable(value):
             return True
@@ -448,7 +448,7 @@ def _coerce_sync_row_values(row: "tuple[Any, ...]") -> "tuple[Any, ...]":
     coerced_values: list[Any] | None = None
     for index, value in enumerate(row):
         value_type = type(value)
-        if value_type in _FAST_SCALAR_TYPES:
+        if value_type in _SCALAR_PASSTHROUGH_TYPES:
             if coerced_values is not None:
                 coerced_values.append(value)
             continue
@@ -494,7 +494,7 @@ async def _coerce_async_row_values(row: "tuple[Any, ...]") -> "tuple[Any, ...]":
     coerced_values: list[Any] | None = None
     for index, value in enumerate(row):
         value_type = type(value)
-        if value_type in _FAST_SCALAR_TYPES:
+        if value_type in _SCALAR_PASSTHROUGH_TYPES:
             if coerced_values is not None:
                 coerced_values.append(value)
             continue
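``_SCALAR_PASSTHROUGH_TYPES`` lets the coercion loops skip LOB handling for plain scalar values, and ``coerced_values`` stays ``None`` until the first value actually needs work, so all-scalar rows return unchanged with no extra allocation. A condensed sketch of the sync loop under those assumptions; ``read_value`` stands in for the adapter's LOB read and is hypothetical:

from typing import Any, Callable

_SCALAR_PASSTHROUGH_TYPES: "tuple[type[Any], ...]" = (bool, int, float, str, bytes, bytearray, type(None))

def coerce_row_values(row: "tuple[Any, ...]", read_value: "Callable[[Any], Any]") -> "tuple[Any, ...]":
    coerced_values: "list[Any] | None" = None
    for index, value in enumerate(row):
        if type(value) in _SCALAR_PASSTHROUGH_TYPES:
            if coerced_values is not None:
                coerced_values.append(value)
            continue
        if coerced_values is None:
            coerced_values = list(row[:index])  # first non-scalar: copy the prefix once
        coerced_values.append(read_value(value))  # e.g. a LOB read in the real adapter
    return row if coerced_values is None else tuple(coerced_values)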

sqlspec/adapters/psycopg/driver.py

Lines changed: 3 additions & 3 deletions
@@ -72,7 +72,7 @@
 )

 logger = get_logger("sqlspec.adapters.psycopg")
-_COLUMN_NAME_CACHE_MAX_SIZE = 256
+COLUMN_CACHE_MAX_SIZE = 256


 class PsycopgPipelineMixin:
@@ -566,7 +566,7 @@ def _resolve_column_names(self, description: Any) -> list[str]:

         column_names = [col.name for col in description]

-        if len(self._column_name_cache) >= _COLUMN_NAME_CACHE_MAX_SIZE:
+        if len(self._column_name_cache) >= COLUMN_CACHE_MAX_SIZE:
             self._column_name_cache.pop(next(iter(self._column_name_cache)))
         self._column_name_cache[cache_key] = (description, column_names)
         return column_names
@@ -1051,7 +1051,7 @@ def _resolve_column_names(self, description: Any) -> list[str]:

         column_names = [col.name for col in description]

-        if len(self._column_name_cache) >= _COLUMN_NAME_CACHE_MAX_SIZE:
+        if len(self._column_name_cache) >= COLUMN_CACHE_MAX_SIZE:
             self._column_name_cache.pop(next(iter(self._column_name_cache)))
         self._column_name_cache[cache_key] = (description, column_names)
         return column_names

sqlspec/adapters/spanner/core.py

Lines changed: 2 additions & 2 deletions
@@ -41,7 +41,7 @@
     "supports_write",
 )

-_COLUMN_NAME_CACHE_MAX_SIZE: int = 128
+COLUMN_CACHE_MAX_SIZE: int = 128


 def build_profile() -> "DriverParameterProfile":
@@ -136,7 +136,7 @@ def resolve_column_names(fields: "Sequence[Any] | None", cache: "dict[int, tuple
         return cached[1]

     column_names = [field.name for field in fields]
-    if len(cache) >= _COLUMN_NAME_CACHE_MAX_SIZE:
+    if len(cache) >= COLUMN_CACHE_MAX_SIZE:
         cache.pop(next(iter(cache)))
     cache[cache_key] = (fields, column_names)
     return column_names

sqlspec/adapters/sqlite/driver.py

Lines changed: 4 additions & 3 deletions
@@ -239,7 +239,9 @@ def execute_many(
             return DMLResult(operation, affected_rows)
         return super().execute_many(statement, parameters, *filters, statement_config=statement_config, **kwargs)

-    def _qc_execute_direct(self, sql: str, params: "tuple[Any, ...] | list[Any]", cached: "CachedQuery") -> "SQLResult":
+    def _stmt_cache_execute_direct(
+        self, sql: str, params: "tuple[Any, ...] | list[Any]", cached: "CachedQuery"
+    ) -> "SQLResult":
         """Execute cached query through SQLite connection.execute fast path.

         This bypasses cursor context-manager overhead for repeated cached
@@ -269,7 +271,6 @@ def _qc_execute_direct(self, sql: str, params: "tuple[Any, ...] | list[Any]", ca
         if column_names is None:
             description = cursor.description
             column_names = [col[0] for col in description] if description else []
-            cached.column_names = column_names
         execution_result = self.create_execution_result(
             cursor,
             selected_data=fetched_data,
@@ -278,7 +279,7 @@
             is_select_result=True,
             row_format="tuple",
         )
-        direct_statement = self._qc_build_direct(
+        direct_statement = self._stmt_cache_build_direct(
             sql, params, cached, params, params_are_simple=True, compiled_sql=cached.compiled_sql
         )
         return self.build_statement_result(direct_statement, execution_result)
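The renamed fast path relies on ``sqlite3.Connection.execute``, which creates the cursor and runs the statement in one call, skipping a cursor context-manager round trip per cached statement. A standalone stdlib illustration of that shape (SQLSpec's ``CachedQuery`` and result plumbing omitted):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (a INTEGER, b TEXT)")
conn.execute("INSERT INTO t VALUES (?, ?)", (1, "x"))

# connection.execute returns an implicit cursor; no `with` block needed.
cursor = conn.execute("SELECT a, b FROM t WHERE a = ?", (1,))
rows = cursor.fetchall()
column_names = [col[0] for col in cursor.description] if cursor.description else []
print(column_names, rows)  # ['a', 'b'] [(1, 'x')]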

sqlspec/core/result/_base.py

Lines changed: 14 additions & 18 deletions
@@ -10,7 +10,7 @@
 """

 from abc import ABC, abstractmethod
-from collections.abc import Iterable, Iterator
+from collections.abc import Iterable, Iterator, Sequence
 from typing import TYPE_CHECKING, Any, cast, overload

 from mypy_extensions import mypyc_attr
@@ -48,13 +48,9 @@

 T = TypeVar("T")
 _EMPTY_RESULT_STATEMENT = SQL("-- empty stack result --")
-_EMPTY_RESULT_DATA: list[Any] = []
-_EMPTY_DML_METADATA: dict[str, Any] = {}
-_EMPTY_DML_COLUMN_NAMES: list[str] = []
-_EMPTY_DML_INSERTED_IDS: list[int | str] = []
-_EMPTY_DML_STATEMENT_RESULTS: list["SQLResult"] = []
-_EMPTY_DML_ERRORS: list[str] = []
-_TWO_COLUMNS_FASTPATH = 2
+_EMPTY_RESULT_DATA: "tuple[()]" = ()
+_DEFAULT_DML_METADATA: dict[str, Any] = {}
+_TWO_COLUMN_THRESHOLD = 2


 @mypyc_attr(allow_interpreted_subclasses=False)
@@ -186,7 +182,7 @@ class SQLResult(StatementResult):
     def __init__(
         self,
         statement: "SQL",
-        data: "list[Any] | None" = None,
+        data: "Sequence[Any] | None" = None,
         rows_affected: int = 0,
         last_inserted_id: int | str | None = None,
         execution_time: float | None = None,
@@ -294,7 +290,7 @@ def _get_rows(self) -> "list[dict[str, Any]]":
         elif len(col_names) == 1:
             key = col_names[0]
             self._materialized_dicts = [{key: row[0]} for row in raw]
-        elif len(col_names) == _TWO_COLUMNS_FASTPATH:
+        elif len(col_names) == _TWO_COLUMN_THRESHOLD:
             key0, key1 = col_names
             self._materialized_dicts = [{key0: row[0], key1: row[1]} for row in raw]
         else:
@@ -980,7 +976,7 @@ def is_success(self) -> bool:
         return True

     def get_data(self) -> "list[Any]":
-        return _EMPTY_RESULT_DATA
+        return []


 @mypyc_attr(allow_interpreted_subclasses=False)
@@ -1001,7 +997,7 @@ def __init__(self, op_type: "OperationType", rows_affected: int = 0) -> None:
         self.rows_affected = rows_affected
         self.last_inserted_id = None
         self.execution_time = None
-        self.metadata = _EMPTY_DML_METADATA
+        self.metadata = _DEFAULT_DML_METADATA

         self.error = None
         self._operation_type = op_type
@@ -1010,12 +1006,12 @@
         self._row_format = "dict"
         self._materialized_dicts = None

-        self.column_names = _EMPTY_DML_COLUMN_NAMES
+        self.column_names: list[str] = []
         self.total_count = 0
         self.has_more = False
-        self.inserted_ids = _EMPTY_DML_INSERTED_IDS
-        self.statement_results = _EMPTY_DML_STATEMENT_RESULTS
-        self.errors = _EMPTY_DML_ERRORS
+        self.inserted_ids: list[int | str] = []
+        self.statement_results: list[SQLResult] = []
+        self.errors: list[str] = []
         self.total_statements = 0
         self.successful_statements = 0

@@ -1026,11 +1022,11 @@ def is_success(self) -> bool:
         return self.rows_affected >= 0

     def get_data(self, *, schema_type: "type[SchemaT] | None" = None) -> "list[Any]":
-        return _EMPTY_RESULT_DATA
+        return []

     def set_metadata(self, key: str, value: Any) -> None:
         # Copy-on-write to preserve low-allocation defaults for hot DML paths.
-        if self.metadata is _EMPTY_DML_METADATA:
+        if self.metadata is _DEFAULT_DML_METADATA:
             self.metadata = {key: value}
             return
         self.metadata[key] = value
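This file trades shared empty-list constants for per-instance fresh lists: a module-level ``list`` is mutable, so one result appending to a shared ``errors`` would leak into every other ``DMLResult``. The shared empty ``dict`` sentinel survives because ``set_metadata`` copies on first write, gated by an ``is`` identity check. A condensed sketch of that copy-on-write pattern, with the surrounding ``DMLResult`` fields omitted:

from typing import Any

_DEFAULT_DML_METADATA: "dict[str, Any]" = {}  # shared sentinel; never mutated in place

class DMLResult:
    def __init__(self) -> None:
        self.metadata = _DEFAULT_DML_METADATA  # zero allocation on the hot DML path
        self.errors: "list[str]" = []  # lists stay per-instance because they mutate

    def set_metadata(self, key: str, value: Any) -> None:
        # Copy-on-write: replace the sentinel on first write, mutate afterwards.
        if self.metadata is _DEFAULT_DML_METADATA:
            self.metadata = {key: value}
            return
        self.metadata[key] = value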
