diff --git a/.gitignore b/.gitignore index 2dabd85..7ebe127 100644 --- a/.gitignore +++ b/.gitignore @@ -54,6 +54,9 @@ Thumbs.db .ai/ nitin_docs/ +# Sphinx build output +docs/_build/ + # Jupyter notebooks *.ipynb diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..8726591 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,29 @@ +# Read the Docs configuration file +# https://docs.readthedocs.io/en/stable/config-file/v2.html + +version: 2 + +build: + os: ubuntu-24.04 + tools: + python: "3.11" + + jobs: + pre_create_environment: + - | + curl -Ls https://astral.sh/uv/install.sh | bash + + create_environment: + - ~/.local/bin/uv venv "${READTHEDOCS_VIRTUALENV_PATH}" + + install: + - | + UV_PROJECT_ENVIRONMENT="${READTHEDOCS_VIRTUALENV_PATH}" \ + ~/.local/bin/uv sync --frozen --group docs + +sphinx: + configuration: docs/conf.py + +formats: + - pdf + - epub diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000..aad4168 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,113 @@ +# Agent Guide for sql-redis + +This document is for AI agents that want to **use** sql-redis to query Redis. +For agents that want to **modify** the library itself, see +[`docs/for-ais-only/`](docs/for-ais-only/). + +## What sql-redis is + +A SQL-to-Redis translator. It accepts a SQL `SELECT` string, looks up the +target index's schema in Redis, and emits a `FT.SEARCH` or `FT.AGGREGATE` +command. The library does not invent its own query language: SQL goes in, +Redis search results come out as a `QueryResult(rows, count)`. + +## When to reach for it + +- The agent has a SQL string (from a planner, an LLM, an ORM, a user) and + wants to run it against an existing RediSearch / RedisVL index. +- The agent needs vector search, full-text search, GEO filters, or date + filtering and wants a single uniform interface instead of building Redis + command lists by hand. +- The target index already exists. sql-redis does not run `FT.CREATE`. 
If + there is no index, the query fails. Create the index first. + +## When NOT to reach for it + +- Plain key-value `GET`/`SET` work, list/set/sorted-set commands, streams, + pub/sub. Use `redis-py` directly for those; sql-redis is a search-only + translator. +- Writes. There is no `INSERT`, `UPDATE`, or `DELETE`. Write through `redis-py`. +- Index creation. Use `FT.CREATE` directly via `redis-py` first. +- Cross-index joins, subqueries, `HAVING`, `DISTINCT`. Not implemented. + +## The minimum useful snippet + +```python +from redis import Redis +from sql_redis import create_executor + +client = Redis() +executor = create_executor(client) # lazy schema loading; no I/O yet + +result = executor.execute( + "SELECT title, price FROM products WHERE category = :cat AND price < :max LIMIT 10", + params={"cat": "electronics", "max": 500}, +) + +for row in result.rows: # row keys are bytes by default + print(row[b"title"], row[b"price"]) +``` + +Pass `decode_responses=True` to the `Redis` client if the agent prefers string +keys. + +## Surface map + +| Task | Symbol | +|---|---| +| Run a SQL query | [`Executor.execute`](sql_redis/executor.py) / [`AsyncExecutor.execute`](sql_redis/executor.py) | +| Build an executor | [`create_executor`](sql_redis/executor.py) / [`create_async_executor`](sql_redis/executor.py) | +| Read or refresh schema | [`SchemaRegistry`](sql_redis/schema.py) / [`AsyncSchemaRegistry`](sql_redis/schema.py) | +| Translate without executing | [`Translator.translate`](sql_redis/translator.py) returns a `TranslatedQuery` | + +Full reference, generated from docstrings, is at `docs/api/`. + +## Gotchas an agent should know + +1. **Field-key types.** Result rows come back with `bytes` keys when the + underlying `Redis` client uses default `decode_responses=False`. Use + `Redis(decode_responses=True)` if you want string keys end-to-end. +2. **Parameter substitution is token-based.** `:id` will not match inside + `:product_id`. 
String values are SQL-escaped automatically (`O'Brien` + becomes `'O''Brien'`). Pass `bytes` for vector parameters; they are + substituted as raw bytes after translation. +3. **`=` on a TEXT field is exact-phrase, not tokenized.** Use `fulltext()` + for tokenized AND search. See `docs/user_guide/how_to_guides/text-search.md`. +4. **Stopwords are stripped automatically** before queries reach Redis. A + `UserWarning` is emitted when this happens. Index with `STOPWORDS 0` to + keep them. +5. **`IS NULL` requires Redis 7.4+** and `INDEXMISSING` declared on the field. +6. **Lazy schema loading is the default.** The first query that touches an + index issues one `FT.INFO`. Pass `schema_cache_strategy="load_all"` to + `create_executor` if you want to fail fast on missing indexes at startup. +7. **No JOIN, subquery, HAVING, or DISTINCT.** The translator raises + `ValueError`; do not retry with rephrasing. +8. **GEO uses `POINT(lon, lat)` order.** Longitude first, matching Redis. + +## Error model an agent can expect + +| Exception | Meaning | Recovery | +|---|---|---| +| `ValueError` from `Translator.translate` | The SQL referenced an unknown index, unknown field, or unsupported clause. | Inspect the message; the index/field name is included. Do not retry the same SQL. | +| `redis.ResponseError` from `Executor.execute` | Redis rejected the generated command. The most common cases are wrong field type or `INDEXMISSING` not declared. | Check the index schema. The library wraps `ismissing()` failures with a hint about Redis 7.4 + `INDEXMISSING`. | +| `UserWarning` (not raised) | Stopwords stripped, or `IS NULL` used without `INDEXMISSING` declared. | Informational; does not affect the result. | + +## Discoverable artifacts in this repo + +| Artifact | Purpose | +|---|---| +| [`docs/llms.txt`](docs/llms.txt) | Flat index of every doc page with one-line summaries. Cheap to grep, good for in-context injection. 
| +| [`docs/api/`](docs/api/) | Sphinx `autoclass` reference for every public symbol. Source of truth for method signatures. | +| [`docs/user_guide/how_to_guides/`](docs/user_guide/how_to_guides/) | Task-oriented recipes (vector search, GEO, dates, async, parameters). | +| [`docs/concepts/`](docs/concepts/) | Why-style explanation: architecture, parameter substitution, schema-aware translation. | +| [`docs/for-ais-only/`](docs/for-ais-only/) | Internal repo map for agents modifying the library. | + +## Hub context + +sql-redis sits in the [Redis AI Hub](https://redis.io/ai-hub/) under the +*Experimental* tier as "SQL for Redis". Public docs URL: +[`docs.redisvl.com/projects/sql-redis/`](https://docs.redisvl.com/projects/sql-redis/). +The hub's docs standards (Diataxis layout, autoclass-driven API reference, +AI-agent affordances) are documented at +[`HUB_DOCS_STANDARDS.md`](https://github.com/redis/docs/blob/main/HUB_DOCS_STANDARDS.md) +in the hub repo. diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..f03d94c --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,4 @@ +# Git +- Do not add `Co-Authored-By: Claude ...` trailers to commit messages. Just use user profile only. +- Do not add the "🤖 Generated with Claude Code" line to PR bodies. 
+- Do not use emdashes or "--" diff --git a/Makefile b/Makefile index 7b96767..37ff08e 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: install format lint test clean check-types check-format check-sort-imports sort-imports build help +.PHONY: install format lint test clean check-types check-format check-sort-imports sort-imports build docs-build docs-serve help .DEFAULT_GOAL := help # Allow passing arguments to make targets (e.g., make test ARGS="...") @@ -49,6 +49,15 @@ build: ## Build wheel and source distribution @echo "🏗️ Building distribution packages" uv build +docs-build: ## Build documentation + @echo "📚 Building documentation" + uv run --group docs make -C docs html + +docs-serve: ## Serve documentation locally at http://localhost:8000 + @echo "🌐 Serving documentation at http://localhost:8000" + @echo "📁 Make sure docs are built first with 'make docs-build'" + uv run python -m http.server --directory docs/_build/html + clean: ## Clean up build artifacts and caches @echo "🧹 Cleaning up directory" find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true diff --git a/README.md b/README.md index a2fadc2..2881170 100644 --- a/README.md +++ b/README.md @@ -1,26 +1,26 @@ # sql-redis -A proof-of-concept SQL-to-Redis translator that converts SQL `SELECT` statements into Redis `FT.SEARCH` and `FT.AGGREGATE` commands. +[![Status: Experimental](https://img.shields.io/badge/status-experimental-orange)](https://redis.io/ai-hub/) -## Status +Query Redis collections with familiar SQL on top of RediSearch and RedisVL indexes. Converts SQL `SELECT` statements into Redis `FT.SEARCH` and `FT.AGGREGATE` commands. -This is an **early POC** demonstrating feasibility, not a production-ready library. The goal is to explore design decisions and validate the approach before committing to a full implementation. +> **Status: Experimental.** sql-redis is in the [Redis AI Hub](https://redis.io/ai-hub/) under the Experimental tier. 
The API can change between minor releases. Not yet production-ready; we are validating the design and the SQL surface in real use. -## Quick Example +## Install + +```bash +pip install sql-redis +``` + +## Quick example ```python from redis import Redis -from sql_redis import Translator -from sql_redis.schema import SchemaRegistry -from sql_redis.executor import Executor +from sql_redis import create_executor client = Redis() -registry = SchemaRegistry(client) -registry.load_all() # Loads index schemas from Redis +executor = create_executor(client) -executor = Executor(client, registry) - -# Simple query result = executor.execute(""" SELECT title, price FROM products @@ -30,426 +30,44 @@ result = executor.execute(""" """) for row in result.rows: - print(row["title"], row["price"]) - -# Vector search with params -result = executor.execute(""" - SELECT title, vector_distance(embedding, :vec) AS score - FROM products - LIMIT 5 -""", params={"vec": vector_bytes}) -``` - -## Design Decisions - -### Why SQL instead of a pandas-like Python DSL? - -We considered several interface options: - -| Approach | Example | Trade-offs | -|----------|---------|------------| -| **SQL** | `SELECT * FROM products WHERE price > 100` | Universal, well-understood, tooling exists | -| **Pandas-like** | `df[df.price > 100]` | Pythonic but limited to Python, no standard | -| **Builder pattern** | `query.select("*").where(price__gt=100)` | Type-safe but verbose, learning curve | - -**We chose SQL because:** - -1. **Universality** — SQL is the lingua franca of data. Developers, analysts, and tools all speak it. -2. **No new DSL to learn** — Users already know SQL. A pandas-like API requires learning our specific dialect. -3. **Tooling compatibility** — SQL strings can be generated by ORMs, query builders, or AI assistants. -4. **Clear mapping** — SQL semantics map reasonably well to RediSearch operations (SELECT→LOAD, WHERE→filter, GROUP BY→GROUPBY). 
- -The downside is losing Python's type checking and IDE support, but for a query interface, the universality trade-off is worth it. - -### Why sqlglot instead of writing a custom parser? - -**Options considered:** -- **Custom parser** (regex, hand-rolled recursive descent) -- **PLY/Lark** (parser generators) -- **sqlglot** (production SQL parser) -- **sqlparse** (tokenizer, not a full parser) - -**We chose sqlglot because:** - -1. **Battle-tested** — Used in production by companies like Tobiko (SQLMesh). Handles edge cases we'd miss. -2. **Full AST** — Provides a complete abstract syntax tree, not just tokens. We can traverse and analyze queries properly. -3. **Dialect support** — Handles SQL variations. Users can write MySQL-style or PostgreSQL-style queries. -4. **Active maintenance** — Regular releases, responsive maintainers, good documentation. - -The alternative was writing a custom parser, which would be error-prone and time-consuming for a POC. sqlglot lets us focus on the translation logic rather than parsing edge cases. - -### Why schema-aware translation? - -Redis field types determine query syntax: - -| Field Type | Redis Syntax | Example | -|------------|--------------|---------| -| TEXT | `@field:term` | `@title:laptop` | -| NUMERIC | `@field:[min max]` | `@price:[100 500]` | -| TAG | `@field:{value}` | `@category:{books}` | - -**Without schema knowledge**, we can't translate `category = 'books'` correctly — it could be `@category:books` (TEXT search) or `@category:{books}` (TAG exact match). - -**Our approach:** The `SchemaRegistry` fetches index schemas via `FT.INFO` at startup. The translator uses this to generate correct syntax per field type. - -This adds a Redis round-trip at initialization but ensures correct query generation. - -### Architecture: Why this layered design? 
- -``` -SQL String - ↓ -┌─────────────────┐ -│ SQLParser │ Parse SQL → ParsedQuery dataclass -└────────┬────────┘ - ↓ -┌─────────────────┐ -│ SchemaRegistry │ Load field types from Redis -└────────┬────────┘ - ↓ -┌─────────────────┐ -│ Analyzer │ Classify conditions by field type -└────────┬────────┘ - ↓ -┌─────────────────┐ -│ QueryBuilder │ Generate RediSearch syntax per type -└────────┬────────┘ - ↓ -┌─────────────────┐ -│ Translator │ Orchestrate pipeline, build command -└────────┬────────┘ - ↓ -┌─────────────────┐ -│ Executor │ Execute command, parse results -└────────┬────────┘ - ↓ -QueryResult(rows, count) -``` - -**Why separate components?** - -1. **Testability** — Each layer has focused unit tests. 100% coverage is achievable because responsibilities are clear. -2. **Single responsibility** — Parser doesn't know about Redis. QueryBuilder doesn't know about SQL. Changes are localized. -3. **Extensibility** — Adding a new field type (e.g., GEO) means updating Analyzer and QueryBuilder, not rewriting everything. - -**Why not a single monolithic translator?** - -Early prototypes combined parsing and translation. This led to: -- Tests that required Redis connections for simple SQL parsing tests -- Difficulty testing edge cases in isolation -- Tangled code that was hard to modify - -The layered approach emerged from TDD — writing tests first revealed natural boundaries. 
- -## What's Implemented - -- [x] Basic SELECT with field selection -- [x] WHERE with TEXT, NUMERIC, TAG field types -- [x] Comparison operators: `=`, `!=`, `<`, `<=`, `>`, `>=`, `BETWEEN`, `IN` -- [x] Boolean operators: `AND`, `OR` -- [x] Aggregations: `COUNT`, `SUM`, `AVG`, `MIN`, `MAX` -- [x] `GROUP BY` with multiple aggregations -- [x] `ORDER BY` with ASC/DESC -- [x] `LIMIT` and `OFFSET` pagination -- [x] Computed fields: `price * 0.9 AS discounted` -- [x] Vector KNN search: `vector_distance(field, :param)` -- [x] Hybrid search (filters + vector) -- [x] Full-text search: exact phrase, fuzzy, proximity, OR/union, LIKE patterns, BM25 scoring (see below) -- [x] GEO field queries with full operator support (see below) -- [x] Date functions: `YEAR()`, `MONTH()`, `DAY()`, `DATE_FORMAT()`, etc. (see below) -- [x] `IS NULL` / `IS NOT NULL` via `ismissing()` (requires Redis 7.4+, see below) -- [x] `exists()` function for field presence checks (see below) - -## What's Not Implemented (Yet...) - -- [ ] JOINs (Redis doesn't support cross-index joins) -- [ ] Subqueries -- [ ] HAVING clause -- [ ] DISTINCT -- [ ] Index creation from SQL (CREATE INDEX) - -### TEXT Search - -Full-text search on TEXT fields with multiple search modes: - -| Feature | SQL Syntax | RediSearch Output | Notes | -|---------|-----------|-------------------|-------| -| Exact phrase | `title = 'gaming laptop'` | `@title:"gaming laptop"` | Stopwords stripped | -| Tokenized search | `fulltext(title, 'gaming laptop')` | `@title:(gaming laptop)` | Stopwords stripped | -| Fuzzy LD=1 | `fuzzy(title, 'laptap')` | `@title:%laptap%` | | -| Fuzzy LD=2 | `fuzzy(title, 'laptap', 2)` | `@title:%%laptap%%` | | -| Fuzzy LD=3 | `fuzzy(title, 'laptap', 3)` | `@title:%%%laptap%%%` | | -| OR / union | `fulltext(title, 'laptop OR tablet')` | `@title:(laptop\|tablet)` | | -| Prefix | `title LIKE 'lap%'` | `@title:lap*` | | -| Suffix | `title LIKE '%top'` | `@title:*top` | | -| Contains | `title LIKE '%apt%'` | 
`@title:*apt*` | | -| Proximity (slop) | `fulltext(title, 'gaming laptop', 2)` | `@title:(gaming laptop) => { $slop: 2; }` | | -| Proximity + order | `fulltext(title, 'gaming laptop', 2, true)` | `@title:(gaming laptop) => { $slop: 2; $inorder: true; }` | | -| Optional term | `fulltext(title, 'laptop ~gaming')` | `@title:(laptop ~gaming)` | | -| BM25 score | `SELECT score() AS relevance FROM idx` | `FT.SEARCH ... WITHSCORES` | | -| Negation | `NOT fulltext(title, 'refurbished')` | `-@title:refurbished` | | - -**Examples:** - -```sql --- Exact phrase match (stopwords like "of" are stripped automatically) -SELECT * FROM products WHERE title = 'bank of america' --- Produces: @title:"bank america" - --- Fuzzy search for typos (Levenshtein distance 2) -SELECT * FROM products WHERE fuzzy(title, 'laptap', 2) - --- OR search across terms -SELECT * FROM products WHERE fulltext(title, 'laptop OR tablet OR phone') - --- Proximity: terms within 3 words of each other, in order -SELECT * FROM products WHERE fulltext(title, 'gaming laptop', 3, true) - --- Suffix/contains pattern matching -SELECT * FROM products WHERE title LIKE '%phone%' - --- BM25 relevance scoring -SELECT title, score() AS relevance FROM products WHERE fulltext(title, 'laptop') - --- Multi-field search -SELECT * FROM products WHERE fulltext(title, 'laptop') OR fulltext(description, 'laptop') -``` - -**Stopword handling:** - -Both `=` (exact phrase) and `fulltext()` (tokenized search) automatically strip [Redis default stopwords](https://redis.io/docs/latest/develop/ai/search-and-query/advanced-concepts/stopwords/) before sending queries to RediSearch. This is necessary because RediSearch does not index stopwords, so including them in queries causes syntax errors or failed matches. A `UserWarning` is emitted when stopwords are removed. - -For example, `WHERE title = 'bank of america'` produces `@title:"bank america"` because "of" is a default stopword and is never stored in the inverted index. 
The stripped phrase still matches correctly because the indexer assigns consecutive token positions after dropping stopwords. - -To include stopwords in your queries, create your index with `STOPWORDS 0`: - -``` -FT.CREATE myindex ON HASH PREFIX 1 doc: STOPWORDS 0 SCHEMA title TEXT -``` - -**Notes:** -- `=` on TEXT fields performs **exact phrase** matching (double-quoted) -- `fulltext()` performs **tokenized** AND search (parenthesized) -- Both operators strip stopwords and emit a warning when they do -- `fuzzy()` and `fulltext()` only work on TEXT fields; using them on TAG or NUMERIC raises `ValueError` -- OR must be **uppercase**: `'laptop OR tablet'` triggers union; lowercase `'laptop or tablet'` is treated as a regular three-word AND search -- Special characters (`@`, `|`, `-`, `*`, `+`, etc.) in search terms are automatically escaped - -### IS NULL / IS NOT NULL (ismissing) - -Check for missing (absent) fields using standard SQL `IS NULL` / `IS NOT NULL` syntax. Requires **Redis 7.4+** (RediSearch 2.10+) with `INDEXMISSING` declared on the field. - -| SQL | RediSearch Output | -|-----|-------------------| -| `WHERE email IS NULL` | `ismissing(@email)` | -| `WHERE email IS NOT NULL` | `-ismissing(@email)` | - -```sql --- Find users without an email -SELECT * FROM users WHERE email IS NULL - --- Find users with an email -SELECT * FROM users WHERE email IS NOT NULL - --- Combine with other filters -SELECT * FROM users WHERE category = 'eng' AND email IS NULL -``` - -**Note:** The field must be declared with `INDEXMISSING` in the index schema. A warning is emitted at translation time as a reminder. - -### exists() — Field Presence Check - -Check whether a field has a value using `exists()` in SELECT or HAVING. This uses `FT.AGGREGATE` with `APPLY exists(@field)`. 
- -```sql --- Check if fields exist (returns 1 or 0) -SELECT name, exists(email) AS has_email FROM users - --- Filter to only rows where a field exists -SELECT name FROM users HAVING exists(email) = 1 - --- Combine with other computed fields -SELECT name, exists(email) AS has_email, exists(phone) AS has_phone FROM users -``` - -**Note:** `exists()` is different from `IS NOT NULL` — it works via `FT.AGGREGATE APPLY` and doesn't require `INDEXMISSING` on the field, but returns `1`/`0` rather than filtering rows directly. - -### DATE/DATETIME Handling - -Redis does not have a native DATE field type. Dates are stored as **NUMERIC fields** with Unix timestamps. - -**sql-redis automatically converts ISO 8601 date literals to Unix timestamps:** - -```sql --- Date literal (automatically converted to timestamp 1704067200) -SELECT * FROM events WHERE created_at > '2024-01-01' - --- Datetime literal with time -SELECT * FROM events WHERE created_at > '2024-01-01T12:00:00' - --- Date range with BETWEEN -SELECT * FROM events WHERE created_at BETWEEN '2024-01-01' AND '2024-01-31' - --- Multiple date conditions -SELECT * FROM events WHERE created_at > '2024-01-01' AND created_at < '2024-12-31' -``` - -**Supported date formats:** -- Date: `'2024-01-01'` (interpreted as midnight UTC) -- Datetime: `'2024-01-01T12:00:00'` or `'2024-01-01 12:00:00'` -- Datetime with timezone: `'2024-01-01T12:00:00Z'`, `'2024-01-01T12:00:00+00:00'` - -**Note:** All dates without timezone are interpreted as UTC. 
You can also use raw Unix timestamps if preferred: - -```sql -SELECT * FROM events WHERE created_at > 1704067200 -``` - -### Date Functions - -Extract date parts using SQL functions that map to Redis `APPLY` expressions: - -| SQL Function | Redis Function | Description | -|--------------|----------------|-------------| -| `YEAR(field)` | `year(@field)` | Extract year (e.g., 2024) | -| `MONTH(field)` | `monthofyear(@field)` | Extract month (0-11) | -| `DAY(field)` | `dayofmonth(@field)` | Extract day of month (1-31) | -| `HOUR(field)` | `hour(@field)` | Round to hour | -| `MINUTE(field)` | `minute(@field)` | Round to minute | -| `DAYOFWEEK(field)` | `dayofweek(@field)` | Day of week (0=Sunday) | -| `DAYOFYEAR(field)` | `dayofyear(@field)` | Day of year (0-365) | -| `DATE_FORMAT(field, fmt)` | `timefmt(@field, fmt)` | Format timestamp | - -**Examples:** - -```sql --- Extract year and month -SELECT name, YEAR(created_at) AS year, MONTH(created_at) AS month FROM events - --- Filter by year -SELECT name FROM events WHERE YEAR(created_at) = 2024 - --- Group by date parts -SELECT YEAR(created_at) AS year, COUNT(*) FROM events GROUP BY year - --- Format dates -SELECT name, DATE_FORMAT(created_at, '%Y-%m-%d') AS date FROM events -``` - -**Note:** Redis's `monthofyear()` returns 0-11 (not 1-12), and `dayofweek()` returns 0 for Sunday. 
- -#### Limitations - -- `NOT YEAR(field) = 2024` is not supported (raises `ValueError`) -- `DATE_FORMAT()` is only supported in SELECT, not in WHERE (raises `ValueError`) -- Date functions combined with `OR` are not supported (raises `ValueError`) - -### GEO Field Support - -GEO fields are **fully implemented** with standard SQL-like syntax: - -| Feature | Status | -|---------|--------| -| Coordinate order | ✅ `POINT(lon, lat)` — matches Redis native format | -| Default unit | ✅ Meters (`m`) — SQL standard | -| All operators | ✅ `<`, `<=`, `>`, `>=`, `BETWEEN` | -| Distance calculation | ✅ `geo_distance()` in SELECT clause | -| Combined filters | ✅ GEO + TEXT/TAG/NUMERIC | - -#### Coordinate Order: `POINT(lon, lat)` - -Use **longitude first**, matching Redis's native GEO format: - -```sql --- San Francisco coordinates: lon=-122.4194, lat=37.7749 -SELECT name FROM stores WHERE geo_distance(location, POINT(-122.4194, 37.7749)) < 5000 + print(row[b"title"], row[b"price"]) ``` -#### Units +## Documentation -| Unit | Code | Example | -|------|------|---------| -| Meters | `m` | `geo_distance(location, POINT(-122.4194, 37.7749)) < 5000` | -| Kilometers | `km` | `geo_distance(location, POINT(-122.4194, 37.7749), 'km') < 5` | -| Miles | `mi` | `geo_distance(location, POINT(-122.4194, 37.7749), 'mi') < 3` | -| Feet | `ft` | `geo_distance(location, POINT(-122.4194, 37.7749), 'ft') < 16400` | +Full documentation is published at **[docs.redisvl.com/projects/sql-redis/](https://docs.redisvl.com/projects/sql-redis/)**. -**Default is meters** when no unit is specified. 
+- **Getting started:** [User Guide](https://docs.redisvl.com/projects/sql-redis/en/latest/user_guide/getting-started.html) +- **How-to guides:** [How-to Guides](https://docs.redisvl.com/projects/sql-redis/en/latest/user_guide/how_to_guides/) +- **Concepts and design:** [Concepts](https://docs.redisvl.com/projects/sql-redis/en/latest/concepts/) +- **API reference:** [API](https://docs.redisvl.com/projects/sql-redis/en/latest/api/) +- **SQL syntax catalog:** [SQL Syntax](https://docs.redisvl.com/projects/sql-redis/en/latest/api/sql-syntax.html) -#### Operators +## For AI agents -All comparison operators are supported: +- **[`AGENTS.md`](AGENTS.md):** how to use sql-redis from an agent, including gotchas and the error model. +- **[`docs/llms.txt`](docs/llms.txt):** flat index of every doc page with one-line summaries. +- **[`docs/for-ais-only/`](docs/for-ais-only/):** repository map, build and test guide, and intentional failure modes for agents modifying the library. -```sql --- Less than (uses optimized GEOFILTER) -SELECT name FROM stores WHERE geo_distance(location, POINT(-122.4194, 37.7749)) < 5000 +To build the docs locally: --- Less than or equal (uses optimized GEOFILTER) -SELECT name FROM stores WHERE geo_distance(location, POINT(-122.4194, 37.7749)) <= 5000 - --- Greater than (uses FT.AGGREGATE with FILTER) -SELECT name FROM stores WHERE geo_distance(location, POINT(-122.4194, 37.7749)) > 100000 - --- Greater than or equal (uses FT.AGGREGATE with FILTER) -SELECT name FROM stores WHERE geo_distance(location, POINT(-122.4194, 37.7749)) >= 100000 - --- Between (uses FT.AGGREGATE with FILTER) -SELECT name FROM stores WHERE geo_distance(location, POINT(-122.4194, 37.7749), 'km') BETWEEN 10 AND 100 -``` - -#### Distance Calculation in SELECT - -Calculate distances for all results using `geo_distance()` in the SELECT clause: - -```sql --- Get distance to each store (returns meters) -SELECT name, geo_distance(location, POINT(-122.4194, 37.7749)) AS distance -FROM 
stores - --- With explicit unit -SELECT name, geo_distance(location, POINT(-122.4194, 37.7749), 'km') AS distance_km -FROM stores -``` - -#### Combined Filters - -Combine GEO filters with other field types: - -```sql --- GEO + TAG filter -SELECT name FROM stores -WHERE category = 'retail' AND geo_distance(location, POINT(-122.4194, 37.7749)) < 5000 - --- GEO + NUMERIC filter -SELECT name FROM stores -WHERE rating >= 4.0 AND geo_distance(location, POINT(-122.4194, 37.7749), 'mi') < 10 - --- GEO + TEXT filter -SELECT name FROM stores -WHERE name = 'Downtown' AND geo_distance(location, POINT(-122.4194, 37.7749)) < 10000 +```bash +uv sync --group docs +make docs-build +make docs-serve # http://localhost:8000 ``` ## Development ```bash -# Install dependencies -uv sync --all-extras - -# Run tests (requires Docker for testcontainers) -uv run pytest - -# Run with coverage -uv run pytest --cov=sql_redis --cov-report=html +make install # uv sync +make test # requires Docker for testcontainers +make test-cov # with coverage report +make lint # format + mypy ``` -## Testing Philosophy - -This project uses strict TDD with 100% test coverage as a hard requirement. The approach: - -1. **Write failing tests first** — Define expected behavior before implementation -2. **One test at a time** — Implement just enough to pass each test -3. **No untestable code** — If we can't test it, we don't write it -4. **Integration tests mirror raw Redis** — `test_sql_queries.py` verifies SQL produces same results as equivalent `FT.AGGREGATE` commands in `test_redis_queries.py` +The project uses strict TDD with 100% coverage enforced in CI. See [`docs/concepts/testing-philosophy.md`](docs/concepts/testing-philosophy.md). -Coverage is enforced in CI. Pragmas (`# pragma: no cover`) are forbidden — if code can't be tested, it shouldn't exist. 
+## License +MIT diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..5c2dc9c --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,14 @@ +# Minimal makefile for Sphinx documentation + +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/SPEC.md b/docs/SPEC.md new file mode 100644 index 0000000..dc0c79e --- /dev/null +++ b/docs/SPEC.md @@ -0,0 +1,115 @@ +# Documentation Spec + +## Goal + +Replace the single 456-line README with a Diátaxis-aligned Sphinx documentation site, modeled on `redis-vl-python`'s `docs/` layout, so that: + +1. Each piece of content lives in the quadrant that matches its purpose (learning, task, reference, understanding). +2. The full public API surface (currently 11 exported symbols) is discoverable. +3. Docstrings in `sql_redis/` are the single source of truth for API reference (via Sphinx `autoclass`). +4. The site can be built locally and published to Read the Docs. + +## Diátaxis Recap + +| Quadrant | User need | Voice | Folder | +|---|---|---|---| +| Tutorial | "Teach me" (learning) | Hand-holding, end-to-end | `user_guide/getting-started.md` | +| How-to | "Help me do X" (task) | Recipe, presumes knowledge | `user_guide/how_to_guides/` | +| Reference | "Tell me what" (info) | Dry, complete, accurate | `api/`, `api/sql-syntax.md` | +| Explanation | "Help me understand" (theory) | Discursive, context, history | `concepts/` | + +Tutorials and how-to are both in `user_guide/` (matching the redis-vl-python layout) but kept structurally distinct. 
+ +## Folder Layout + +``` +docs/ + index.md landing page with grid cards + conf.py Sphinx config (myst, autoclass, sphinx_book_theme, sphinx_design) + Makefile standard sphinx makefile + + concepts/ EXPLANATION + index.md grid landing + architecture.md layered pipeline + diagram + why-sql.md SQL vs pandas-like vs builder + why-sqlglot.md sqlglot vs custom parser + schema-aware-translation.md why FT.INFO matters, lazy vs eager + parameter-substitution.md token-based substitution rationale (from PARAMETER_SUBSTITUTION.md) + testing-philosophy.md TDD, 100% coverage + + user_guide/ TUTORIAL + HOW-TO + index.md grid landing + installation.md pip install, Redis setup + getting-started.md first end-to-end query + how_to_guides/ + index.md + use-parameters.md token substitution, vector params + vector-search.md KNN, hybrid filter+vector + text-search.md exact phrase, fuzzy, proximity, BM25 + geo-queries.md POINT, units, operators + date-queries.md ISO literals, YEAR/MONTH/DAY + missing-fields.md IS NULL, exists() + lazy-vs-eager-schemas.md SchemaCacheStrategy + async-usage.md AsyncExecutor, AsyncSchemaRegistry + + api/ REFERENCE + index.md TOC + translator.rst Translator, TranslatedQuery (autoclass) + schema.rst SchemaRegistry, AsyncSchemaRegistry (autoclass) + executor.rst Executor, AsyncExecutor, QueryResult, factories (autoclass) + sql-syntax.md reference tables (TEXT, GEO, dates) extracted from README + + examples/ + index.md placeholder pointing back to user_guide +``` + +## API Reference Generation + +Use `sphinx.ext.autodoc` + `sphinx.ext.napoleon` (Google-style docstrings, which the codebase already uses). + +Each `.rst` file declares the symbols with `autoclass :members:` so the docstrings already in `executor.py`, `schema.py`, `translator.py` become the rendered reference. No duplication, no drift. + +## README + +Trim to about 80 lines: tagline, one-screen quick example, install, link to docs site, status note. 
The reference tables and design discussions move to docs. + +## Root Files + +- `PR_NOTES.md`: delete (transient PR description, has no place at repo root). +- `PARAMETER_SUBSTITUTION.md`: content migrated to `concepts/parameter-substitution.md`, root file deleted. + +## Build Targets + +Root `Makefile` gains, mirroring redis-vl-python: + +```make +docs-build: uv run make -C docs html +docs-serve: uv run python -m http.server --directory docs/_build/html +``` + +`docs/Makefile` is the standard Sphinx-generated catch-all that delegates to `sphinx-build`. + +## Dependencies + +Add a `docs` dependency group to `pyproject.toml`: + +```toml +[dependency-groups] +docs = [ + "sphinx>=7.3", + "sphinx-book-theme>=1.1", + "sphinx-design>=0.6", + "sphinx-copybutton>=0.5", + "myst-parser>=3.0", +] +``` + +## Read the Docs + +Add `.readthedocs.yaml` so the site can be published. Build uses uv with `--group docs`. + +## Out of Scope + +- No Jupyter notebook tutorials (per user instruction). +- No CONTRIBUTING.md (separate concern). +- No MCP / connector pages (those don't exist for sql-redis). diff --git a/docs/api/executor.rst b/docs/api/executor.rst new file mode 100644 index 0000000..16ed567 --- /dev/null +++ b/docs/api/executor.rst @@ -0,0 +1,98 @@ +******** +Executor +******** + +The executor runs a translated SQL query against Redis and parses the response +into a :class:`~sql_redis.QueryResult`. There are sync and async variants and +factory functions that wire up a schema registry for you. + +.. list-table:: + :widths: 30 70 + :header-rows: 1 + + * - Symbol + - Description + * - :ref:`executor_api` + - Sync executor. + * - :ref:`asyncexecutor_api` + - Async executor. + * - :ref:`createexecutor_api` + - Factory for the sync executor with a configurable cache strategy. + * - :ref:`createasyncexecutor_api` + - Factory for the async executor. + * - :ref:`queryresult_api` + - Result rows and total count. 
+ * - :ref:`schemacachestrategy_api` + - ``"lazy"`` or ``"load_all"`` literal. + +.. _executor_api: + +Executor +======== + +.. currentmodule:: sql_redis + +.. autoclass:: Executor + :members: + :inherited-members: + +.. _asyncexecutor_api: + +AsyncExecutor +============= + +.. currentmodule:: sql_redis + +.. autoclass:: AsyncExecutor + :members: + :inherited-members: + +.. _createexecutor_api: + +create_executor +=============== + +.. currentmodule:: sql_redis + +.. autofunction:: create_executor + +.. _createasyncexecutor_api: + +create_async_executor +===================== + +.. currentmodule:: sql_redis + +.. autofunction:: create_async_executor + +.. _queryresult_api: + +QueryResult +=========== + +.. currentmodule:: sql_redis + +.. autoclass:: QueryResult + :members: + +.. _schemacachestrategy_api: + +SchemaCacheStrategy +=================== + +.. currentmodule:: sql_redis + +.. autodata:: SchemaCacheStrategy + +.. _version_api: + +__version__ +=========== + +.. currentmodule:: sql_redis + +.. autodata:: __version__ + :annotation: = "" + +The installed package version, as a string. Useful for log lines, bug +reports, and version-gated feature checks. diff --git a/docs/api/index.md b/docs/api/index.md new file mode 100644 index 0000000..cf4dce5 --- /dev/null +++ b/docs/api/index.md @@ -0,0 +1,19 @@ +--- +myst: + html_meta: + "description lang=en": | + sql-redis API reference. Generated from docstrings. +--- + +# API Reference + +Reference documentation for the public sql-redis API. Each class and function is generated from the docstrings in the source. + +```{toctree} +:maxdepth: 2 + +translator +schema +executor +sql-syntax +``` diff --git a/docs/api/schema.rst b/docs/api/schema.rst new file mode 100644 index 0000000..c55a536 --- /dev/null +++ b/docs/api/schema.rst @@ -0,0 +1,40 @@ +***************** +Schema Registries +***************** + +The schema registry caches index field types loaded from Redis via ``FT.INFO``. +There are sync and async variants. 
Conceptual background is in +:doc:`/concepts/schema-aware-translation`. + +.. list-table:: + :widths: 25 75 + :header-rows: 1 + + * - Class + - Description + * - :ref:`schemaregistry_api` + - Sync registry. Lazy and eager loading, polling for index changes. + * - :ref:`asyncschemaregistry_api` + - Async registry. Coalesced concurrent loads, cancellation-safe. + +.. _schemaregistry_api: + +SchemaRegistry +============== + +.. currentmodule:: sql_redis + +.. autoclass:: SchemaRegistry + :members: + :inherited-members: + +.. _asyncschemaregistry_api: + +AsyncSchemaRegistry +=================== + +.. currentmodule:: sql_redis + +.. autoclass:: AsyncSchemaRegistry + :members: + :inherited-members: diff --git a/docs/api/sql-syntax.md b/docs/api/sql-syntax.md new file mode 100644 index 0000000..52b4335 --- /dev/null +++ b/docs/api/sql-syntax.md @@ -0,0 +1,99 @@ +# SQL Syntax Reference + +The complete catalog of SQL clauses, operators, and functions sql-redis recognises, with their RediSearch translation. + +## Supported + +| Clause / feature | Status | +|---|---| +| `SELECT` field list and `*` | yes | +| `SELECT expr AS alias` (computed fields) | yes | +| `WHERE` with `TEXT`, `NUMERIC`, `TAG`, `GEO` | yes | +| Comparison operators `=`, `!=`, `<`, `<=`, `>`, `>=` | yes | +| `BETWEEN` | yes | +| `IN` | yes | +| Boolean `AND`, `OR`, `NOT` | yes | +| Aggregations `COUNT`, `SUM`, `AVG`, `MIN`, `MAX` | yes | +| `GROUP BY` | yes | +| `ORDER BY` `ASC` / `DESC` | yes | +| `LIMIT` and `OFFSET` | yes | +| Vector KNN via `vector_distance(field, :param)` | yes | +| Hybrid search (filters + vector) | yes | +| Full-text modes (exact phrase, fuzzy, proximity, OR, `LIKE`, BM25) | yes | +| GEO via `geo_distance(field, POINT(lon, lat))` | yes | +| Date functions (`YEAR`, `MONTH`, `DAY`, `DATE_FORMAT`, ...) 
| yes |
+| `IS NULL` / `IS NOT NULL` (Redis 7.4+) | yes |
+| `exists()` for field presence | yes |
+
+## Not supported
+
+| Clause | Why |
+|---|---|
+| `JOIN` | Redis has no cross-index join. |
+| Subqueries | Out of scope for the POC. |
+| `HAVING` | Out of scope in general (use `WHERE` plus `GROUP BY` where possible); the one recognised form is `HAVING exists(field) = 1`, listed under "Missing fields" below. |
+| `DISTINCT` | Out of scope. |
+| `CREATE INDEX` | sql-redis does not create schemas. Use `FT.CREATE`. |
+
+## TEXT search
+
+| Feature | SQL syntax | RediSearch output |
+|---|---|---|
+| Exact phrase | `title = 'gaming laptop'` | `@title:"gaming laptop"` |
+| Tokenized AND | `fulltext(title, 'gaming laptop')` | `@title:(gaming laptop)` |
+| Fuzzy LD=1 | `fuzzy(title, 'laptap')` | `@title:%laptap%` |
+| Fuzzy LD=2 | `fuzzy(title, 'laptap', 2)` | `@title:%%laptap%%` |
+| Fuzzy LD=3 | `fuzzy(title, 'laptap', 3)` | `@title:%%%laptap%%%` |
+| OR / union | `fulltext(title, 'a OR b')` | `@title:(a\|b)` |
+| Prefix | `title LIKE 'lap%'` | `@title:lap*` |
+| Suffix | `title LIKE '%top'` | `@title:*top` |
+| Contains | `title LIKE '%apt%'` | `@title:*apt*` |
+| Proximity (slop) | `fulltext(title, 'a b', 2)` | `@title:(a b) => { $slop: 2; }` |
+| Proximity + order | `fulltext(title, 'a b', 2, true)` | `@title:(a b) => { $slop: 2; $inorder: true; }` |
+| Optional term | `fulltext(title, 'a ~b')` | `@title:(a ~b)` |
+| Negation | `NOT fulltext(title, 'x')` | `-@title:x` |
+| BM25 score | `score() AS rel` | `WITHSCORES` |
+
+See {doc}`/user_guide/how_to_guides/text-search` for a task-oriented walkthrough.
+
+## GEO
+
+| Feature | Notes |
+|---|---|
+| Coordinates | `POINT(lon, lat)`, longitude first |
+| Default unit | meters |
+| Units | `m`, `km`, `mi`, `ft` |
+| Operators | `<`, `<=`, `>`, `>=`, `BETWEEN` |
+| In `SELECT` | `geo_distance(loc, POINT(lon, lat)) AS d` |
+
+See {doc}`/user_guide/how_to_guides/geo-queries`.
+ +## Date functions + +| SQL function | Redis function | Notes | +|---|---|---| +| `YEAR(field)` | `year(@field)` | | +| `MONTH(field)` | `monthofyear(@field)` | 0-11 | +| `DAY(field)` | `dayofmonth(@field)` | 1-31 | +| `HOUR(field)` | `hour(@field)` | | +| `MINUTE(field)` | `minute(@field)` | | +| `DAYOFWEEK(field)` | `dayofweek(@field)` | 0 = Sunday | +| `DAYOFYEAR(field)` | `dayofyear(@field)` | 0-365 | +| `DATE_FORMAT(field, fmt)` | `timefmt(@field, fmt)` | `SELECT` only | + +ISO 8601 date and datetime literals in `WHERE` are converted to Unix timestamps automatically. + +See {doc}`/user_guide/how_to_guides/date-queries`. + +## Missing fields + +| Feature | SQL | Output | +|---|---|---| +| Filter by absence | `WHERE email IS NULL` | `ismissing(@email)` | +| Filter by presence | `WHERE email IS NOT NULL` | `-ismissing(@email)` | +| Add 0/1 column | `SELECT exists(email) AS has_email` | `APPLY exists(@email)` | +| Filter via aggregate | `HAVING exists(email) = 1` | `FILTER` after `APPLY` | + +`IS NULL` requires Redis 7.4+ and `INDEXMISSING` on the field. + +See {doc}`/user_guide/how_to_guides/missing-fields`. diff --git a/docs/api/translator.rst b/docs/api/translator.rst new file mode 100644 index 0000000..38ffa35 --- /dev/null +++ b/docs/api/translator.rst @@ -0,0 +1,23 @@ +********** +Translator +********** + +The translator turns a SQL string into a Redis ``FT.SEARCH`` or ``FT.AGGREGATE`` +command. It does not execute anything; use :class:`~sql_redis.Executor` for that. + +Translator +========== + +.. currentmodule:: sql_redis + +.. autoclass:: Translator + :members: + :inherited-members: + +TranslatedQuery +=============== + +.. currentmodule:: sql_redis + +.. 
autoclass:: TranslatedQuery + :members: diff --git a/docs/concepts/architecture.md b/docs/concepts/architecture.md new file mode 100644 index 0000000..fe58cb9 --- /dev/null +++ b/docs/concepts/architecture.md @@ -0,0 +1,75 @@ +# Architecture + +sql-redis sits between an application and Redis, turning a SQL `SELECT` string into a `FT.SEARCH` or `FT.AGGREGATE` command and parsing the reply. + +## The two top-level objects + +A user's program touches two classes: + +``` + ┌──────────────────────────────────────────┐ + │ Executor │ + │ ┌────────────────────────────────────┐ │ + │ │ Translator │ │ + │ │ ┌──────────┐ ┌────────────────┐ │ │ + │ │ │ SQLParser│→ │ Analyzer │ │ │ + │ │ └──────────┘ └───────┬────────┘ │ │ + │ │ ▼ │ │ + │ │ ┌────────────────┐ │ │ + │ │ │ QueryBuilder │ │ │ + │ │ └────────────────┘ │ │ + │ └────────────────────────────────────┘ │ + │ │ + │ ┌──────────────────────────┐ │ + │ │ SchemaRegistry │ │ + │ │ (consulted by Analyzer │ │ + │ │ and Translator) │ │ + │ └──────────────────────────┘ │ + └──────────────────────────────────────────┘ +``` + +`Executor` runs the query end to end. Internally it owns a `Translator` (which owns a parser, an analyzer, and a query builder), plus a `SchemaRegistry`. Both top-level classes have async siblings: `AsyncExecutor` and `AsyncSchemaRegistry`. + +## What each layer does + +``` +SQL string + │ + ▼ parse +ParsedQuery (sqlglot AST plus extracted index, fields, conditions) + │ + ▼ analyze, consulting SchemaRegistry for field types +AnalyzedQuery (each WHERE condition tagged with its field's Redis type) + │ + ▼ build per-type RediSearch syntax +TranslatedQuery (command + index + query string + args + score alias) + │ + ▼ execute against Redis, parse the reply +QueryResult (rows, count) +``` + +- **`SQLParser`** wraps sqlglot. Pure: no Redis dependency. Output is a `ParsedQuery`, the library's own dataclass. +- **`Analyzer`** decides how each `WHERE` condition will translate, based on the underlying field's type. 
This is the only place the schema registry is consulted during translation. See {doc}`schema-aware-translation` for why this lookup is necessary. +- **`QueryBuilder`** is stateless. Given a tagged condition, it knows how to emit `@field:term`, `@field:[min max]`, `@field:{value}`, and so on. +- **`Translator`** is the orchestrator. It calls parse, analyze, build in order and packages the result into a `TranslatedQuery`. It also decides whether the final command is `FT.SEARCH` or `FT.AGGREGATE` (see {doc}`search-vs-aggregate`). +- **`Executor`** is the only layer that talks to Redis at query time. It substitutes parameters ({doc}`parameter-substitution`), sends the command, parses the reply into rows ({doc}`result-shape`). + +## Why this layering exists + +Each concern is genuinely independent. + +**Testability.** Each layer can be unit-tested in isolation. A `SQLParser` test does not need Redis. A `QueryBuilder` test does not need a parsed AST it cannot construct by hand. 100% coverage is achievable because no class has a dependency it cannot fake. + +**Single responsibility.** The parser does not know about Redis. The query builder does not know about SQL. A change to one rarely cascades. + +**Extensibility.** Adding a new field type, say a future GEO variant, means updating the analyzer and query builder. The parser, schema registry, and executor are unaffected. + +## Why not a single monolithic translator + +Early prototypes combined parsing and translation. That led to: + +- Tests that needed Redis connections to verify pure SQL parsing. +- Difficulty isolating edge cases: a single failure could be in any of three responsibilities. +- Tangled code that resisted modification. + +The layered split emerged from TDD. Writing the test first repeatedly forced a question of "what does this class actually need?" and the boundaries fell out naturally. 
diff --git a/docs/concepts/async-invariants.md b/docs/concepts/async-invariants.md new file mode 100644 index 0000000..a0330a3 --- /dev/null +++ b/docs/concepts/async-invariants.md @@ -0,0 +1,48 @@ +# Async Invariants + +`AsyncExecutor` and `AsyncSchemaRegistry` are not just sync code with `await` sprinkled in. The async path makes three guarantees that the sync path has no need for, because async exposes new failure modes (cancellation, concurrency) that sync does not. + +## Invariant 1: at most one in-flight `FT.INFO` per index + +A burst of concurrent `executor.execute()` calls for an index whose schema is not yet cached would, in a naive implementation, all race to issue `FT.INFO`. That is a thundering herd against Redis: N callers, N round-trips, identical results, only one cache write needed. + +The async registry coalesces these. The first caller starts an `asyncio.Task` that issues the `FT.INFO`; subsequent callers find the task already in flight and `await` the same task. Only the first caller pays the round-trip cost; the others get the result for free as soon as it lands. + +Implementation: `AsyncSchemaRegistry._loading` is a `dict[str, Task]`. `ensure_schema(index)` checks for an in-flight task before starting one. + +## Invariant 2: cancellation is shielded + +A user can cancel an `await executor.execute(...)`, deliberately (`task.cancel()`) or by side effect (`asyncio.wait_for(...)` timeout). When that happens, the cancellation must not propagate into a shared schema-load task that other awaiters are still relying on. + +The fix is `asyncio.shield`. The shared `FT.INFO` task is awaited inside a shield, so the caller's cancellation aborts the *await*, not the underlying task. The task keeps running and resolves for any remaining awaiters. + +Without shielding, one caller's `wait_for` timeout could cancel the shared task, and every concurrent waiter would see `CancelledError` from a query that had nothing wrong with it. 
The sync registry needs none of this because its `FT.INFO` call cannot be cancelled mid-flight. + +## Invariant 3: `invalidate()` cancels any in-flight load deliberately + +The previous invariant says caller cancellation must not stop the shared task. The reverse is also true: cache invalidation must stop it. + +If you call `invalidate("products")` while an `FT.INFO("products")` is in flight, the in-flight task is cancelled and removed from `_loading`. Otherwise the task could complete after invalidation and write a now-stale schema back into the cache. The race window is narrow but real. + +The next call to `ensure_schema("products")` finds nothing in `_loading`, starts a fresh task, and gets the post-invalidate state. + +## What this means for callers + +In practice, you get three properties that are easy to take for granted but expensive to implement: + +1. A burst of concurrent first-time queries against a new index issues exactly one `FT.INFO`. Coalescing is automatic; you never write any locking yourself. +2. Cancelling a query (timeout or `cancel()`) does not affect concurrent queries. The shared schema fetch survives. +3. After `invalidate()`, the next access reads fresh state. There is no risk of an in-flight stale-write race. + +If you are implementing something analogous in your own code (a different cache that loads from Redis), the three invariants are a useful checklist. + +## What is *not* an invariant + +The following are explicitly **not** guaranteed: + +- **`FT.INFO` order across indexes.** `load_all()` uses `asyncio.gather`; schemas land in arrival order, not in `FT._LIST` order. Code that depends on a specific load order is wrong. +- **Sync registry parity.** The sync `SchemaRegistry` has none of these mechanisms. Its `get_schema()` is blocking and serial; concurrent threads using a shared sync registry can race. If you need thread safety, wrap it yourself or use the async registry on an event loop. 
+ +## Reference + +The recipes for using these mechanisms (cancellation-safe queries, post-alteration invalidation, change watching) are in {doc}`/user_guide/how_to_guides/async-usage` and {doc}`/user_guide/how_to_guides/lazy-vs-eager-schemas`. diff --git a/docs/concepts/index.md b/docs/concepts/index.md new file mode 100644 index 0000000..76f69b9 --- /dev/null +++ b/docs/concepts/index.md @@ -0,0 +1,101 @@ +--- +myst: + html_meta: + "description lang=en": | + Concepts behind sql-redis. Architecture and design decisions. +--- + +# Concepts + +Foundational reading for sql-redis. Each page explains a single design choice or sub-system, with enough context to make informed extensions or contributions. + +::::{grid} 2 +:gutter: 3 + +:::{grid-item-card} 🏗️ Architecture +:link: architecture +:link-type: doc + +The two top-level objects (Executor, SchemaRegistry) and the layered translator they contain. +::: + +:::{grid-item-card} 🤔 Why SQL? +:link: why-sql +:link-type: doc + +The interface choice. SQL versus a pandas-style DSL versus a builder API. +::: + +:::{grid-item-card} 🪛 Why sqlglot? +:link: why-sqlglot +:link-type: doc + +The parser choice. sqlglot versus a hand-rolled recursive-descent parser. +::: + +:::{grid-item-card} 🗂️ Schema-aware translation +:link: schema-aware-translation +:link-type: doc + +Why field types matter, how the schema registry caches them, lazy versus eager loading. +::: + +:::{grid-item-card} 🔀 FT.SEARCH vs FT.AGGREGATE +:link: search-vs-aggregate +:link-type: doc + +Which Redis command runs for a given SQL, why the choice is forced, and which feature combinations are illegal. +::: + +:::{grid-item-card} 🔣 Parameter substitution +:link: parameter-substitution +:link-type: doc + +The token-based substitution algorithm and the bugs it fixes. 
+::: + +:::{grid-item-card} 🧬 Vector substitution +:link: vector-substitution +:link-type: doc + +Why bytes parameters take a different path: two-stage substitution that keeps vectors out of the SQL string. +::: + +:::{grid-item-card} 🔁 Async invariants +:link: async-invariants +:link-type: doc + +Coalesced FT.INFO loads, shielded reads, invalidate-cancels-in-flight. The three guarantees the async path provides. +::: + +:::{grid-item-card} 📋 Result shape +:link: result-shape +:link-type: doc + +What QueryResult.rows actually contains, why it varies with the command, scoring, and client decoding. +::: + +:::{grid-item-card} 🧪 Testing philosophy +:link: testing-philosophy +:link-type: doc + +TDD, 100% coverage, and why integration tests do not mock Redis. +::: + +:::: + +```{toctree} +:maxdepth: 2 +:hidden: + +architecture +why-sql +why-sqlglot +schema-aware-translation +search-vs-aggregate +parameter-substitution +vector-substitution +async-invariants +result-shape +testing-philosophy +``` diff --git a/docs/concepts/parameter-substitution.md b/docs/concepts/parameter-substitution.md new file mode 100644 index 0000000..4c89e2c --- /dev/null +++ b/docs/concepts/parameter-substitution.md @@ -0,0 +1,45 @@ +# Parameter Substitution + +`Executor.execute(sql, params=...)` lets a caller inject runtime values into a SQL string. The implementation is deliberately simple. Understanding why is worth a page because two earlier approaches were tried and rejected. + +## What goes wrong without care + +A first-cut implementation (`for k, v in params: sql.replace(f":{k}", str(v))`) breaks in two ways that occur in real data, not contrived examples. + +**Apostrophes break SQL parsing.** Names like `O'Brien`, words like `it's`, products like `McDonald's`: dropping these directly into a SQL string produces `name = 'O'Brien'`, which is a syntax error. The fix is the SQL-standard escape: `'` becomes `''`. 
So the substitution must wrap strings in quotes and escape internal quotes before inserting them. + +**Similar parameter names overlap.** `:id` is a prefix of `:product_id`. A naive replace of `:id` matches inside `:product_id` and corrupts both. The fix is to recognise complete parameter tokens, not substrings. + +These are not edge cases. The first hit any database with European names; the second hits anything with `:id` and a more specific identifier on the same query. + +## The three approaches considered + +| Approach | Lines | Deps | Fixes both bugs | +|---|---|---|---| +| `str.replace()` per param | 3 | none | no | +| Token-based regex | 30 | stdlib `re` | yes | +| sqlglot parse-and-rewrite | 60 | `sqlglot` | yes | + +We chose token-based. + +## Why token-based, not parser-based + +A regex split on the parameter pattern, keeping each `:name` whole, fixes both bugs without invoking a SQL parser. The pattern is: + +``` +(:[a-zA-Z_][a-zA-Z0-9_]*) +``` + +Two properties of this pattern matter. First, the parenthesis means the matched delimiter is *kept* in the split output, so the substitution function can inspect it and either replace it (it's a known parameter) or leave it (it isn't). Second, the regex demands a complete identifier ending at a non-identifier character, so `:id` and `:product_id` come out as different tokens. + +The sqlglot route would be more general, in particular for the theoretical case of a colon literal embedded in a SQL string (`'admin:test@example.com'`). It would also be slower, dependency-heavier, and would have to fall back somewhere when sqlglot fails to parse a query that the existing pipeline accepts. The colon-in-literal pattern has not appeared in any real query we have seen; users pass the troublesome string as a parameter, not a literal. The trade-off was clear. 
+ +## Why bytes are not stringified + +A substitution function that handled string and numeric types but not bytes would force vector queries to either pre-encode their vectors as base64 strings (and have RediSearch reject them) or use a side-channel API. Neither is good. + +The library's answer is the **two-stage substitution** described in {doc}`vector-substitution`. Briefly: bytes parameters are intentionally *skipped* at the string-substitution stage. The translator emits a `$vector` placeholder, and the executor injects the raw bytes into the Redis command list after translation, where Redis accepts them natively. From the caller's perspective, vector params look identical to other params. + +## What this concept does not cover + +The full table of which Python types substitute into what SQL form lives in the how-to ({doc}`/user_guide/how_to_guides/use-parameters`). The reference for the regex pattern itself lives in `sql_redis/executor.py::_substitute_params`. This page is the *why*, not the *how* or *what*. diff --git a/docs/concepts/result-shape.md b/docs/concepts/result-shape.md new file mode 100644 index 0000000..9890aa9 --- /dev/null +++ b/docs/concepts/result-shape.md @@ -0,0 +1,95 @@ +# Result Shape + +Every `Executor.execute()` call returns a `QueryResult` with two attributes: `rows` (a list of dicts) and `count` (an integer). Both look simple but have a few corners that are worth understanding once so you do not re-derive them from test failures. + +## What `count` means + +The integer in `count` is what Redis returned at position 0 of the reply. Its meaning depends on which command ran (see {doc}`search-vs-aggregate`): + +- **`FT.SEARCH`** (no aggregation): `count` is the **total number of matching documents** in the index, regardless of `LIMIT`. So `count` can be much larger than `len(rows)`. This is useful for pagination. 
+- **`FT.AGGREGATE`** (any aggregation, GROUP BY, computed field, date function): `count` is the **number of rows in the reply**, which is what you got back. After `LIMIT`, the two are equal.
+
+## What `rows` looks like
+
+A row is always a `dict`, but the *keys* and *values* depend on four orthogonal factors.
+
+### Factor 1: client decoding
+
+A `Redis()` client without `decode_responses=True` returns bytes. A row therefore looks like:
+
+```python
+{b"title": b"gaming laptop", b"price": b"1499"}
+```
+
+`Redis(decode_responses=True)` returns strings:
+
+```python
+{"title": "gaming laptop", "price": "1499"}
+```
+
+The library does not normalise this. If you want strings, configure the client. If you want native types (e.g., `1499` as `int`), parse them yourself.
+
+### Factor 2: which command ran
+
+`FT.SEARCH` rows contain only the document fields you asked for in `SELECT`. `FT.AGGREGATE` rows can include computed columns, group keys, and reduced values; original document fields appear only if you asked for them.
+
+```python
+# FT.SEARCH: SELECT title, price FROM products WHERE ...
+{b"title": b"gaming laptop", b"price": b"1499"}
+
+# FT.AGGREGATE: SELECT category, COUNT(*) AS n FROM products GROUP BY category
+{b"category": b"electronics", b"n": b"2"}
+
+# FT.AGGREGATE: SELECT name, geo_distance(loc, POINT(...)) AS d FROM stores
+{b"name": b"Downtown", b"d": b"4823.5"}
+```
+
+### Factor 3: scoring
+
+If your SELECT contains `score()`, the underlying `FT.SEARCH` runs with `WITHSCORES` and an extra column appears in every row:
+
+```python
+# SELECT title, score() AS relevance FROM products WHERE fulltext(...)
+{b"title": b"gaming laptop", b"relevance": "0.5"}
+```
+
+The score column name is whatever alias you wrote (`score() AS relevance` produces `relevance`). If you used `score()` without `AS`, the library falls back to a stable internal name.
+ +The score's value type is `str` (or `bytes` depending on decoding); if you need a `float`, convert at the call site. + +### Factor 4: `RETURN 0` + +If your SELECT is just `score()` with no document fields, the underlying command uses `RETURN 0` (no document fields returned). Each row contains only the score column: + +```python +# SELECT score() AS s FROM products WHERE fulltext(title, 'laptop') +{b"s": "0.5"} +``` + +This is mostly useful when you want a relevance-only ranking without paying to ship the document body back. + +## Score-column collision avoidance + +What happens if your document has a field literally named `__score`? The library detects the collision and renames the score column. The deterministic fallback is `__score_`. So: + +- `score() AS __score` against documents with no `__score` field: row key is `__score`. +- `score() AS __score` against documents with a field named `__score`: row key is `__score___score`. + +You will not normally see this; it is a defensive measure for the unusual case. + +## Why the library does not "fix" this + +It would be tempting to normalise everything: always strings, always `float` scores, always uniform shape. The library does not, for three reasons. + +1. **Bytes is the right default for Redis.** Some fields legitimately contain binary data. Force-decoding to UTF-8 would corrupt those. +2. **The shape difference between `FT.SEARCH` and `FT.AGGREGATE` is intrinsic to the Redis commands.** Hiding it would lie about what is happening. +3. **One-call-site logic is cheap; library-wide policy is expensive.** A user who wants a uniform shape can write a 5-line decoder once, customised to their data. The library shipping that decoder for everyone is the wrong place for the policy. + +## When in doubt + +```python +result = executor.execute(sql) +print(repr(result.rows[0]) if result.rows else "(no rows)") +``` + +The repr tells you the keys, values, and types in front of you. 
Build your downstream code against what you actually see. diff --git a/docs/concepts/schema-aware-translation.md b/docs/concepts/schema-aware-translation.md new file mode 100644 index 0000000..4cd0137 --- /dev/null +++ b/docs/concepts/schema-aware-translation.md @@ -0,0 +1,47 @@ +# Schema-aware Translation + +A SQL clause does not map to a single RediSearch syntax. The right output depends on the underlying field's type. + +| Field Type | Redis Syntax | Example | +|---|---|---| +| `TEXT` | `@field:term` | `@title:laptop` | +| `NUMERIC` | `@field:[min max]` | `@price:[100 500]` | +| `TAG` | `@field:{value}` | `@category:{books}` | +| `GEO` | `@field:[lon lat radius unit]` | `@loc:[-122.4 37.7 5 km]` | + +## Why schema awareness is necessary + +Consider `WHERE category = 'books'`. Without knowing what `category` is in Redis, the translator has two valid outputs and they return different rows: + +- If `category` is `TEXT`, the right output is `@category:books` (a tokenized term match). +- If `category` is `TAG`, the right output is `@category:{books}` (an exact tag match). + +A naive translator that always emits one or the other will silently produce wrong results in the case it picked badly. The library refuses to make that choice without information; it asks Redis for the schema. + +## How the registry resolves the choice + +The schema registry calls `FT.INFO` on the index and parses the response into a `{field_name: field_type}` map. The map is cached in process memory. When the analyzer encounters `WHERE category = 'books'`, it consults the cache, learns that `category` is a `TAG`, and tells the query builder to emit the tag-match form. + +A single `FT.INFO` call captures every field on an index, so the per-query overhead after the first lookup is zero. + +## Lazy versus eager: a startup-cost tradeoff + +The registry can fill its cache in two ways. Both end at the same place; they differ only in *when* the round-trips happen. + +**Lazy** is the default. 
Schemas are loaded on demand, the first time a given index is referenced in a query. A process that touches three indexes pays for three `FT.INFO` calls, spread across the queries that needed them. A process that never queries a given index never asks Redis about it. Startup is essentially free. + +**Eager** loads everything at construction: one `FT._LIST` followed by one `FT.INFO` per index. Construction blocks until they all return. Subsequent queries do no schema I/O. The cost moves to startup, but a missing or misspelled index name fails immediately rather than at first use. + +The right choice depends on whether startup latency or first-query latency matters more for your workload. Recipes for both modes live in {doc}`/user_guide/how_to_guides/lazy-vs-eager-schemas`. + +## Cache coherence + +A cached schema can drift from reality. If you alter or drop an index after the schema has been read, the next translation will be based on the old layout. The library cannot detect this on its own; RediSearch does not emit keyspace notifications for `FT.*` commands. + +The user is therefore responsible for invalidating the cache when their index changes. The mechanism is provided as an explicit call rather than automatic, because automatic invalidation would either require polling (expensive) or a hook the application has to wire anyway. The recipe is in {doc}`/user_guide/how_to_guides/lazy-vs-eager-schemas`. + +There is also a polling mode for processes that want to detect index creation and deletion in the background; see the same how-to. + +## Async coalescing + +In an async process, a burst of requests for a freshly-seen index can produce a thundering herd of `FT.INFO` calls. The async registry deduplicates these: concurrent calls for the same index share a single in-flight request, and only the first caller pays for the round-trip. The concept of in-flight coalescing is part of the broader async story; see {doc}`async-invariants`. 
diff --git a/docs/concepts/search-vs-aggregate.md b/docs/concepts/search-vs-aggregate.md new file mode 100644 index 0000000..a92cb7e --- /dev/null +++ b/docs/concepts/search-vs-aggregate.md @@ -0,0 +1,44 @@ +# `FT.SEARCH` versus `FT.AGGREGATE` + +Redis exposes two top-level search commands, and they are not interchangeable. sql-redis picks one per query based on what the SQL asks for. Knowing which command runs for a given SQL is useful: it predicts what features can combine, what cannot, and what the result rows will look like. + +## What the two commands do + +**`FT.SEARCH`** is the document-retrieval command. Given a query string, it finds matching documents and returns them with their fields. It supports relevance scoring (`WITHSCORES` and BM25), score-based sorting, and the optimised `GEOFILTER` clause. It cannot compute aggregations, group, or apply post-query filters. + +**`FT.AGGREGATE`** is the analytical command. It runs a query and then a pipeline: `LOAD`, `APPLY`, `GROUPBY`, `REDUCE`, `FILTER`, `SORTBY`. It is the only path to `COUNT`, `SUM`, `AVG`, computed columns, date-function projections, and post-aggregation filtering. It does not support `WITHSCORES`. + +## Which SQL forces which command + +The translator picks `FT.AGGREGATE` if **any** of the following is true. Otherwise it uses `FT.SEARCH`. + +| Trigger | Why | +|---|---| +| `SELECT COUNT(*)`, `SUM(x)`, `AVG(x)`, `MIN(x)`, `MAX(x)` | Aggregations require `REDUCE`. | +| `GROUP BY` | `GROUPBY` is an aggregate-only pipeline stage. | +| Computed expression in `SELECT` (`SELECT price * 0.9 AS d`) | Needs `APPLY`. | +| `geo_distance(...)` in `SELECT` | The distance is computed via `APPLY`. | +| `geo_distance(...) > N` or `>= N` or `BETWEEN ... AND ...` in `WHERE` | `FT.SEARCH`'s `GEOFILTER` only handles "within radius". Greater-than is implemented as a post-query `FILTER`. | +| `YEAR(field)`, `MONTH(field)`, etc. anywhere | Date functions are computed via `APPLY`; predicates on them become `FILTER`. 
| +| `HAVING exists(field) = 1` (or any `HAVING`) | `FILTER` is aggregate-only. | + +Equivalently, `FT.SEARCH` runs when the query is a pure document fetch: `SELECT field-list FROM idx WHERE ... ORDER BY ... LIMIT ...`, optionally with `score()`, optionally with `geo_distance(...) <`/`<=` filters that fit `GEOFILTER`. + +## Combinations that are not allowed + +Some pairs are forced incompatible by the underlying Redis commands; sql-redis surfaces this with a `ValueError` rather than silently dropping a clause. + +- **`score()` plus aggregation, GROUP BY, `geo_distance` >/>=/BETWEEN, date functions, or HAVING.** `score()` requires `WITHSCORES`, which is `FT.SEARCH` only. Anything that forces `FT.AGGREGATE` therefore conflicts. Error message: *"score() is not supported with FT.AGGREGATE queries"*. +- **`OR` combined with `geo_distance(...) >/>=/BETWEEN`.** The greater-than family is implemented as a top-level `FILTER`, which is ANDed with the rest of the query. Combining it with `OR` at the SQL level would silently change semantics. Error: *"Geo distance comparisons (>, >=, BETWEEN) cannot be combined with OR"*. +- **`OR` combined with date-function predicates** for the same reason. Date predicates become `FILTER` clauses. + +These constraints are not bugs to fix; they are the cost of the abstraction. The translator could in principle synthesise post-query workarounds, but doing so would break the user's expectation that one SQL produces one Redis command. + +## Why this matters for callers + +Two practical consequences: + +1. **The result-row shape changes.** `FT.SEARCH` returns rows with field-value pairs straight from the indexed documents, possibly with a score column. `FT.AGGREGATE` returns rows of computed fields, group keys, and reduced values; the original document fields are present only if the SQL asks for them. See {doc}`result-shape`. +2. 
**`LIMIT` semantics differ subtly.** Both commands honour `LIMIT`, but the `count` returned by `FT.AGGREGATE` reflects the post-pipeline row count, while `FT.SEARCH`'s `count` is the total match count regardless of the limit. The library exposes both as `QueryResult.count`; the meaning depends on which path ran. + +If you need to know which command was issued for a given SQL, call `Translator.translate(sql)` directly and inspect `TranslatedQuery.command`. diff --git a/docs/concepts/testing-philosophy.md b/docs/concepts/testing-philosophy.md new file mode 100644 index 0000000..aa8c20f --- /dev/null +++ b/docs/concepts/testing-philosophy.md @@ -0,0 +1,22 @@ +# Testing Philosophy + +sql-redis uses strict TDD with 100% test coverage as a hard requirement. + +## Practices + +1. **Write failing tests first.** Define the expected behavior before writing the implementation. +2. **One test at a time.** Implement just enough to pass the test in front of you. +3. **No untestable code.** If a branch cannot be tested, it should not exist. Coverage pragmas (`# pragma: no cover`) are forbidden. +4. **Integration tests mirror raw Redis.** `test_sql_queries.py` runs SQL through the translator and executor, and `test_redis_queries.py` runs the equivalent `FT.AGGREGATE` commands directly. Both must produce the same rows. + +## Why no mocks for Redis + +A mocked Redis happily returns whatever the test sets up. That tells us our code matches our assumptions about Redis, which is not the same thing as matching Redis itself. Mocked tests pass; production breaks. + +Integration tests use `testcontainers[redis]` to start a real Redis with the search module on a free port. The startup cost is paid once per test session. + +## Why 100% coverage is achievable here + +Because the layers are decoupled (see {doc}`architecture`), each component has a clear contract and a small surface. There are no untestable branches because there are no hidden dependencies. + +Coverage is enforced in CI. 
diff --git a/docs/concepts/vector-substitution.md b/docs/concepts/vector-substitution.md new file mode 100644 index 0000000..69079cc --- /dev/null +++ b/docs/concepts/vector-substitution.md @@ -0,0 +1,57 @@ +# Vector Substitution + +Vectors are the only parameter type that does not get inlined into the SQL string. Understanding why explains a small surprise users hit the first time they pass an embedding: the placeholder is preserved, then replaced later, in a different stage. + +## The problem with stringifying a vector + +A query embedding is a `bytes` object: a packed `float32` array, perhaps 1536 entries long for an OpenAI-style embedding. That is roughly 6 KB of binary data per query. Two consequences make string substitution infeasible. + +**Encoding.** RediSearch expects the vector as raw bytes in the command list, not as a quoted SQL literal. There is no `'\x00\x01\x02...'` syntax we could substitute that would round-trip through the SQL parser and survive into the Redis command intact. + +**Sanity.** Even if it were syntactically possible, splicing 6 KB of binary bytes into a SQL string and re-parsing it would be wasteful at every layer: the parameter substitutor, the SQL parser, the analyzer, and the query builder would all carry around bytes-as-strings until the executor finally peeled them off again. 
+
+## The two-stage answer
+
+sql-redis splits parameter substitution into two stages:
+
+```
+SQL with :params           Redis command list
+        │                          │
+        ▼                          ▼
+┌──────────────────┐      ┌──────────────────┐
+│ Stage 1:         │      │ Stage 2:         │
+│ String params    │      │ Bytes params     │
+│ inlined into SQL │      │ injected into    │
+│ (escape, quote)  │      │ command args     │
+└────────┬─────────┘      └────────┬─────────┘
+         │                         │
+         ▼                         │
+  parse → translate                │
+         │                         │
+         ▼                         │
+  command list with                │
+  "$vector" placeholder ──────────┘
+         │
+         ▼
+  execute_command(*cmd)
+```
+
+**Stage 1 (in `_substitute_params`).** Every parameter that is `int`, `float`, or `str` is inlined into the SQL string with appropriate quoting and escaping. Parameters whose value is `bytes` are deliberately *skipped*: the `:vec` placeholder is left in place. Other types (`None`, `bool`, `list`) are also skipped, on the assumption that they are handled elsewhere in the pipeline.
+
+**Translation.** The SQL is parsed as usual. The translator emits a query that uses `$vector` (a literal placeholder token) wherever a vector argument is expected. From the parser's perspective there is no difference between "a parameter is missing" and "the user wrote `$vector`"; the placeholder survives translation.
+
+**Stage 2 (in `Executor.execute`).** The executor scans `params` for any `bytes` value, finds the `$vector` token in the command list, and replaces it with the bytes. The replacement happens after translation, on the command list, not on the SQL string. The bytes never participate in parsing.
+
+## Why one vector per query is enough
+
+The current implementation supports a single bytes parameter per query. This matches the underlying RediSearch capability: a vector query has one query vector, used by the `KNN` clause. A query with two `vector_distance(...)` calls is not a thing.
+
+If multi-vector queries become a thing later, the same scheme generalises: emit `$vector_a`, `$vector_b`; the executor matches each placeholder to the named bytes parameter. 
+ +## Implications for callers + +- The same `params` dict can mix `str`, `int`, `float`, and `bytes` freely. Each type takes its own path. +- A `bytes` value never appears in the SQL string, so debugging tools that print the substituted SQL will still show `:vec`. +- A `bytes` value does *not* go through `_substitute_params` quoting, so it cannot accidentally produce malformed SQL. The downside is that a misnamed `:vec` placeholder is not detected at substitution time; it produces a Redis error at execution. + +The user-facing recipe is in {doc}`/user_guide/how_to_guides/use-parameters` ("Vectors") and {doc}`/user_guide/how_to_guides/vector-search`. diff --git a/docs/concepts/why-sql.md b/docs/concepts/why-sql.md new file mode 100644 index 0000000..80af488 --- /dev/null +++ b/docs/concepts/why-sql.md @@ -0,0 +1,18 @@ +# Why SQL? + +The query interface is the most visible design choice in the library. We considered three options. + +| Approach | Example | Trade-offs | +|---|---|---| +| **SQL** | `SELECT * FROM products WHERE price > 100` | Universal, well understood, tooling exists | +| **Pandas-like** | `df[df.price > 100]` | Pythonic but Python-only, no standard | +| **Builder pattern** | `query.select("*").where(price__gt=100)` | Type-safe but verbose, with a learning curve | + +## We chose SQL because: + +1. **Universality.** SQL is the lingua franca of data. Developers, analysts, and tools all speak it. +2. **No new DSL to learn.** Users already know SQL. A pandas-like API requires learning our specific dialect. +3. **Tooling compatibility.** SQL strings can be generated by ORMs, query builders, or AI assistants. +4. **Clear mapping.** SQL semantics map reasonably well to RediSearch operations: `SELECT` to `LOAD`, `WHERE` to filter, `GROUP BY` to `GROUPBY`. + +The downside is losing Python type checking and IDE support, but for a query interface, the universality trade-off is worth it. 
diff --git a/docs/concepts/why-sqlglot.md b/docs/concepts/why-sqlglot.md new file mode 100644 index 0000000..e12f01d --- /dev/null +++ b/docs/concepts/why-sqlglot.md @@ -0,0 +1,19 @@ +# Why sqlglot? + +Once we picked SQL as the surface, we needed a parser. + +## Options considered + +- **Custom parser** (regex, hand-rolled recursive descent) +- **PLY or Lark** (parser generators) +- **sqlglot** (production SQL parser) +- **sqlparse** (a tokenizer, not a full parser) + +## We chose sqlglot because: + +1. **Battle-tested.** Used in production by SQLMesh and many other projects. Handles edge cases we would miss. +2. **Full AST.** Provides a complete abstract syntax tree, not just tokens. We can traverse and analyze queries properly. +3. **Dialect support.** Handles SQL variations. Users can write MySQL-style or PostgreSQL-style queries. +4. **Active maintenance.** Regular releases, responsive maintainers, good documentation. + +The alternative was writing a custom parser, which would be error-prone and time-consuming. sqlglot lets us focus on the translation logic rather than parsing edge cases. diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..9986781 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,67 @@ +"""Sphinx configuration for sql-redis documentation.""" + +import os +import sys + +sys.path.insert(0, os.path.abspath("..")) + +from sql_redis import __version__ + +project = "sql-redis" +copyright = "2026, Redis Inc." 
+author = "Redis Applied AI"
+version = __version__
+release = version
+
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.viewcode",
+    "sphinx.ext.intersphinx",
+    "sphinx_design",
+    "sphinx_copybutton",
+    "myst_parser",
+]
+
+source_suffix = {
+    ".rst": "restructuredtext",
+    ".md": "markdown",
+}
+
+templates_path = ["_templates"]
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "SPEC.md"]
+
+html_theme = "sphinx_book_theme"
+html_title = "sql-redis"
+html_static_path = ["_static"] if os.path.isdir("_static") else []
+
+html_theme_options = {
+    "repository_url": "https://github.com/redis-applied-ai/sql-redis",
+    "use_repository_button": True,
+    "use_edit_page_button": True,
+    "use_issues_button": True,
+    "repository_branch": "main",
+    "path_to_docs": "docs",
+    "show_navbar_depth": 2,
+    "navigation_depth": 4,
+    "show_toc_level": 3,
+    "home_page_in_toc": True,
+}
+
+myst_enable_extensions = ["colon_fence", "deflist"]
+myst_heading_anchors = 3
+
+autoclass_content = "both"
+autodoc_member_order = "groupwise"
+autodoc_typehints = "description"
+add_module_names = False
+
+napoleon_google_docstring = True
+napoleon_numpy_docstring = False
+napoleon_include_init_with_doc = True
+
+intersphinx_mapping = {
+    "python": ("https://docs.python.org/3", None),
+    "redis": ("https://redis-py.readthedocs.io/en/stable/", None),
+    "redisvl": ("https://docs.redisvl.com/", None),
+}
diff --git a/docs/examples/index.md b/docs/examples/index.md
new file mode 100644
index 0000000..58a0ab5
--- /dev/null
+++ b/docs/examples/index.md
@@ -0,0 +1,36 @@
+# Examples
+
+Worked examples and applied patterns built on the sql-redis primitives. The how-to guides under {doc}`/user_guide/how_to_guides/index` are the current source of runnable examples; this section will grow as more end-to-end scenarios are added. 
+ +::::{grid} 2 +:gutter: 3 + +:::{grid-item-card} 🔍 Full-text search +:link: /user_guide/how_to_guides/text-search +:link-type: doc + +Phrase, fuzzy, proximity, BM25. +::: + +:::{grid-item-card} 🧮 Vector and hybrid search +:link: /user_guide/how_to_guides/vector-search +:link-type: doc + +KNN, filter-then-vector. +::: + +:::{grid-item-card} 🌍 GEO queries +:link: /user_guide/how_to_guides/geo-queries +:link-type: doc + +POINT, units, operators. +::: + +:::{grid-item-card} 📅 Date queries +:link: /user_guide/how_to_guides/date-queries +:link-type: doc + +ISO literals and date functions. +::: + +:::: diff --git a/docs/for-ais-only/BUILD_AND_TEST.md b/docs/for-ais-only/BUILD_AND_TEST.md new file mode 100644 index 0000000..fc8a3cd --- /dev/null +++ b/docs/for-ais-only/BUILD_AND_TEST.md @@ -0,0 +1,86 @@ +# Build and Test + +## Prerequisites + +- Python 3.9 or newer. +- `uv` (https://docs.astral.sh/uv/). +- Docker. The integration tests use `testcontainers[redis]` to spin up a + real Redis with the search module on a free port. Without Docker the tests + cannot run. + +## Make targets + +``` +make install uv sync (dev group) +make format black + isort +make check-format black --check +make check-types mypy ./sql_redis +make lint format + check-types +make test pytest +make test-verbose pytest -vv -s +make test-cov pytest with coverage report (terminal + htmlcov/) +make check lint + test +make build uv build (wheel + sdist) +make docs-build Build Sphinx HTML to docs/_build/html +make docs-serve Serve docs/_build/html on http://localhost:8000 +make clean Remove caches and build output +``` + +## Coverage policy + +100% line coverage is enforced in CI. `# pragma: no cover` is forbidden. If a +branch can't be tested, delete it. The two-pronged test layout (unit per +module + integration via real Redis) makes this achievable. 
+ +## Running a single test + +``` +uv run pytest tests/test_executor.py::test_select_with_filter -vv +``` + +## Running with coverage HTML + +``` +make test-cov +open htmlcov/index.html +``` + +## Building the docs + +``` +uv sync --group docs +make docs-build # writes docs/_build/html +make docs-serve # http://localhost:8000 +``` + +The Sphinx build should complete with zero warnings. Treat any warning as a +breaking change. To enforce this in CI, run with `-W`: + +``` +uv run --group docs sphinx-build -W -b html docs docs/_build/html +``` + +## CI gates (target state) + +- `make check` (lint + tests) on every PR. +- `make docs-build` with `-W` on every PR. +- 100% coverage gate. + +## Fast iteration loops + +When changing parsing logic: + +``` +uv run pytest tests/test_parser.py tests/test_translator.py -x -vv +``` + +These do not need Redis and run in well under a second. + +When changing executor logic: + +``` +uv run pytest tests/test_executor.py tests/test_sql_queries.py -x -vv +``` + +The first run pays for the testcontainers Redis startup (a few seconds). +Subsequent runs in the same session reuse it. diff --git a/docs/for-ais-only/FAILURE_MODES.md b/docs/for-ais-only/FAILURE_MODES.md new file mode 100644 index 0000000..83f907a --- /dev/null +++ b/docs/for-ais-only/FAILURE_MODES.md @@ -0,0 +1,90 @@ +# Failure Modes + +Things that look like bugs but are intentional. Read this before "fixing" any +of them. + +## Result row keys are bytes + +`Executor.execute(...).rows` returns dicts whose keys are `bytes`, not +`str`, when the underlying `Redis` client uses default +`decode_responses=False`. This is consistent with raw `redis-py` behavior. +The fix at the call site is to construct `Redis(decode_responses=True)`. Do +not "fix" this in `executor.py` by force-decoding; that breaks users who +deliberately want bytes (binary fields, vectors). See +{doc}`/concepts/result-shape` for the full story. 
+ +## Stopwords are silently stripped + +`WHERE title = 'bank of america'` becomes `@title:"bank america"` because +RediSearch does not index default stopwords. A `UserWarning` is emitted, but +the query proceeds. To preserve stopwords the user must create the index +with `STOPWORDS 0`. This is intentional: failing the query would be worse +than warning and proceeding. + +## `=` on TEXT fields is exact phrase, not tokenized AND + +`title = 'gaming laptop'` translates to `@title:"gaming laptop"` (phrase +match), not `@title:(gaming laptop)` (tokenized AND). For tokenized search +the user must call `fulltext(title, 'gaming laptop')`. The two are +semantically different; do not collapse them. + +## `OR` inside `fulltext()` is case-sensitive + +`fulltext(title, 'a OR b')` triggers a union (`@title:(a|b)`). +`fulltext(title, 'a or b')` is treated as a literal three-word AND search +(`@title:(a or b)`). This matches RediSearch's own grammar and is documented; +do not silently normalize the case. + +## `IS NULL` requires Redis 7.4+ AND `INDEXMISSING` + +`WHERE email IS NULL` translates to `ismissing(@email)`. If the field was +not declared with `INDEXMISSING` at index creation time, Redis returns a +syntax error. The executor catches this case and rewraps the +`redis.ResponseError` with a hint about Redis 7.4 + `INDEXMISSING`. If the +catch is breaking, check that the wrap heuristic still matches new +RediSearch error messages. + +## `Translator.translate(sql)` re-parses even if you already parsed + +`AsyncExecutor` deliberately calls `parse(sql)` first to extract the index +name, then calls `translate_parsed(parsed)` to avoid double-parsing. If you +add a new code path, prefer `translate_parsed` when you already have a +`ParsedQuery`. Calling `translate(sql)` from a code path that already parsed +is wasteful but not incorrect. 
+ +## `AsyncSchemaRegistry.invalidate()` cancels in-flight loads + +Cancelling the in-flight `FT.INFO` is intentional: it prevents a +post-invalidate stale write into the cache. The shielded `await` in +`ensure_schema()` returns the current cache state when the underlying task +is cancelled, so other awaiters do not propagate `CancelledError`. If you +"simplify" by removing the shield, you reintroduce a race. See +{doc}`/concepts/async-invariants`. + +## Lazy schema-load failures are deferred + +The default `schema_cache_strategy="lazy"` means a missing index does not +fail at `create_executor()` time. It fails at the first `execute()` call +that touches the index. This is intentional. If you need fail-fast at +startup, pass `schema_cache_strategy="load_all"`. Do not change the default. + +## `score()` plus aggregation raises ValueError + +This is not a bug. `score()` requires `WITHSCORES`, which is `FT.SEARCH` +only. Anything that forces `FT.AGGREGATE` (aggregations, GROUP BY, computed +fields, date functions, geo > / >= / BETWEEN, HAVING) cannot coexist with +`score()`. The translator surfaces the conflict explicitly rather than +silently dropping one side. See {doc}`/concepts/search-vs-aggregate`. + +## `OR` plus geo > / >= / BETWEEN raises ValueError + +Same family as above. Greater-than-distance is implemented as a top-level +`FILTER` clause, which is ANDed with the rest of the query. Combining with +SQL-level `OR` would silently change semantics, so it is rejected. Same for +date-function predicates combined with `OR`. + +## Coverage gate failures are not flakes + +If CI reports coverage below 100%, do not retry. The failure is real. Either +add a test or delete the unreachable branch. The project explicitly forbids +`# pragma: no cover` (see {doc}`/concepts/testing-philosophy`). 
diff --git a/docs/for-ais-only/REPOSITORY_MAP.md b/docs/for-ais-only/REPOSITORY_MAP.md new file mode 100644 index 0000000..cbae43e --- /dev/null +++ b/docs/for-ais-only/REPOSITORY_MAP.md @@ -0,0 +1,94 @@ +# Repository Map + +A module-by-module guide to the sql-redis source tree, written for an agent +that needs to change something and wants to know where to look. + +## Source layout + +``` +sql_redis/ + __init__.py Public API exports. The 11 symbols here are the contract. + version.py __version__ string. + parser.py SQL string → ParsedQuery dataclass. Wraps sqlglot. + Owns: Condition, GeoDistanceCondition, ParsedQuery, + SQLParser, SQL_TO_REDIS_DATE_FUNCTIONS, + parse_date_to_timestamp. + analyzer.py ParsedQuery + SchemaRegistry → AnalyzedQuery. + Classifies each WHERE condition by field type so the + query builder knows whether to emit @field:term, + @field:[min max], @field:{value}, etc. + schema.py SchemaRegistry, AsyncSchemaRegistry. Caches FT.INFO + output. Lazy by default, eager via load_all(). + query_builder.py Per-field-type RediSearch syntax emission. + Stateless. Reads AnalyzedQuery, returns a query string. + translator.py Translator, TranslatedQuery. Orchestrates parser → + analyzer → query builder. Decides FT.SEARCH vs + FT.AGGREGATE; emits a TranslatedQuery (command, index, + query_string, args, score_alias). + executor.py Executor, AsyncExecutor, factories, QueryResult, + SchemaCacheStrategy. The only module that talks to + Redis at query time. +``` + +## Test layout + +``` +tests/ + test_parser.py sqlglot wrapping; pure logic, no Redis. + test_analyzer.py Field-type classification. + test_schema.py Sync registry: lazy, eager, invalidate. + test_async_schema.py Async registry: ensure_schema, cancellation, + coalescing, shielded loads. + test_query_builder.py Per-type syntax emission. + test_translator.py Pipeline orchestration. Uses fakes for the + schema registry. + test_executor.py End-to-end with testcontainers Redis. 
+ test_async_executor.py Async equivalent. + test_sql_queries.py Integration: SQL → executor → real rows. + Mirrored by test_redis_queries.py which + runs the equivalent FT.AGGREGATE directly. + Both must produce identical rows. + test_redis_queries.py See above. + test_parameter_substitution.py The 12 TDD tests for token-based substitution. +``` + +## Where features live + +| SQL feature | Module(s) | +|---|---| +| `SELECT field, expr AS alias` | `parser.py` (extraction), `query_builder.py` (RETURN/LOAD) | +| `WHERE` operators (`=`, `<`, `BETWEEN`, `IN`) | `parser.py`, `analyzer.py`, `query_builder.py` | +| Boolean `AND`/`OR`/`NOT` | `query_builder.py` | +| Aggregations (`COUNT`, `SUM`, etc.) | `query_builder.py` (FT.AGGREGATE branch in `translator.py`) | +| `GROUP BY` | `query_builder.py` (GROUPBY) | +| `ORDER BY`, `LIMIT`, `OFFSET` | `query_builder.py` | +| Vector KNN (`vector_distance`) | `parser.py`, `query_builder.py` (KNN clause), `executor.py` (`$vector` byte injection) | +| Full-text (`fulltext`, `fuzzy`, `LIKE`) | `parser.py`, `query_builder.py` | +| GEO (`geo_distance`, `POINT`) | `parser.py::GeoDistanceCondition`, `query_builder.py` | +| Date functions (`YEAR`, `MONTH`, `DATE_FORMAT`) | `parser.py::SQL_TO_REDIS_DATE_FUNCTIONS` | +| `IS NULL`, `IS NOT NULL`, `exists()` | `parser.py`, `query_builder.py` | +| Parameter substitution | `executor.py::_substitute_params` | +| FT.SEARCH vs FT.AGGREGATE branching | `translator.py::translate_parsed` (the `use_aggregate` boolean) | +| Result-row parsing | `executor.py::Executor.execute` (the four parsing branches: WITHSCORES + RETURN 0, WITHSCORES, plain SEARCH, AGGREGATE) | + +## What to read before changing X + +- **Parser changes.** Read `parser.py` end to end. Then look at how + `translator.py::translate_parsed` consumes the result so you do not break + the `AsyncExecutor` path that reuses a pre-parsed query. 
+- **Analyzer/query-builder changes.** Read `analyzer.py` and the matching + branch in `query_builder.py`. The two are tightly coupled: a new field type + in the analyzer needs a corresponding emitter in the builder. +- **Executor changes.** The sync and async executors share `_ScoreParseMixin` + and the result-parsing branches. If you change the response shape, both + need updates. +- **Schema registry changes.** The two registry classes do not share code, + but they share semantics. Lazy loading, invalidation, and load coalescing + must be consistent across sync and async. + +## What is intentionally not exported + +`Analyzer`, `QueryBuilder`, `SQLParser`, `ParsedQuery`, `Condition`, +`AnalyzedQuery` are internal. Tests import them, but they are not part of the +public contract. If a user imports them, they are on their own when we +refactor. diff --git a/docs/for-ais-only/index.md b/docs/for-ais-only/index.md new file mode 100644 index 0000000..8c0861e --- /dev/null +++ b/docs/for-ais-only/index.md @@ -0,0 +1,53 @@ +--- +myst: + html_meta: + "description lang=en": | + Internal AI-agent guide to the sql-redis source tree. Repo map, build, test, and failure modes. +--- + +# For AI Agents Modifying sql-redis + +This section is the internal counterpart to the user-facing +[AGENTS.md](https://github.com/redis-applied-ai/sql-redis/blob/main/AGENTS.md). +It exists for an agent that has been asked to *change* the library: add a new +SQL feature, fix a parser bug, extend the schema registry. + +```{toctree} +:maxdepth: 1 + +REPOSITORY_MAP +BUILD_AND_TEST +FAILURE_MODES +``` + +## Decision tree + +Use this to find the right starting point fast. 
+ +| Task | Read first | +|---|---| +| Add a new SQL clause or operator | [Repository map](REPOSITORY_MAP.md), then `parser.py`, then `analyzer.py`, then `query_builder.py` | +| Add a new field type (e.g., a future GEO variant) | [Repository map](REPOSITORY_MAP.md), then `analyzer.py` and `query_builder.py` | +| Fix a translation bug | [Repository map](REPOSITORY_MAP.md), then `translator.py`. Run integration tests under `tests/test_sql_queries.py`. | +| Change parameter substitution | {doc}`/concepts/parameter-substitution`, then `executor.py::_substitute_params` | +| Change schema loading semantics | {doc}`/concepts/schema-aware-translation`, then `schema.py` | +| Change async behavior | {doc}`/concepts/async-invariants`, then `executor.py::AsyncExecutor` and `schema.py::AsyncSchemaRegistry` | +| Change the FT.SEARCH/FT.AGGREGATE branching | {doc}`/concepts/search-vs-aggregate`, then `translator.py::translate_parsed` | +| Change the result-row shape | {doc}`/concepts/result-shape`, then `executor.py` (`Executor.execute` parsing branches) | +| Run tests | [Build and test](BUILD_AND_TEST.md) | +| Diagnose "this looks broken" | [Failure modes](FAILURE_MODES.md) before assuming a bug | + +## Project invariants the agent should preserve + +1. **No mocks for Redis in integration tests.** Real `testcontainers[redis]` + only. See {doc}`/concepts/testing-philosophy`. +2. **100% line coverage is enforced in CI.** No `# pragma: no cover`. If a + branch can't be tested, it shouldn't exist. +3. **Public API is what `sql_redis/__init__.py` exports.** Anything else is + internal and can change without notice. The autoclass-driven reference at + `docs/api/` is the contract. +4. **Docstrings are the single source of truth for the API reference.** + Sphinx `autoclass` reads them directly. If you change a method signature, + update the docstring in the same change. +5. 
**No emdashes or `--` in prose.** Stylistic rule from the project's + [`CLAUDE.md`](https://github.com/redis-applied-ai/sql-redis/blob/main/CLAUDE.md). diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000..e1923ed --- /dev/null +++ b/docs/index.md @@ -0,0 +1,93 @@ +--- +myst: + html_meta: + "description lang=en": | + sql-redis documentation. SQL to Redis FT.SEARCH and FT.AGGREGATE translator. +--- + +# sql-redis + +```{admonition} Status: Experimental +:class: warning + +sql-redis is part of the [Redis AI Hub](https://redis.io/ai-hub/) under the +**Experimental** tier. The Python API can change between minor releases. The +project is validating its design and SQL surface in real use; we welcome bug +reports and feedback at the [issue tracker](https://github.com/redis-applied-ai/sql-redis/issues). +``` + +Query Redis collections with familiar SQL on top of RediSearch and RedisVL indexes. sql-redis converts SQL `SELECT` statements into Redis `FT.SEARCH` and `FT.AGGREGATE` commands, looking up index schemas via `FT.INFO` so the translation respects the underlying field types. + +## Quick Start + +```bash +pip install sql-redis +``` + +```bash +docker run -d --name redis -p 6379:6379 redis:8.4 +``` + +→ *{doc}`user_guide/getting-started`* + +--- + +## Explore the Docs + +::::{grid} 2 +:gutter: 4 + +:::{grid-item-card} 📖 Concepts +:link: concepts/index +:link-type: doc +:class-card: sd-shadow-sm + +Understand how sql-redis works. Architecture, design decisions, and the why behind every layer. +::: + +:::{grid-item-card} 🚀 User Guide +:link: user_guide/index +:link-type: doc +:class-card: sd-shadow-sm + +Step by step. Installation, first query, and task-oriented recipes for every feature. +::: + +:::{grid-item-card} 💡 Examples +:link: examples/index +:link-type: doc +:class-card: sd-shadow-sm + +Worked examples and patterns built on the sql-redis primitives. 
+::: + +:::{grid-item-card} 📚 API Reference +:link: api/index +:link-type: doc +:class-card: sd-shadow-sm + +Every public class, method, and parameter, generated from docstrings. +::: + +:::: + +## For AI agents + +If you are an AI agent reading these docs, start with +[`AGENTS.md`](https://github.com/redis-applied-ai/sql-redis/blob/main/AGENTS.md) +at the repo root for a usage-oriented quick reference, or +{doc}`for-ais-only/index` for an internal map of the source tree. A flat +[`llms.txt`](https://github.com/redis-applied-ai/sql-redis/blob/main/docs/llms.txt) +index of every doc page is also available. + +```{toctree} +:maxdepth: 2 +:hidden: + +Concepts +User Guide +Examples +API +For AI Agents +Changelog +``` diff --git a/docs/llms.txt b/docs/llms.txt new file mode 100644 index 0000000..4345be5 --- /dev/null +++ b/docs/llms.txt @@ -0,0 +1,51 @@ +# sql-redis + +> Experimental SQL to Redis FT.SEARCH and FT.AGGREGATE translator. Accepts a SQL SELECT string, looks up index schemas via FT.INFO, emits the matching Redis search command, parses the reply into rows. + +For agents using the library, read [AGENTS.md](../AGENTS.md) first. For the full API surface, read [docs/api/](api/). For task-oriented recipes, read [docs/user_guide/how_to_guides/](user_guide/how_to_guides/). + +## Concepts + +- [Architecture](concepts/architecture.md): The two top-level objects (Executor, SchemaRegistry) and the layered translator they contain. +- [Why SQL?](concepts/why-sql.md): Interface choice. SQL versus pandas-style DSL versus a builder API. +- [Why sqlglot?](concepts/why-sqlglot.md): Parser choice. sqlglot versus a hand-rolled recursive-descent parser. +- [Schema-aware translation](concepts/schema-aware-translation.md): Why field types matter, how the registry caches them, lazy versus eager loading, async coalescing. +- [FT.SEARCH versus FT.AGGREGATE](concepts/search-vs-aggregate.md): Which Redis command runs for a given SQL, which feature combinations are illegal, and why. 
+- [Parameter substitution](concepts/parameter-substitution.md): The token-based substitution algorithm, the two bugs it fixes, and why it is not parser-based. +- [Vector substitution](concepts/vector-substitution.md): Why bytes parameters take a different path. Two-stage substitution that keeps vectors out of the SQL string. +- [Async invariants](concepts/async-invariants.md): The three guarantees the async path provides: coalesced loads, shielded reads, invalidate cancels in-flight. +- [Result shape](concepts/result-shape.md): What QueryResult.rows actually contains, why it varies with the command, scoring, and client decoding. +- [Testing philosophy](concepts/testing-philosophy.md): TDD, 100% coverage, why integration tests use real Redis instead of mocks. + +## User guide + +- [Installation](user_guide/installation.md): pip install, Redis container setup, optional development setup. +- [Getting started](user_guide/getting-started.md): End-to-end first query in five minutes. Index creation through result iteration with expected output. + +## How-to guides + +- [Use parameters](user_guide/how_to_guides/use-parameters.md): Token-based substitution rules, type handling, vector bytes. +- [Vector and hybrid search](user_guide/how_to_guides/vector-search.md): KNN, filter-then-vector, scoring. +- [Full-text search](user_guide/how_to_guides/text-search.md): Phrase, tokenized AND, OR, fuzzy, prefix/suffix/contains, proximity, optional terms, scoring, stopwords. +- [GEO queries](user_guide/how_to_guides/geo-queries.md): POINT(lon, lat), units, all operators, distance in SELECT. +- [Date queries](user_guide/how_to_guides/date-queries.md): ISO 8601 literal conversion, YEAR/MONTH/DAY functions, limitations. +- [Missing fields](user_guide/how_to_guides/missing-fields.md): IS NULL / IS NOT NULL (Redis 7.4+, INDEXMISSING) and the exists() function. 
+- [Lazy versus eager schemas](user_guide/how_to_guides/lazy-vs-eager-schemas.md): SchemaCacheStrategy choices, invalidation, polling for index changes. +- [Async usage](user_guide/how_to_guides/async-usage.md): AsyncExecutor, AsyncSchemaRegistry.ensure_schema, cancellation safety. + +## API reference + +- [Translator](api/translator.rst): Translator and TranslatedQuery. Turns SQL strings into Redis command lists without executing. +- [Schema registries](api/schema.rst): SchemaRegistry (sync) and AsyncSchemaRegistry. Caches FT.INFO output; supports lazy load, invalidation, change polling. +- [Executor](api/executor.rst): Executor, AsyncExecutor, create_executor, create_async_executor, QueryResult, SchemaCacheStrategy, __version__. +- [SQL syntax catalog](api/sql-syntax.md): Every supported clause, operator, and function with its RediSearch translation. + +## Examples + +- [Examples index](examples/index.md): Pointers to applied scenarios in the how-to guides. + +## For agents modifying sql-redis + +- [Repository map](for-ais-only/REPOSITORY_MAP.md): Module-by-module description of the source tree. +- [Build and test](for-ais-only/BUILD_AND_TEST.md): Make targets, test layout, coverage policy. +- [Failure modes](for-ais-only/FAILURE_MODES.md): Things that look like bugs but are intentional design choices. 
diff --git a/docs/parameter-substitution.md b/docs/parameter-substitution.md deleted file mode 100644 index 3b45ea3..0000000 --- a/docs/parameter-substitution.md +++ /dev/null @@ -1,373 +0,0 @@ -# Parameter Substitution Design Document - -## Table of Contents -- [Problem Statement](#problem-statement) -- [Approaches Considered](#approaches-considered) -- [Decision](#decision) -- [Implementation Details](#implementation-details) -- [Known Limitations](#known-limitations) -- [Test Coverage](#test-coverage) -- [References](#references) - ---- - -## Problem Statement - -Through Test-Driven Development (TDD) investigation, two critical bugs were discovered in the original parameter substitution implementation: - -### Bug 1: Quote Escaping Bug (CRITICAL) - -**Issue**: Single quotes in string parameters were not being escaped, causing SQL parsing errors. - -**Example**: -```python -# User input -sql = "SELECT * FROM users WHERE name = :name" -params = {"name": "O'Brien"} - -# BUGGY OUTPUT (original implementation) -# SELECT * FROM users WHERE name = 'O'Brien' -# ^ Unescaped quote breaks SQL parsing - -# CORRECT OUTPUT (fixed implementation) -# SELECT * FROM users WHERE name = 'O''Brien' -# ^^ Properly escaped -``` - -**Impact**: Any user with an apostrophe in their name (O'Brien, McDonald's, etc.) would cause query failures. - -### Bug 2: Partial Matching Bug - -**Issue**: Using naive `str.replace()` caused `:id` to incorrectly match inside `:product_id`. - -**Example**: -```python -# User input -sql = "SELECT * FROM products WHERE id = :id AND product_id = :product_id" -params = {"id": 123, "product_id": 456} - -# BUGGY OUTPUT (using str.replace(':id', '123')) -# SELECT * FROM products WHERE 123 = 123 AND product_123 = :product_id -# ^^^ ^^^ -# Correct WRONG! 
Partial match corrupted :product_id - -# CORRECT OUTPUT (token-based approach) -# SELECT * FROM products WHERE id = 123 AND product_id = 456 -``` - -**Impact**: Queries with similar parameter names would produce incorrect results or fail. - ---- - -## Approaches Considered - -### 1. Naive `str.replace()` (Original Implementation) - -```python -def _substitute_params(self, sql: str, params: dict[str, Any]) -> str: - for key, value in params.items(): - sql = sql.replace(f":{key}", str(value)) - return sql -``` - -**Pros**: -- Simple, 3 lines of code -- No dependencies - -**Cons**: -- ❌ Partial matching bug (`:id` matches inside `:product_id`) -- ❌ No quote escaping -- ❌ Order-dependent (Python 3.7+ dict ordering masks some issues) - -**Verdict**: ❌ **Rejected** - Has both critical bugs - -### 2. Token-based Approach (Chosen) - -```python -def _substitute_params(self, sql: str, params: dict[str, Any]) -> str: - tokens = re.split(r"(:[a-zA-Z_][a-zA-Z0-9_]*)", sql) - result = [] - for token in tokens: - if token.startswith(":"): - key = token[1:] - if key in params: - value = params[key] - if isinstance(value, str): - escaped = value.replace("'", "''") - result.append(f"'{escaped}'") - else: - result.append(str(value)) - else: - result.append(token) - else: - result.append(token) - return "".join(result) -``` - -**Pros**: -- ✅ Fixes both bugs (partial matching + quote escaping) -- ✅ Simple, ~30 lines of code -- ✅ Only uses stdlib `re` module -- ✅ Fast (single regex split + join) -- ✅ Easy to understand and maintain - -**Cons**: -- ⚠️ Theoretical limitation: colons in string literals (see [Known Limitations](#known-limitations)) - -**Verdict**: ✅ **CHOSEN** - Best balance of simplicity, correctness, and performance - -### 3. 
sqlglot Parse-Aware Approach - -```python -def _substitute_params(self, sql: str, params: dict[str, Any]) -> str: - parsed = sqlglot.parse_one(sql) - converted_params = { - k: exp.Literal.string(v) if isinstance(v, str) else exp.Literal.number(v) - for k, v in params.items() - } - substituted = exp.replace_placeholders(parsed, **converted_params) - return substituted.sql() -``` - -**Pros**: -- ✅ Theoretically handles colons in string literals correctly -- ✅ Fixes both bugs - -**Cons**: -- ❌ Requires external `sqlglot` dependency -- ❌ Complex (~60 lines with error handling) -- ❌ Slower (parse → transform → generate) -- ❌ Can fail on invalid SQL (needs try/except) -- ❌ Over-engineering: theoretical advantage doesn't apply in practice - -**Verdict**: ❌ **Rejected** - Over-engineered for the problem - ---- - -## Decision - -**We chose the token-based approach** for the following reasons: - -1. **Simplicity**: ~30 lines of clear, maintainable code -2. **No Dependencies**: Uses only Python stdlib `re` module -3. **Performance**: Single regex split + string join (no parsing overhead) -4. **Correctness**: Fixes both critical bugs discovered through TDD -5. **Proven**: Already implemented and tested in redis-vl-python -6. 
**Practical**: The theoretical advantage of sqlglot (handling colons in string literals) doesn't apply because: - - Users pass values via parameters, not hardcoded in SQL - - The translator has its own handling of string literals - - No real-world use cases have been identified - ---- - -## Implementation Details - -### How It Works - -The implementation uses a regex-based tokenization approach: - -```python -# Step 1: Split SQL on parameter patterns, keeping delimiters -tokens = re.split(r"(:[a-zA-Z_][a-zA-Z0-9_]*)", sql) - -# Example: -# Input: "SELECT * FROM users WHERE id = :id AND name = :name" -# Output: ["SELECT * FROM users WHERE id = ", ":id", " AND name = ", ":name", ""] -``` - -### Regex Pattern Breakdown - -``` -(:[a-zA-Z_][a-zA-Z0-9_]*) - ^ ^ ^ - | | | - | | +-- Zero or more alphanumeric or underscore - | +---------------- First char: letter or underscore - +-------------------------- Starts with colon -``` - -This pattern ensures: -- `:id` and `:product_id` are treated as separate tokens (prevents partial matching) -- Only valid identifiers are matched (`:123` is not a valid parameter) -- Parentheses capture the delimiter, keeping it in the split result - -### Quote Escaping - -String values are escaped using the SQL standard: - -```python -# SQL standard: single quote -> double single quote -escaped = value.replace("'", "''") -result.append(f"'{escaped}'") -``` - -**Examples**: -- `"O'Brien"` → `'O''Brien'` -- `"It's a test"` → `'It''s a test'` -- `"McDonald's"` → `'McDonald''s'` - -### Type Handling - -```python -if isinstance(value, (int, float)): - result.append(str(value)) # 123 → "123" -elif isinstance(value, str): - escaped = value.replace("'", "''") - result.append(f"'{escaped}'") # "test" → "'test'" -else: - result.append(token) # Keep placeholder (e.g., bytes for vectors) -``` - ---- - -## Known Limitations - -### 1. 
Colons in String Literals (Theoretical Edge Case) - -**Issue**: SQL with hardcoded string literals containing colons might be incorrectly parsed. - -**Example**: -```python -sql = "SELECT * FROM users WHERE email = 'admin:test@example.com' AND id = :id" -params = {"id": 123} - -# The regex would match :test inside the string literal -# However, this is NOT a practical issue because: -``` - -**Why This Doesn't Matter**: - -1. **Users don't write SQL this way**: In practice, users pass values via parameters: - ```python - # Real-world usage - sql = "SELECT * FROM users WHERE email = :email AND id = :id" - params = {"email": "admin:test@example.com", "id": 123} - ``` - -2. **Translator handles string literals**: The translator has its own processing that would handle this case differently anyway. - -3. **No real-world use cases**: After extensive testing and review of the codebase, no instances of this pattern were found. - -4. **TDD validation**: All 12 parameter substitution tests pass, covering real-world scenarios. - -### 2. Parameter Name Case Sensitivity - -Parameter names are case-sensitive: -```python -sql = "SELECT * FROM users WHERE id = :id" -params = {"ID": 123} # Won't match - :id != :ID -``` - -This is **expected behavior** and follows SQL conventions. - -### 3. Unsupported Types - -Only `int`, `float`, and `str` types are substituted. Other types keep their placeholder: -- `None` → placeholder kept (not converted to `NULL`) -- `bool` → placeholder kept (not converted to `TRUE`/`FALSE`) -- `list` → placeholder kept (not converted to `IN` clause) -- `bytes` → placeholder kept (handled separately for vector search) - -This is **intentional** - complex types are handled elsewhere in the pipeline. 
- ---- - -## Test Coverage - -### Test Files - -**`tests/test_parameter_substitution.py`** (12 tests) -- Comprehensive test suite validating the bug fixes -- Written using TDD methodology (tests written first, then implementation) -- All tests pass, demonstrating correctness of the implementation - -### Test Results - -``` -✅ 12/12 parameter substitution tests PASS -✅ 235/235 total tests PASS -``` - -### Test Categories - -#### 1. Partial Matching Bug Tests (3 tests) -```python -def test_similar_param_names_no_partial_match(): - """Verify :id doesn't match inside :product_id""" - sql = "SELECT * FROM products WHERE id = :id AND product_id = :product_id" - params = {"id": 1, "product_id": 100} - # ✅ PASSES - Both parameters substituted correctly -``` - -#### 2. Quote Escaping Bug Tests (3 tests) -```python -def test_single_quote_in_value(): - """Verify single quotes are properly escaped""" - sql = "SELECT * FROM users WHERE name = :name" - params = {"name": "O'Brien"} - # ✅ PASSES - Quote escaped as O''Brien -``` - -#### 3. Edge Cases (6 tests) -- Multiple occurrences of same parameter -- Empty string values -- Numeric types (int, float) -- Special characters in values -- Parameter at start/end of SQL -- Very long strings - -All edge case tests **PASS**, demonstrating robustness. - ---- - -## References - -### Related Files - -- **Implementation**: `sql_redis/executor.py` (lines 32-108) -- **Tests**: `tests/test_parameter_substitution.py` - -### External References - -- **redis-vl-python**: Original implementation in `redisvl/query/sql.py` (commit 2f118f7) -- **SQL Standard**: Single quote escaping using double single quote (`''`) -- **Python re module**: https://docs.python.org/3/library/re.html - -### Design Process - -This implementation was developed using **Test-Driven Development (TDD)**: - -1. **Discovery**: Feature audit revealed potential bugs -2. **Test Creation**: Wrote 12 failing tests demonstrating the bugs -3. 
**Implementation**: Implemented token-based fix -4. **Validation**: All tests pass, no regressions -5. **Documentation**: This document - -### Maintenance Notes - -**For Future Maintainers**: - -- If you need to modify parameter substitution, **write tests first** (TDD) -- The regex pattern is critical - don't change it without understanding the partial matching bug -- Quote escaping must use SQL standard (`''`), not backslash escaping (`\'`) -- Consider performance: this code runs on every query execution -- If adding support for new types, update the type handling section in `_substitute_params()` - -**Common Questions**: - -Q: Why not use a SQL parser like sqlglot? -A: Over-engineering. The token-based approach is simpler, faster, and solves the real bugs. - -Q: What about colons in string literals? -A: Not a practical issue. See [Known Limitations](#known-limitations) section. - -Q: Can we support None/bool/list types? -A: Possible, but intentionally not done. These are handled elsewhere in the pipeline. - ---- - -**Last Updated**: 2026-02-06 -**Author**: TDD Investigation & Implementation -**Status**: Production-Ready ✅ - - diff --git a/docs/user_guide/getting-started.md b/docs/user_guide/getting-started.md new file mode 100644 index 0000000..fc0010d --- /dev/null +++ b/docs/user_guide/getting-started.md @@ -0,0 +1,98 @@ +# Getting Started + +By the end of this walkthrough you will have run a SQL `SELECT` against a Redis index and printed three rows back to your terminal. Allow about five minutes. + +## Prerequisites + +Before you start, confirm both: + +- Redis 8.x is running locally on port 6379. See {doc}`installation`. +- `sql-redis` is installed in the current Python environment (`pip install sql-redis`). + +If both are true, the following Python snippet should print a version string: + +```python +from sql_redis import __version__ +print(__version__) +``` + +You should see something like `0.4.0`. + +## 1. 
Create an index in Redis + +sql-redis queries existing indexes; it never creates them. Define one with `FT.CREATE` first. From a fresh Python session: + +```python +from redis import Redis + +client = Redis() + +client.execute_command( + "FT.CREATE", "products", + "ON", "HASH", + "PREFIX", "1", "product:", + "SCHEMA", + "title", "TEXT", + "category", "TAG", + "price", "NUMERIC", "SORTABLE", +) +``` + +You should see `b'OK'` printed (or no output, depending on your shell). If you see `Index already exists`, that is fine: it means a previous run created it. + +## 2. Load three documents + +```python +client.hset("product:1", mapping={"title": "gaming laptop", "category": "electronics", "price": 1499}) +client.hset("product:2", mapping={"title": "mechanical keyboard", "category": "electronics", "price": 129}) +client.hset("product:3", mapping={"title": "ergonomic chair", "category": "furniture", "price": 349}) +``` + +Each call returns `3` (the number of fields written). Redis indexes the documents automatically because they match the `product:` prefix. + +## 3. Build an executor + +```python +from sql_redis import create_executor + +executor = create_executor(client) +``` + +`create_executor` constructs a schema registry for you and uses lazy schema loading: no `FT.INFO` call is made yet. + +## 4. Run a query + +```python +result = executor.execute(""" + SELECT title, price + FROM products + WHERE category = 'electronics' AND price < 500 + ORDER BY price ASC +""") + +for row in result.rows: + print(row[b"title"], row[b"price"]) +``` + +You should see exactly one line: + +``` +b'mechanical keyboard' b'129' +``` + +The other two rows are filtered out: the laptop is over the price cap, the chair is in the wrong category. Field keys come back as bytes because the default `Redis()` client does not decode responses; if you want strings, construct it as `Redis(decode_responses=True)` and the snippet will print `mechanical keyboard 129` instead. 
+ +`result.count` is `1` here, the total match count from Redis. + +## You are done + +You created an index, loaded data, ran a SQL query, and got rows back. Everything else in the docs builds on those four steps. + +## Where next + +- Inject runtime values into a query: {doc}`how_to_guides/use-parameters`. +- Search a `TEXT` field beyond simple equality: {doc}`how_to_guides/text-search`. +- Find the K most similar items to a query embedding: {doc}`how_to_guides/vector-search`. +- Filter by geographic distance: {doc}`how_to_guides/geo-queries`. +- Use the async API: {doc}`how_to_guides/async-usage`. +- The full SQL surface: {doc}`/api/sql-syntax`. diff --git a/docs/user_guide/how_to_guides/async-usage.md b/docs/user_guide/how_to_guides/async-usage.md new file mode 100644 index 0000000..47681ff --- /dev/null +++ b/docs/user_guide/how_to_guides/async-usage.md @@ -0,0 +1,60 @@ +# Use the async API + +You want to run queries from async code (FastAPI, ASGI, an asyncio worker). + +## Construct an async executor + +```python +import redis.asyncio as redis_async +from sql_redis import create_async_executor + +client = redis_async.Redis(host="localhost", port=6379) + +executor = await create_async_executor(client) +``` + +Like the sync factory, `create_async_executor` defaults to lazy schema loading. Pass `schema_cache_strategy="load_all"` to load every index at construction. + +## Execute + +```python +result = await executor.execute( + "SELECT title FROM products WHERE category = :cat LIMIT 10", + params={"cat": "electronics"}, +) + +for row in result.rows: + print(row) +``` + +`result` is the same `QueryResult` as the sync API. + +## Lazy loading semantics + +`AsyncSchemaRegistry.ensure_schema(index)` is the async equivalent of the sync lazy path. The first call issues one `FT.INFO`. Concurrent calls for the same index share the single in-flight request, so a burst of requests for a freshly seen index does not turn into a thundering herd. 
+ +If you cancel an `await executor.execute(...)` (for example via `asyncio.wait_for` timeout), the underlying schema load is shielded so other awaiters keep their result. + +## Invalidating + +Same shape as sync: + +```python +executor._schema_registry.invalidate("products") +executor._schema_registry.invalidate() +``` + +`invalidate()` cancels in-flight schema loads as well, so a stale read cannot land after invalidation. + +## Constructing manually + +If you need an explicit registry (for sharing across executors, for example): + +```python +from sql_redis import AsyncExecutor, AsyncSchemaRegistry + +registry = AsyncSchemaRegistry(client) +executor = AsyncExecutor(client, registry) +``` + +`AsyncSchemaRegistry.load_all()` is async; call it explicitly if you want eager loading without going through the factory. diff --git a/docs/user_guide/how_to_guides/date-queries.md b/docs/user_guide/how_to_guides/date-queries.md new file mode 100644 index 0000000..c50116a --- /dev/null +++ b/docs/user_guide/how_to_guides/date-queries.md @@ -0,0 +1,64 @@ +# Date and datetime queries + +You want to filter on dates, even though Redis has no native `DATE` type. + +## How dates are stored + +Dates are stored as `NUMERIC` fields containing Unix timestamps. sql-redis converts ISO 8601 string literals to timestamps automatically. + +```sql +-- '2024-01-01' becomes 1704067200 +SELECT * FROM events WHERE created_at > '2024-01-01' + +-- Datetime with time +SELECT * FROM events WHERE created_at > '2024-01-01T12:00:00' + +-- Range +SELECT * FROM events WHERE created_at BETWEEN '2024-01-01' AND '2024-01-31' +``` + +## Supported literal formats + +- Date: `'2024-01-01'` (interpreted as midnight UTC) +- Datetime: `'2024-01-01T12:00:00'` or `'2024-01-01 12:00:00'` +- Datetime with timezone: `'2024-01-01T12:00:00Z'`, `'2024-01-01T12:00:00+00:00'` +- Raw timestamp: `1704067200` + +Timezone-naive literals are interpreted as UTC. 
+ +## Date functions + +Extract date parts using SQL functions that map to Redis `APPLY`: + +| SQL function | Redis function | Description | +|---|---|---| +| `YEAR(field)` | `year(@field)` | Extract year | +| `MONTH(field)` | `monthofyear(@field)` | Month, **0-11** | +| `DAY(field)` | `dayofmonth(@field)` | Day of month, 1-31 | +| `HOUR(field)` | `hour(@field)` | Round to hour | +| `MINUTE(field)` | `minute(@field)` | Round to minute | +| `DAYOFWEEK(field)` | `dayofweek(@field)` | **0 = Sunday** | +| `DAYOFYEAR(field)` | `dayofyear(@field)` | 0-365 | +| `DATE_FORMAT(field, fmt)` | `timefmt(@field, fmt)` | Format timestamp | + +## Examples + +```sql +-- Extract parts +SELECT name, YEAR(created_at) AS y, MONTH(created_at) AS m FROM events + +-- Filter by year +SELECT name FROM events WHERE YEAR(created_at) = 2024 + +-- Group by year +SELECT YEAR(created_at) AS year, COUNT(*) FROM events GROUP BY year + +-- Format +SELECT name, DATE_FORMAT(created_at, '%Y-%m-%d') AS date FROM events +``` + +## Limitations + +- `NOT YEAR(field) = 2024` raises `ValueError`. +- `DATE_FORMAT()` is `SELECT`-only. It is not supported in `WHERE`. +- Date functions combined with `OR` raise `ValueError`. diff --git a/docs/user_guide/how_to_guides/geo-queries.md b/docs/user_guide/how_to_guides/geo-queries.md new file mode 100644 index 0000000..4eab5ed --- /dev/null +++ b/docs/user_guide/how_to_guides/geo-queries.md @@ -0,0 +1,59 @@ +# GEO queries + +You want to filter or sort by geographic distance. + +## Coordinate order + +Use **longitude first**, matching Redis's native GEO format: + +```sql +-- San Francisco: lon=-122.4194, lat=37.7749 +SELECT name FROM stores +WHERE geo_distance(location, POINT(-122.4194, 37.7749)) < 5000 +``` + +The default unit is meters. 
+ +## Units + +| Unit | Code | Example | +|---|---|---| +| Meters | `m` | `geo_distance(location, POINT(-122.4194, 37.7749)) < 5000` | +| Kilometers | `km` | `geo_distance(location, POINT(-122.4194, 37.7749), 'km') < 5` | +| Miles | `mi` | `geo_distance(location, POINT(-122.4194, 37.7749), 'mi') < 3` | +| Feet | `ft` | `geo_distance(location, POINT(-122.4194, 37.7749), 'ft') < 16400` | + +## Operators + +All comparison operators are supported: + +```sql +-- Less than (uses optimized GEOFILTER) +WHERE geo_distance(location, POINT(-122.4194, 37.7749)) < 5000 + +-- Less than or equal (uses optimized GEOFILTER) +WHERE geo_distance(location, POINT(-122.4194, 37.7749)) <= 5000 + +-- Greater than (uses FT.AGGREGATE with FILTER) +WHERE geo_distance(location, POINT(-122.4194, 37.7749)) > 100000 + +-- Between (uses FT.AGGREGATE with FILTER) +WHERE geo_distance(location, POINT(-122.4194, 37.7749), 'km') BETWEEN 10 AND 100 +``` + +## Distance in SELECT + +Compute the distance for every result: + +```sql +SELECT name, geo_distance(location, POINT(-122.4194, 37.7749)) AS distance +FROM stores +``` + +## Combine with other filters + +```sql +SELECT name FROM stores +WHERE category = 'retail' + AND geo_distance(location, POINT(-122.4194, 37.7749)) < 5000 +``` diff --git a/docs/user_guide/how_to_guides/index.md b/docs/user_guide/how_to_guides/index.md new file mode 100644 index 0000000..d7e5bda --- /dev/null +++ b/docs/user_guide/how_to_guides/index.md @@ -0,0 +1,16 @@ +# How-To Guides + +Task-oriented recipes. Each page assumes you have completed {doc}`/user_guide/getting-started` and answers a single "how do I" question. 
+ +```{toctree} +:maxdepth: 1 + +use-parameters +vector-search +text-search +geo-queries +date-queries +missing-fields +lazy-vs-eager-schemas +async-usage +``` diff --git a/docs/user_guide/how_to_guides/lazy-vs-eager-schemas.md b/docs/user_guide/how_to_guides/lazy-vs-eager-schemas.md new file mode 100644 index 0000000..bc15940 --- /dev/null +++ b/docs/user_guide/how_to_guides/lazy-vs-eager-schemas.md @@ -0,0 +1,66 @@ +# Choose lazy or eager schema loading + +The schema registry can fetch index schemas in two ways. The right choice depends on how many indexes you query and how much you care about startup latency. + +## Lazy (default) + +Schemas are loaded the first time each index is referenced. + +```python +from sql_redis import create_executor + +executor = create_executor(client) +# No FT.INFO calls yet. + +executor.execute("SELECT * FROM products LIMIT 1") +# One FT.INFO("products") call now. + +executor.execute("SELECT * FROM products LIMIT 5") +# No additional FT.INFO call (cached). +``` + +Choose lazy when: + +- You only query a subset of indexes per process. +- You care about startup latency (web app cold start, serverless function). +- You construct executors in tests and do not want test setup to block on Redis. + +## Eager (`load_all`) + +Every index is loaded at construction time: + +```python +executor = create_executor(client, schema_cache_strategy="load_all") +# FT._LIST + one FT.INFO per index right now. +``` + +Choose eager when: + +- You want to fail fast on a missing index at startup, not at first query. +- Your process is short-lived and queries many indexes; the up-front cost is the same either way. +- You are running batch scripts where startup cost is irrelevant. + +## Invalidating cached schemas + +If you alter or drop an index, the cached schema goes stale: + +```python +executor._schema_registry.invalidate("products") # one index +executor._schema_registry.invalidate() # all +``` + +The next access re-fetches `FT.INFO`. 
+ +## Watching for changes (sync only) + +The sync `SchemaRegistry` can poll for index creation and deletion: + +```python +registry = executor._schema_registry +registry.start_watching(on_change=lambda evt, idx: print(evt, idx)) + +# In a loop, periodically: +registry.process_pending_events() +``` + +RediSearch does not emit keyspace notifications for `FT.*` commands, so this is poll-based via `FT._LIST`. Call `process_pending_events()` from a background thread or a periodic task. diff --git a/docs/user_guide/how_to_guides/missing-fields.md b/docs/user_guide/how_to_guides/missing-fields.md new file mode 100644 index 0000000..eac4402 --- /dev/null +++ b/docs/user_guide/how_to_guides/missing-fields.md @@ -0,0 +1,37 @@ +# Check for missing fields + +You want to find documents where a field is absent or present. + +## `IS NULL` and `IS NOT NULL` + +```sql +-- Find users without an email +SELECT * FROM users WHERE email IS NULL + +-- Find users with an email +SELECT * FROM users WHERE email IS NOT NULL + +-- Combine with other filters +SELECT * FROM users WHERE category = 'eng' AND email IS NULL +``` + +`IS NULL` translates to `ismissing(@email)` and the negation to `-ismissing(@email)`. + +**Requires Redis 7.4+** (RediSearch 2.10+) and the field must be declared with `INDEXMISSING` in the index schema. A `UserWarning` is emitted at translation time as a reminder. + +## `exists()` for `SELECT` and `HAVING` + +`exists()` is a different mechanism. It runs through `FT.AGGREGATE` and `APPLY`, returning `1` or `0` per row rather than filtering them out. + +```sql +-- Add a 0/1 column +SELECT name, exists(email) AS has_email FROM users + +-- Filter via HAVING +SELECT name FROM users HAVING exists(email) = 1 + +-- Multiple checks +SELECT name, exists(email) AS has_email, exists(phone) AS has_phone FROM users +``` + +`exists()` does not require `INDEXMISSING` on the field, but it cannot be used in `WHERE`. Use `IS NULL` / `IS NOT NULL` to filter rows by presence. 
diff --git a/docs/user_guide/how_to_guides/text-search.md b/docs/user_guide/how_to_guides/text-search.md new file mode 100644 index 0000000..8907959 --- /dev/null +++ b/docs/user_guide/how_to_guides/text-search.md @@ -0,0 +1,153 @@ +# Full-text search + +You want to search a `TEXT` field beyond simple equality. Each section below answers one task. + +For the complete catalog of supported modes and their RediSearch translation, see {doc}`/api/sql-syntax`. + +## Match an exact phrase + +```python +executor.execute( + "SELECT * FROM products WHERE title = :phrase", + params={"phrase": "gaming laptop"}, +) +``` + +`=` on a `TEXT` field is **exact phrase match**, not tokenized AND. Redis sees `@title:"gaming laptop"`. Stopwords like `of` are stripped automatically with a `UserWarning`; see "Keep stopwords in matches" below. + +## Match all of several words, in any order + +Use `fulltext()` for tokenized AND search: + +```python +executor.execute( + "SELECT * FROM products WHERE fulltext(title, :terms)", + params={"terms": "gaming laptop"}, +) +``` + +Redis sees `@title:(gaming laptop)`. A title containing both words in either order matches. + +## Match either of several words + +Use uppercase `OR` inside `fulltext()`: + +```python +executor.execute( + "SELECT * FROM products WHERE fulltext(title, 'laptop OR tablet OR phone')", +) +``` + +The `OR` must be uppercase. Lowercase `or` is treated as a literal third word. + +## Match across multiple fields + +Use `OR` between two `fulltext()` calls: + +```python +executor.execute(""" + SELECT * FROM products + WHERE fulltext(title, 'laptop') OR fulltext(description, 'laptop') +""") +``` + +## Match with typos + +Use `fuzzy()` and pass an optional Levenshtein distance (1, 2, or 3): + +```python +executor.execute( + "SELECT * FROM products WHERE fuzzy(title, :term, 2)", + params={"term": "laptap"}, +) +``` + +Distance 1 catches one-character typos; 2 catches most common misspellings; 3 is permissive and slow. 
+ +## Match a prefix, suffix, or substring + +Use `LIKE` with `%`: + +```python +executor.execute("SELECT * FROM products WHERE title LIKE 'lap%'") # prefix +executor.execute("SELECT * FROM products WHERE title LIKE '%top'") # suffix +executor.execute("SELECT * FROM products WHERE title LIKE '%apt%'") # contains +``` + +Underscore (`_`) wildcards from standard SQL are not supported. + +## Require words to be near each other + +Use `fulltext()` with a slop value (max words allowed between the terms): + +```python +executor.execute( + "SELECT * FROM products WHERE fulltext(title, :phrase, 2)", + params={"phrase": "gaming laptop"}, +) +``` + +To require the words appear in the given order, pass `true` as the fourth argument: + +```python +executor.execute( + "SELECT * FROM products WHERE fulltext(title, 'gaming laptop', 2, true)", +) +``` + +## Mark a term as optional but boosting + +Prefix a term with `~` inside `fulltext()`: + +```python +executor.execute( + "SELECT * FROM products WHERE fulltext(title, 'laptop ~gaming')", +) +``` + +Documents matching `laptop` rank higher when `gaming` is also present. Documents without `gaming` still match. + +## Exclude documents with a term + +Use `NOT`: + +```python +executor.execute( + "SELECT * FROM products WHERE fulltext(title, 'laptop') AND NOT fulltext(title, 'refurbished')", +) +``` + +## Get a relevance score back + +Add `score()` to the `SELECT` list: + +```python +result = executor.execute(""" + SELECT title, score() AS relevance + FROM products + WHERE fulltext(title, 'laptop') +""") + +for row in result.rows: + print(row[b"title"], row[b"relevance"]) +``` + +`score()` triggers `WITHSCORES` in the underlying `FT.SEARCH`. The score is BM25 by default. The result-row shape changes when scoring is enabled; see {doc}`/concepts/result-shape`. + +## Keep stopwords in matches + +By default, both `=` and `fulltext()` strip Redis's default stopwords (about 30 common words like `the`, `a`, `of`) before sending the query. 
RediSearch does not index these by default, so an unstripped query would silently match nothing. + +If your data needs stopwords to match, create the index with `STOPWORDS 0`: + +``` +FT.CREATE myindex ON HASH PREFIX 1 doc: STOPWORDS 0 SCHEMA title TEXT +``` + +This is an index-creation choice, not a per-query choice. Once the index has been built without stopword filtering, all queries against it preserve them. + +## Common gotchas + +- `fuzzy()` and `fulltext()` only work on `TEXT` fields. Calling them on `TAG` or `NUMERIC` raises `ValueError`. +- Special characters in search terms (`@`, `|`, `-`, `*`, `+`) are escaped automatically by sql-redis. +- A `UserWarning` is emitted when stopwords are stripped, so you can audit which terms are dropping out. diff --git a/docs/user_guide/how_to_guides/use-parameters.md b/docs/user_guide/how_to_guides/use-parameters.md new file mode 100644 index 0000000..0947db7 --- /dev/null +++ b/docs/user_guide/how_to_guides/use-parameters.md @@ -0,0 +1,65 @@ +# Use parameters + +You want to inject runtime values into a SQL string without manually building the query. 
+ +## Recipe + +Use `:name` placeholders and pass values via `params`: + +```python +result = executor.execute( + "SELECT title FROM products WHERE category = :cat AND price < :max_price", + params={"cat": "electronics", "max_price": 500}, +) +``` + +## What gets substituted + +| Python type | SQL substitution | +|---|---| +| `int`, `float` | unquoted literal: `123`, `12.5` | +| `str` | quoted with SQL standard escaping: `'O''Brien'` | +| `bytes` | kept; substituted later as the `$vector` for KNN | +| anything else | kept as `:name`; the translator handles it (or raises) | + +## Quote escaping is automatic + +```python +executor.execute( + "SELECT * FROM users WHERE name = :name", + params={"name": "O'Brien"}, +) +# Produces: WHERE name = 'O''Brien' +``` + +## Similar parameter names are safe + +The substitution is token-based, so `:id` will not match inside `:product_id`: + +```python +executor.execute( + "SELECT * FROM rows WHERE id = :id AND product_id = :product_id", + params={"id": 1, "product_id": 100}, +) +``` + +## Vectors + +Pass a vector as `bytes` for use in a KNN query: + +```python +import struct + +vec = struct.pack(f"{len(embedding)}f", *embedding) + +result = executor.execute( + "SELECT title, vector_distance(embedding, :vec) AS score FROM products LIMIT 5", + params={"vec": vec}, +) +``` + +The `bytes` value is intentionally not stringified into the SQL. The executor injects it into the Redis command list as raw bytes after translation. + +## See also + +- {doc}`/concepts/parameter-substitution` for why the substitution is token-based. diff --git a/docs/user_guide/how_to_guides/vector-search.md b/docs/user_guide/how_to_guides/vector-search.md new file mode 100644 index 0000000..3afe882 --- /dev/null +++ b/docs/user_guide/how_to_guides/vector-search.md @@ -0,0 +1,56 @@ +# Vector and hybrid search + +You want to find the K most similar items to a query embedding, optionally filtered by metadata. 
+ +## Prerequisites + +- An index with a `VECTOR` field, e.g.: + ``` + FT.CREATE products ON HASH PREFIX 1 product: + SCHEMA + title TEXT + category TAG + embedding VECTOR FLAT 6 TYPE FLOAT32 DIM 1536 DISTANCE_METRIC COSINE + ``` +- Embeddings stored as `FLOAT32` byte arrays in the `embedding` field. + +## Pure KNN + +```python +import struct + +query_vec = struct.pack(f"{len(embedding)}f", *embedding) + +result = executor.execute( + """ + SELECT title, vector_distance(embedding, :vec) AS score + FROM products + LIMIT 5 + """, + params={"vec": query_vec}, +) +``` + +`vector_distance(field, :param)` is the function that triggers a KNN search. The `LIMIT` becomes the K value. + +## Hybrid: filter then KNN + +Combine a `WHERE` clause with `vector_distance`: + +```python +result = executor.execute( + """ + SELECT title, vector_distance(embedding, :vec) AS score + FROM products + WHERE category = 'electronics' AND price < 1000 + LIMIT 5 + """, + params={"vec": query_vec}, +) +``` + +The filter narrows the candidate set; the KNN runs over what survives. + +## Returning the score + +`vector_distance(...) AS alias` is required for the score to come back as a column. The result rows include the alias as a key. diff --git a/docs/user_guide/index.md b/docs/user_guide/index.md new file mode 100644 index 0000000..d9efae8 --- /dev/null +++ b/docs/user_guide/index.md @@ -0,0 +1,43 @@ +--- +myst: + html_meta: + "description lang=en": | + sql-redis user guide. Installation, getting started, and task-oriented recipes. +--- + +# User Guide + +::::{grid} 2 +:gutter: 3 + +:::{grid-item-card} 📦 Installation +:link: installation +:link-type: doc + +**Set up sql-redis.** pip install, Redis container, optional extras. +::: + +:::{grid-item-card} 🚀 Getting Started +:link: getting-started +:link-type: doc + +**Your first query.** Schema setup, executor construction, end-to-end SELECT. 
+::: + +:::{grid-item-card} 🛠️ How-To Guides +:link: how_to_guides/index +:link-type: doc + +**Solve specific problems.** Recipes for parameters, vectors, text search, GEO, dates, async, and schema strategy. +::: + +:::: + +```{toctree} +:maxdepth: 2 +:hidden: + +installation +getting-started +how_to_guides/index +``` diff --git a/docs/user_guide/installation.md b/docs/user_guide/installation.md new file mode 100644 index 0000000..881ab58 --- /dev/null +++ b/docs/user_guide/installation.md @@ -0,0 +1,51 @@ +# Installation + +## Install the package + +```bash +pip install sql-redis +``` + +Or with `uv`: + +```bash +uv add sql-redis +``` + +Python 3.9 or newer is required. + +## Run Redis with the search module + +The library targets RediSearch 2.x. The simplest way to get a compatible Redis is the official Redis image, version 8.x: + +```bash +docker run -d --name redis -p 6379:6379 redis:8.4 +``` + +For features that depend on `INDEXMISSING` (used by `IS NULL` translation), Redis 7.4 or newer is required. 
+ +## Verify the install + +```python +from sql_redis import __version__ +print(__version__) +``` + +## Optional: development setup + +If you are working on sql-redis itself: + +```bash +git clone https://github.com/redis-developer/sql-redis +cd sql-redis +make install # uv sync +make test # requires Docker for testcontainers +``` + +To build the docs locally: + +```bash +uv sync --group docs +make docs-build +make docs-serve # http://localhost:8000 +``` diff --git a/pyproject.toml b/pyproject.toml index c8e51c8..99291a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ dependencies = [ [project.urls] Homepage = "https://github.com/redis-developer/sql-redis" Repository = "https://github.com/redis-developer/sql-redis" +Documentation = "https://docs.redisvl.com/projects/sql-redis/" [build-system] requires = ["hatchling"] @@ -46,6 +47,13 @@ dev = [ "codespell>=2.4.1,<3", "testcontainers[redis]>=4.0.0,<5", ] +docs = [ + "sphinx>=7.3,<9", + "sphinx-book-theme>=1.1", + "sphinx-design>=0.6", + "sphinx-copybutton>=0.5", + "myst-parser>=3.0", +] [tool.uv] default-groups = ["dev"] diff --git a/uv.lock b/uv.lock index 3036905..1f12e6d 100644 --- a/uv.lock +++ b/uv.lock @@ -2,11 +2,50 @@ version = 1 revision = 2 requires-python = ">=3.9, <3.14" resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", "python_full_version >= '3.9.2' and python_full_version < '3.10'", "python_full_version < '3.9.2'", ] +[[package]] +name = "accessible-pygments" +version = "0.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/c1/bbac6a50d02774f91572938964c582fff4270eee73ab822a4aeea4d8b11b/accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872", size = 1377899, upload-time = "2024-05-10T11:23:10.216Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/8d/3f/95338030883d8c8b91223b4e21744b04d11b161a3ef117295d8241f50ab4/accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7", size = 1395903, upload-time = "2024-05-10T11:23:08.421Z" }, +] + +[[package]] +name = "alabaster" +version = "0.7.16" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" }, +] + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, +] + [[package]] name = "async-timeout" version = "5.0.1" @@ -16,6 
+55,28 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, ] +[[package]] +name = "babel" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/b2/51899539b6ceeeb420d40ed3cd4b7a40519404f9baf3d4ac99dc413a834b/babel-2.18.0.tar.gz", hash = "sha256:b80b99a14bd085fcacfa15c9165f651fbb3406e66cc603abf11c5750937c992d", size = 9959554, upload-time = "2026-02-01T12:30:56.078Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/f5/21d2de20e8b8b0408f0681956ca2c69f1320a3848ac50e6e7f39c6159675/babel-2.18.0-py3-none-any.whl", hash = "sha256:e2b422b277c2b9a9630c1d7903c2a00d0830c409c59ac8cae9081c92f1aeba35", size = 10196845, upload-time = "2026-02-01T12:30:53.445Z" }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.14.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c3/b0/1c6a16426d389813b48d95e26898aff79abbde42ad353958ad95cc8c9b21/beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86", size = 627737, upload-time = "2025-11-30T15:08:26.084Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" }, +] + [[package]] name = "black" version = "25.11.0" @@ -64,7 +125,8 @@ name = "black" version = "25.12.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - 
"python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", ] dependencies = [ { name = "click", version = "8.3.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, @@ -128,7 +190,8 @@ name = "cfgv" version = "3.5.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" } wheels = [ @@ -245,7 +308,8 @@ name = "click" version = "8.3.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", ] dependencies = [ { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, @@ -372,7 +436,8 @@ name = "coverage" version = "7.13.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/b6/45/2c665ca77ec32ad67e25c77daf1cee28ee4558f3bc571cdbaf88a00b9f23/coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936", size = 820905, upload-time = "2025-12-08T13:14:38.055Z" } wheels = [ @@ -471,6 +536,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, ] 
+[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + [[package]] name = "exceptiongroup" version = "1.3.1" @@ -501,7 +575,8 @@ name = "filelock" version = "3.20.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } wheels = [ @@ -526,7 +601,8 @@ name = "identify" version = "2.6.16" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" } wheels = [ @@ -542,6 +618,44 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] +[[package]] +name = "imagesize" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] +sdist = { url = "https://files.pythonhosted.org/packages/cf/59/4b0dd64676aa6fb4986a755790cb6fc558559cf0084effad516820208ec3/imagesize-1.5.0.tar.gz", hash = "sha256:8bfc5363a7f2133a89f0098451e0bcb1cd71aba4dc02bbcecb39d99d40e1b94f", size = 1281127, upload-time = "2026-03-03T01:59:54.651Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/b1/a0662b03103c66cf77101a187f396ea91167cd9b7d5d3a2e465ad2c7ee9b/imagesize-1.5.0-py2.py3-none-any.whl", hash = "sha256:32677681b3f434c2cb496f00e89c5a291247b35b1f527589909e008057da5899", size = 5763, upload-time = "2026-03-03T01:59:52.343Z" }, +] + +[[package]] +name = "imagesize" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/6c/e6/7bf14eeb8f8b7251141944835abd42eb20a658d89084b7e1f3e5fe394090/imagesize-2.0.0.tar.gz", hash = "sha256:8e8358c4a05c304f1fccf7ff96f036e7243a189e9e42e90851993c558cfe9ee3", size = 1773045, upload-time = "2026-03-03T14:18:29.941Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/53/fb7122b71361a0d121b669dcf3d31244ef75badbbb724af388948de543e2/imagesize-2.0.0-py2.py3-none-any.whl", hash = "sha256:5667c5bbb57ab3f1fa4bc366f4fbc971db3d5ed011fd2715fd8001f782718d96", size = 9441, upload-time = "2026-03-03T14:18:27.892Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + [[package]] name = "iniconfig" version = "2.1.0" @@ -560,7 +674,8 @@ name = "iniconfig" version = "2.3.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } wheels = [ @@ -576,6 +691,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/b3/8def84f539e7d2289a02f0524b944b15d7c75dab7628bedf1c4f0992029c/isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6", size = 92310, upload-time = "2023-12-13T20:37:23.244Z" }, ] +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = 
"sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + [[package]] name = "librt" version = "0.7.8" @@ -637,6 +764,154 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e4/5d/dce0c92f786495adf2c1e6784d9c50a52fb7feb1cfb17af97a08281a6e82/librt-0.7.8-cp39-cp39-win_amd64.whl", hash = "sha256:e90a8e237753c83b8e484d478d9a996dc5e39fd5bd4c6ce32563bc8123f132be", size = 49801, upload-time = "2026-01-14T12:56:15.827Z" }, ] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.10.*'", + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] +dependencies = [ + { name = "mdurl", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +dependencies = [ + { name = "mdurl", marker 
= "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, + { url = "https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057, upload-time = "2025-09-27T18:36:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050, 
upload-time = "2025-09-27T18:36:08.005Z" }, + { url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681, upload-time = "2025-09-27T18:36:08.881Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705, upload-time = "2025-09-27T18:36:10.131Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, + { url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, + { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, + { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { 
url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/56/23/0d8c13a44bde9154821586520840643467aee574d8ce79a17da539ee7fed/markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26", size = 11623, upload-time = "2025-09-27T18:37:29.296Z" }, + { url = "https://files.pythonhosted.org/packages/fd/23/07a2cb9a8045d5f3f0890a8c3bc0859d7a47bfd9a560b563899bec7b72ed/markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc", size = 12049, upload-time = "2025-09-27T18:37:30.234Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/e4/6be85eb81503f8e11b61c0b6369b6e077dcf0a74adbd9ebf6b349937b4e9/markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c", size = 21923, upload-time = "2025-09-27T18:37:31.177Z" }, + { url = "https://files.pythonhosted.org/packages/6f/bc/4dc914ead3fe6ddaef035341fee0fc956949bbd27335b611829292b89ee2/markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42", size = 20543, upload-time = "2025-09-27T18:37:32.168Z" }, + { url = "https://files.pythonhosted.org/packages/89/6e/5fe81fbcfba4aef4093d5f856e5c774ec2057946052d18d168219b7bd9f9/markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b", size = 20585, upload-time = "2025-09-27T18:37:33.166Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f6/e0e5a3d3ae9c4020f696cd055f940ef86b64fe88de26f3a0308b9d3d048c/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758", size = 21387, upload-time = "2025-09-27T18:37:34.185Z" }, + { url = "https://files.pythonhosted.org/packages/c8/25/651753ef4dea08ea790f4fbb65146a9a44a014986996ca40102e237aa49a/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2", size = 20133, upload-time = "2025-09-27T18:37:35.138Z" }, + { url = "https://files.pythonhosted.org/packages/dc/0a/c3cf2b4fef5f0426e8a6d7fce3cb966a17817c568ce59d76b92a233fdbec/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d", size = 20588, upload-time = "2025-09-27T18:37:36.096Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/1b/a7782984844bd519ad4ffdbebbba2671ec5d0ebbeac34736c15fb86399e8/markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7", size = 14566, upload-time = "2025-09-27T18:37:37.09Z" }, + { url = "https://files.pythonhosted.org/packages/18/1f/8d9c20e1c9440e215a44be5ab64359e207fcb4f675543f1cf9a2a7f648d0/markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e", size = 15053, upload-time = "2025-09-27T18:37:38.054Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d3/fe08482b5cd995033556d45041a4f4e76e7f0521112a9c9991d40d39825f/markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8", size = 13928, upload-time = "2025-09-27T18:37:39.037Z" }, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] +dependencies = [ + { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5", size = 43542, upload-time = "2024-09-09T20:27:49.564Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", size = 55316, upload-time = "2024-09-09T20:27:48.397Z" }, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.5.0" +source = { registry = 
"https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655, upload-time = "2025-08-11T07:25:49.083Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205, upload-time = "2025-08-11T07:25:47.597Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + [[package]] name = "mypy" version = "1.19.1" @@ -692,6 +967,67 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = 
"sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, ] +[[package]] +name = "myst-parser" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] +dependencies = [ + { name = "docutils", marker = "python_full_version < '3.10'" }, + { name = "jinja2", marker = "python_full_version < '3.10'" }, + { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "mdit-py-plugins", version = "0.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "pyyaml", marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/64/e2f13dac02f599980798c01156393b781aec983b52a6e4057ee58f07c43a/myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87", size = 92392, upload-time = "2024-04-28T20:22:42.116Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/de/21aa8394f16add8f7427f0a1326ccd2b3a2a8a3245c9252bc5ac034c6155/myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1", size = 83163, upload-time = "2024-04-28T20:22:39.985Z" }, +] + +[[package]] +name = "myst-parser" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "docutils", marker = "python_full_version == '3.10.*'" }, + { name = "jinja2", marker = "python_full_version == '3.10.*'" }, + { name = "markdown-it-py", version = "3.0.0", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "mdit-py-plugins", version = "0.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "pyyaml", marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/a5/9626ba4f73555b3735ad86247a8077d4603aa8628537687c839ab08bfe44/myst_parser-4.0.1.tar.gz", hash = "sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4", size = 93985, upload-time = "2025-02-12T10:53:03.833Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/df/76d0321c3797b54b60fef9ec3bd6f4cfd124b9e422182156a1dd418722cf/myst_parser-4.0.1-py3-none-any.whl", hash = "sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d", size = 84579, upload-time = "2025-02-12T10:53:02.078Z" }, +] + +[[package]] +name = "myst-parser" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +dependencies = [ + { name = "docutils", marker = "python_full_version >= '3.11'" }, + { name = "jinja2", marker = "python_full_version >= '3.11'" }, + { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "mdit-py-plugins", version = "0.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "pyyaml", marker = "python_full_version >= '3.11'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/fa/7b45eef11b7971f0beb29d27b7bfe0d747d063aa29e170d9edd004733c8a/myst_parser-5.0.0.tar.gz", hash = 
"sha256:f6f231452c56e8baa662cc352c548158f6a16fcbd6e3800fc594978002b94f3a", size = 98535, upload-time = "2026-01-15T09:08:18.036Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/ac/686789b9145413f1a61878c407210e41bfdb097976864e0913078b24098c/myst_parser-5.0.0-py3-none-any.whl", hash = "sha256:ab31e516024918296e169139072b81592336f2fef55b8986aa31c9f04b5f7211", size = 84533, upload-time = "2026-01-15T09:08:16.788Z" }, +] + [[package]] name = "nodeenv" version = "1.10.0" @@ -737,7 +1073,8 @@ name = "platformdirs" version = "4.5.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } wheels = [ @@ -778,7 +1115,8 @@ name = "pre-commit" version = "4.5.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", ] dependencies = [ { name = "cfgv", version = "3.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, @@ -792,6 +1130,52 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" }, ] +[[package]] +name = "pydata-sphinx-theme" +version = "0.15.4" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.10.*'", + "python_full_version >= '3.9.2' and python_full_version < 
'3.10'", + "python_full_version < '3.9.2'", +] +dependencies = [ + { name = "accessible-pygments", marker = "python_full_version < '3.11'" }, + { name = "babel", marker = "python_full_version < '3.11'" }, + { name = "beautifulsoup4", marker = "python_full_version < '3.11'" }, + { name = "docutils", marker = "python_full_version < '3.11'" }, + { name = "packaging", marker = "python_full_version < '3.11'" }, + { name = "pygments", marker = "python_full_version < '3.11'" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/ea/3ab478cccacc2e8ef69892c42c44ae547bae089f356c4b47caf61730958d/pydata_sphinx_theme-0.15.4.tar.gz", hash = "sha256:7762ec0ac59df3acecf49fd2f889e1b4565dbce8b88b2e29ee06fdd90645a06d", size = 2400673, upload-time = "2024-06-25T19:28:45.041Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/d3/c622950d87a2ffd1654208733b5bd1c5645930014abed8f4c0d74863988b/pydata_sphinx_theme-0.15.4-py3-none-any.whl", hash = "sha256:2136ad0e9500d0949f96167e63f3e298620040aea8f9c74621959eda5d4cf8e6", size = 4640157, upload-time = "2024-06-25T19:28:42.383Z" }, +] + +[[package]] +name = "pydata-sphinx-theme" +version = "0.16.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +dependencies = [ + { name = "accessible-pygments", marker = "python_full_version >= '3.11'" }, + { name = "babel", marker = "python_full_version >= '3.11'" }, + { name = "beautifulsoup4", marker = "python_full_version >= '3.11'" }, + { name = "docutils", marker = "python_full_version >= '3.11'" }, + { name = "pygments", marker = "python_full_version >= '3.11'" }, + { 
name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "typing-extensions", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/20/bb50f9de3a6de69e6abd6b087b52fa2418a0418b19597601605f855ad044/pydata_sphinx_theme-0.16.1.tar.gz", hash = "sha256:a08b7f0b7f70387219dc659bff0893a7554d5eb39b59d3b8ef37b8401b7642d7", size = 2412693, upload-time = "2024-12-17T10:53:39.537Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/0d/8ba33fa83a7dcde13eb3c1c2a0c1cc29950a048bfed6d9b0d8b6bd710b4c/pydata_sphinx_theme-0.16.1-py3-none-any.whl", hash = "sha256:225331e8ac4b32682c18fcac5a57a6f717c4e632cea5dd0e247b55155faeccde", size = 6723264, upload-time = "2024-12-17T10:53:35.645Z" }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -987,7 +1371,8 @@ name = "redis" version = "7.1.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", ] dependencies = [ { name = "async-timeout", marker = "python_full_version >= '3.10' and python_full_version < '3.11.3'" }, @@ -1012,6 +1397,276 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] +[[package]] +name = "roman-numerals" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/f9/41dc953bbeb056c17d5f7a519f50fdf010bd0553be2d630bc69d1e022703/roman_numerals-4.1.0.tar.gz", hash = "sha256:1af8b147eb1405d5839e78aeb93131690495fe9da5c91856cb33ad55a7f1e5b2", size = 9077, upload-time = "2025-12-17T18:25:34.381Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/54/6f679c435d28e0a568d8e8a7c0a93a09010818634c3c3907fc98d8983770/roman_numerals-4.1.0-py3-none-any.whl", hash = "sha256:647ba99caddc2cc1e55a51e4360689115551bf4476d90e8162cf8c345fe233c7", size = 7676, upload-time = "2025-12-17T18:25:33.098Z" }, +] + +[[package]] +name = "roman-numerals-py" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "roman-numerals", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/b5/de96fca640f4f656eb79bbee0e79aeec52e3e0e359f8a3e6a0d366378b64/roman_numerals_py-4.1.0.tar.gz", hash = "sha256:f5d7b2b4ca52dd855ef7ab8eb3590f428c0b1ea480736ce32b01fef2a5f8daf9", size = 4274, upload-time = "2025-12-17T18:25:41.153Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/2c/daca29684cbe9fd4bc711f8246da3c10adca1ccc4d24436b17572eb2590e/roman_numerals_py-4.1.0-py3-none-any.whl", hash = "sha256:553114c1167141c1283a51743759723ecd05604a1b6b507225e91dc1a6df0780", size = 4547, upload-time = "2025-12-17T18:25:40.136Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "soupsieve" +version = "2.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" }, +] + +[[package]] +name = "sphinx" +version = "7.4.7" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] +dependencies = [ + { name = "alabaster", version = "0.7.16", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "babel", marker = "python_full_version < '3.10'" }, + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version < '3.10'" }, + { name = "imagesize", version = "1.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jinja2", marker = "python_full_version < '3.10'" }, + { name = "packaging", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "requests", marker = "python_full_version < '3.10'" }, + { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version < '3.10'" }, + { name = 
"sphinxcontrib-jsmath", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version < '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/be/50e50cb4f2eff47df05673d361095cafd95521d2a22521b920c67a372dcb/sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe", size = 8067911, upload-time = "2024-07-20T14:46:56.059Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/ef/153f6803c5d5f8917dbb7f7fcf6d34a871ede3296fa89c2c703f5f8a6c8e/sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239", size = 3401624, upload-time = "2024-07-20T14:46:52.142Z" }, +] + +[[package]] +name = "sphinx" +version = "8.1.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "babel", marker = "python_full_version == '3.10.*'" }, + { name = "colorama", marker = "python_full_version == '3.10.*' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version == '3.10.*'" }, + { name = "imagesize", version = "2.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "jinja2", marker = "python_full_version == '3.10.*'" }, + { name = "packaging", marker = "python_full_version == '3.10.*'" }, + { name = "pygments", marker = "python_full_version == '3.10.*'" }, + { name = "requests", marker = "python_full_version == '3.10.*'" }, + { name = "snowballstemmer", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-applehelp", marker = 
"python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version == '3.10.*'" }, + { name = "tomli", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611, upload-time = "2024-10-13T20:27:13.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125, upload-time = "2024-10-13T20:27:10.448Z" }, +] + +[[package]] +name = "sphinx" +version = "8.2.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +dependencies = [ + { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "babel", marker = "python_full_version >= '3.11'" }, + { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version >= '3.11'" }, + { name = "imagesize", version = "2.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jinja2", marker = "python_full_version >= '3.11'" }, + { name = "packaging", marker = "python_full_version >= '3.11'" }, + { name = "pygments", marker = "python_full_version >= '3.11'" 
}, + { name = "requests", marker = "python_full_version >= '3.11'" }, + { name = "roman-numerals-py", marker = "python_full_version >= '3.11'" }, + { name = "snowballstemmer", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876, upload-time = "2025-03-02T22:31:59.658Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = "2025-03-02T22:31:56.836Z" }, +] + +[[package]] +name = "sphinx-book-theme" +version = "1.1.4" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.10.*'", + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] +dependencies = [ + { name = "pydata-sphinx-theme", version = "0.15.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/45/19/d002ed96bdc7738c15847c730e1e88282d738263deac705d5713b4d8fa94/sphinx_book_theme-1.1.4.tar.gz", hash = "sha256:73efe28af871d0a89bd05856d300e61edce0d5b2fbb7984e84454be0fedfe9ed", size = 439188, upload-time = "2025-02-20T16:32:32.581Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/9e/c41d68be04eef5b6202b468e0f90faf0c469f3a03353f2a218fd78279710/sphinx_book_theme-1.1.4-py3-none-any.whl", hash = "sha256:843b3f5c8684640f4a2d01abd298beb66452d1b2394cd9ef5be5ebd5640ea0e1", size = 433952, upload-time = "2025-02-20T16:32:31.009Z" }, +] + +[[package]] +name = "sphinx-book-theme" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +dependencies = [ + { name = "pydata-sphinx-theme", version = "0.16.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/f7/154786f3cfb7692cd7acc24b6dfe4dcd1146b66f376b17df9e47125555e9/sphinx_book_theme-1.2.0.tar.gz", hash = "sha256:4a7ebfc7da4395309ac942ddfc38fbec5c5254c3be22195e99ad12586fbda9e3", size = 443962, upload-time = "2026-03-09T23:20:30.442Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/bf/6f506a37c7f8ecc4576caf9486e303c7af249f6d70447bb51dde9d78cb99/sphinx_book_theme-1.2.0-py3-none-any.whl", hash = "sha256:709605d308e1991c5ef0cf19c481dbe9084b62852e317fafab74382a0ee7ccfa", size = 455936, upload-time = "2026-03-09T23:20:28.788Z" }, +] + +[[package]] +name = "sphinx-copybutton" +version = "0.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/2b/a964715e7f5295f77509e59309959f4125122d648f86b4fe7d70ca1d882c/sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd", size = 23039, upload-time = "2023-04-14T08:10:22.998Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/48/1ea60e74949eecb12cdd6ac43987f9fd331156388dcc2319b45e2ebb81bf/sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e", size = 13343, upload-time = "2023-04-14T08:10:20.844Z" }, +] + +[[package]] +name = "sphinx-design" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.10.*'", + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2b/69/b34e0cb5336f09c6866d53b4a19d76c227cdec1bbc7ac4de63ca7d58c9c7/sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632", size = 2193689, upload-time = "2024-08-02T13:48:44.277Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/43/65c0acbd8cc6f50195a3a1fc195c404988b15c67090e73c7a41a9f57d6bd/sphinx_design-0.6.1-py3-none-any.whl", hash = 
"sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c", size = 2215338, upload-time = "2024-08-02T13:48:42.106Z" }, +] + +[[package]] +name = "sphinx-design" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +dependencies = [ + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/13/7b/804f311da4663a4aecc6cf7abd83443f3d4ded970826d0c958edc77d4527/sphinx_design-0.7.0.tar.gz", hash = "sha256:d2a3f5b19c24b916adb52f97c5f00efab4009ca337812001109084a740ec9b7a", size = 2203582, upload-time = "2026-01-19T13:12:53.297Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/cf/45dd359f6ca0c3762ce0490f681da242f0530c49c81050c035c016bfdd3a/sphinx_design-0.7.0-py3-none-any.whl", hash = "sha256:f82bf179951d58f55dca78ab3706aeafa496b741a91b1911d371441127d64282", size = 2220350, upload-time = "2026-01-19T13:12:51.077Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = 
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + [[package]] name = "sql-redis" version = "0.5.0" @@ -1037,6 +1692,19 @@ dev = [ { name = "testcontainers", version = "4.13.0", source = { registry = "https://pypi.org/simple" }, extra = ["redis"], marker = "python_full_version < '3.9.2'" }, { name = "testcontainers", version = "4.13.3", source = { registry = "https://pypi.org/simple" }, extra = ["redis"], marker 
= "python_full_version >= '3.9.2'" }, ] +docs = [ + { name = "myst-parser", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "myst-parser", version = "4.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "myst-parser", version = "5.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-book-theme", version = "1.1.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "sphinx-book-theme", version = "1.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-copybutton" }, + { name = "sphinx-design", version = "0.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "sphinx-design", version = "0.7.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] [package.metadata] requires-dist = [ @@ -1056,6 +1724,13 @@ dev = [ { name = "pytest-cov", specifier = ">=4.0.0,<5" }, { name = "testcontainers", extras = ["redis"], specifier = ">=4.0.0,<5" }, ] +docs = [ + { name = "myst-parser", specifier = ">=3.0" }, + { name = "sphinx", specifier = ">=7.3,<9" }, + { name = "sphinx-book-theme", specifier = ">=1.1" }, + { name = "sphinx-copybutton", specifier = ">=0.5" }, + { name = "sphinx-design", specifier = ">=0.6" }, +] [[package]] name = "sqlglot" @@ -1095,7 
+1770,8 @@ name = "testcontainers" version = "4.13.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.10'", + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", "python_full_version >= '3.9.2' and python_full_version < '3.10'", ] dependencies = [ @@ -1264,3 +1940,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/07/90/0c14b241d18d80ddf4c847a5f52071e126e8a6a9e5a8a7952add8ef0d766/wrapt-2.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:d6cc985b9c8b235bd933990cdbf0f891f8e010b65a3911f7a55179cd7b0fc57b", size = 58895, upload-time = "2025-11-07T00:45:29.527Z" }, { url = "https://files.pythonhosted.org/packages/15/d1/b51471c11592ff9c012bd3e2f7334a6ff2f42a7aed2caffcf0bdddc9cb89/wrapt-2.0.1-py3-none-any.whl", hash = "sha256:4d2ce1bf1a48c5277d7969259232b57645aae5686dba1eaeade39442277afbca", size = 44046, upload-time = "2025-11-07T00:45:32.116Z" }, ] + +[[package]] +name = "zipp" +version = "3.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/21/093488dfc7cc8964ded15ab726fad40f25fd3d788fd741cc1c5a17d78ee8/zipp-3.23.1.tar.gz", hash = "sha256:32120e378d32cd9714ad503c1d024619063ec28aad2248dc6672ad13edfa5110", size = 25965, upload-time = "2026-04-13T23:21:46.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/8a/0861bec20485572fbddf3dfba2910e38fe249796cb73ecdeb74e07eeb8d3/zipp-3.23.1-py3-none-any.whl", hash = "sha256:0b3596c50a5c700c9cb40ba8d86d9f2cc4807e9bedb06bcdf7fac85633e444dc", size = 10378, upload-time = "2026-04-13T23:21:45.386Z" }, +]