diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..131952d
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,22 @@
+# AI Configuration
+AI_PROVIDER=claude
+# The "[1m]" suffix selects the 1M-token context window variant; it is part of the model identifier, not a typo
+AI_MODEL=claude-opus-4-6[1m]
+AI_CLI_TIMEOUT=60
+
+# Claude - Option 1: API Key
+# ANTHROPIC_API_KEY=
+
+# Claude - Option 2: Vertex AI
+# CLAUDE_CODE_USE_VERTEX=1
+# CLOUD_ML_REGION=
+# ANTHROPIC_VERTEX_PROJECT_ID=
+
+# Gemini
+# GEMINI_API_KEY=
+
+# Cursor
+# CURSOR_API_KEY=
+
+# Logging
+LOG_LEVEL=INFO
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..4f82c9f
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,11 @@
+[flake8]
+select=M511
+
+exclude =
+ doc,
+ .tox,
+ .git,
+ *.yml,
+ Pipfile.*,
+ docs/*,
+ .cache/*
diff --git a/.gitleaks.toml b/.gitleaks.toml
new file mode 100644
index 0000000..5c28152
--- /dev/null
+++ b/.gitleaks.toml
@@ -0,0 +1,7 @@
+[extend]
+useDefault = true
+
+[allowlist]
+paths = [
+ '''tests/test_repository\.py''',
+]
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..768d61e
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,60 @@
+---
+default_language_version:
+ python: python3
+
+ci:
+ autofix_prs: false
+ autoupdate_commit_msg: "ci: [pre-commit.ci] pre-commit autoupdate"
+
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v6.0.0
+ hooks:
+ - id: check-added-large-files
+ - id: check-docstring-first
+ - id: check-executables-have-shebangs
+ - id: check-merge-conflict
+ - id: check-symlinks
+ - id: detect-private-key
+ - id: mixed-line-ending
+ - id: debug-statements
+ - id: trailing-whitespace
+ args: [--markdown-linebreak-ext=md]
+ - id: end-of-file-fixer
+ - id: check-ast
+ - id: check-builtin-literals
+ - id: check-toml
+
+ # flake8 retained for RedHatQE M511 plugin; ruff handles standard linting
+ - repo: https://github.com/PyCQA/flake8
+ rev: 7.3.0
+ hooks:
+ - id: flake8
+ args: [--config=.flake8]
+ additional_dependencies:
+ # Tracks main branch intentionally for latest RedHatQE flake8 plugins
+ [git+https://github.com/RedHatQE/flake8-plugins.git, flake8-mutable]
+
+ - repo: https://github.com/Yelp/detect-secrets
+ rev: v1.5.0
+ hooks:
+ - id: detect-secrets
+
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.15.2
+ hooks:
+ - id: ruff
+ - id: ruff-format
+
+ - repo: https://github.com/gitleaks/gitleaks
+ rev: v8.30.0
+ hooks:
+ - id: gitleaks
+
+ - repo: https://github.com/pre-commit/mirrors-mypy
+ rev: v1.19.1
+ hooks:
+ - id: mypy
+ exclude: (tests/)
+ additional_dependencies:
+ [types-requests, types-PyYAML, types-colorama, types-aiofiles, pydantic, types-Markdown]
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..d7a0007
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,99 @@
+# syntax=docker/dockerfile:1
+FROM python:3.12-slim AS builder
+
+WORKDIR /app
+
+# Install uv
+COPY --from=ghcr.io/astral-sh/uv:0.5.14 /uv /usr/local/bin/uv
+
+# Install git (in case uv must resolve VCS-based dependencies; pyproject currently lists none -- TODO confirm still needed)
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ git \
+ && rm -rf /var/lib/apt/lists/*
+
+# Copy project files
+COPY pyproject.toml uv.lock ./
+COPY src/ src/
+
+# Create venv and install dependencies
+RUN uv sync --frozen --no-dev
+
+# Production stage
+FROM python:3.12-slim
+
+WORKDIR /app
+
+# Install bash (needed for CLI install scripts), git (required at runtime for cloning target repositories), curl (for Claude CLI), and nodejs/npm (for Gemini CLI)
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ bash \
+ git \
+ curl \
+ nodejs \
+ npm \
+ && rm -rf /var/lib/apt/lists/*
+
+# Create non-root user, data directory, and set permissions
+# OpenShift runs containers as a random UID in the root group (GID 0)
+RUN useradd --create-home --shell /bin/bash -g 0 appuser \
+ && mkdir -p /data \
+ && chown appuser:0 /data \
+ && chmod -R g+w /data
+
+# Copy uv for runtime
+COPY --from=ghcr.io/astral-sh/uv:0.5.14 /uv /usr/local/bin/uv
+
+# Switch to non-root user for CLI installs
+USER appuser
+
+# Install Claude Code CLI (installs to ~/.local/bin)
+RUN /bin/bash -o pipefail -c "curl -fsSL https://claude.ai/install.sh | bash"
+
+# Install Cursor Agent CLI (installs to ~/.local/bin)
+RUN /bin/bash -o pipefail -c "curl -fsSL https://cursor.com/install | bash"
+
+# Configure npm for non-root global installs and install Gemini CLI
+RUN mkdir -p /home/appuser/.npm-global \
+ && npm config set prefix '/home/appuser/.npm-global' \
+ && npm install -g @google/gemini-cli
+
+# Switch to root for file copies and permission fixes
+USER root
+
+# Copy the virtual environment from builder
+COPY --chown=appuser:0 --from=builder /app/.venv /app/.venv
+
+# Copy project files needed by uv
+COPY --chown=appuser:0 --from=builder /app/pyproject.toml /app/uv.lock ./
+
+# Copy source code
+COPY --chown=appuser:0 --from=builder /app/src /app/src
+
+# Make /app group-writable for OpenShift compatibility
+RUN chmod -R g+w /app
+
+# Make appuser home accessible by OpenShift arbitrary UID
+# Only chmod directories (not files) — files are already group-readable by default.
+# Directories need group write+execute for OpenShift's arbitrary UID (in GID 0)
+# to create config/cache files at runtime.
+RUN find /home/appuser -type d -exec chmod g=u {} + \
+    && (npm cache clean --force 2>/dev/null || true) \
+    && rm -rf /home/appuser/.npm/_cacache
+
+# Switch back to non-root user for runtime
+USER appuser
+
+# Ensure CLIs are in PATH
+ENV PATH="/home/appuser/.local/bin:/home/appuser/.npm-global/bin:${PATH}"
+# Set HOME for OpenShift compatibility (random UID has no passwd entry)
+ENV HOME="/home/appuser"
+
+EXPOSE 8000
+
+HEALTHCHECK --interval=30s --timeout=10s --retries=3 \
+ CMD curl -f http://localhost:8000/health || exit 1
+
+# Use uv run for uvicorn
+# --no-sync prevents uv from attempting to modify the venv at runtime.
+# This is required for OpenShift where containers run as an arbitrary UID
+# and may not have write access to the .venv directory.
+ENTRYPOINT ["uv", "run", "--no-sync", "uvicorn", "docsfy.main:app", "--host", "0.0.0.0", "--port", "8000"]
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..95e7623
--- /dev/null
+++ b/README.md
@@ -0,0 +1,30 @@
+# docsfy
+
+AI-powered documentation generator that creates polished static HTML docs from GitHub repositories using Claude, Gemini, or Cursor CLI.
+
+[**Documentation**](https://myk-org.github.io/docsfy/) | [**GitHub**](https://github.com/myk-org/docsfy)
+
+## Quick Start
+
+```bash
+# Clone and configure
+git clone https://github.com/myk-org/docsfy.git
+cd docsfy
+cp .env.example .env
+# Edit .env with your AI provider credentials
+
+# Run
+docker compose up
+
+# Generate docs for any repo
+curl -X POST http://localhost:8000/api/generate \
+ -H "Content-Type: application/json" \
+ -d '{"repo_url": "https://github.com/org/repo"}'
+
+# Browse docs
+open http://localhost:8000/docs/repo/
+```
+
+## License
+
+Apache-2.0
diff --git a/docker-compose.yaml b/docker-compose.yaml
new file mode 100644
index 0000000..79219cd
--- /dev/null
+++ b/docker-compose.yaml
@@ -0,0 +1,13 @@
+services:
+ docsfy:
+ build: .
+ ports:
+ - "8000:8000"
+ env_file: .env
+ volumes:
+ - ./data:/data
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
diff --git a/docs/plans/2026-03-04-docsfy-design.md b/docs/plans/2026-03-04-docsfy-design.md
index 2adcd04..910b451 100644
--- a/docs/plans/2026-03-04-docsfy-design.md
+++ b/docs/plans/2026-03-04-docsfy-design.md
@@ -5,7 +5,7 @@
## Overview
-docsfy is an open-source FastAPI service that generates polished, Mintlify-quality static HTML documentation sites from GitHub repositories using AI CLI tools. It supports multiple AI providers (Claude Code, Cursor Agent, Gemini CLI), incremental updates on repository changes, and flexible hosting -- serve docs directly from the API or download the static HTML to host anywhere.
+docsfy is an open-source FastAPI service that generates polished, production-quality static HTML documentation sites from GitHub repositories using AI CLI tools. It supports multiple AI providers (Claude Code, Cursor Agent, Gemini CLI), incremental updates on repository changes, and flexible hosting -- serve docs directly from the API or download the static HTML to host anywhere.
## Architecture
diff --git a/docs/plans/2026-03-04-docsfy-implementation-plan.md b/docs/plans/2026-03-04-docsfy-implementation-plan.md
new file mode 100644
index 0000000..1e465a8
--- /dev/null
+++ b/docs/plans/2026-03-04-docsfy-implementation-plan.md
@@ -0,0 +1,2004 @@
+# docsfy Implementation Plan
+
+> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
+
+**Goal:** Build an AI-powered FastAPI service that generates production-quality static HTML documentation from GitHub repositories.
+
+**Architecture:** Clone repo → AI plans doc structure → AI generates each page as markdown → Render to static HTML. FastAPI serves the docs and provides a download endpoint for self-hosting.
+
+**Tech Stack:** Python 3.12+, FastAPI, uvicorn, aiosqlite, Jinja2, markdown, pydantic-settings, hatchling, uv
+
+---
+
+## Task 1: Project Scaffolding
+
+**Files:**
+- Create: `pyproject.toml`
+- Create: `src/docsfy/__init__.py`
+- Create: `.pre-commit-config.yaml` (copy from `/home/myakove/git/pr-test-oracle/.pre-commit-config.yaml`)
+- Create: `.flake8` (copy from `/home/myakove/git/pr-test-oracle/.flake8`)
+- Create: `tox.toml` (based on `/home/myakove/git/pr-test-oracle/tox.toml`)
+- Create: `.gitleaks.toml`
+- Create: `.env.example`
+- Create: `Dockerfile`
+- Create: `docker-compose.yaml`
+
+**Step 1: Create pyproject.toml**
+
+```toml
+[project]
+name = "docsfy"
+version = "0.1.0"
+description = "AI-powered documentation generator - generates polished static HTML docs from GitHub repos"
+requires-python = ">=3.12"
+dependencies = [
+ "fastapi",
+ "uvicorn",
+ "pydantic-settings",
+ "python-simple-logger",
+ "aiosqlite",
+ "jinja2",
+ "markdown",
+ "pygments",
+]
+
+[project.scripts]
+docsfy = "docsfy.main:run"
+
+[project.optional-dependencies]
+dev = ["pytest", "pytest-asyncio", "pytest-xdist", "httpx"]
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
+testpaths = ["tests"]
+pythonpath = ["src"]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/docsfy"]
+
+[tool.mypy]
+check_untyped_defs = true
+disallow_any_generics = true
+disallow_incomplete_defs = true
+disallow_untyped_defs = true
+no_implicit_optional = true
+show_error_codes = true
+warn_unused_ignores = true
+strict_equality = true
+extra_checks = true
+warn_unused_configs = true
+warn_redundant_casts = true
+```
+
+**Step 2: Create `src/docsfy/__init__.py`** — empty file.
+
+**Step 3: Copy `.pre-commit-config.yaml`** from `/home/myakove/git/pr-test-oracle/.pre-commit-config.yaml` verbatim, updating mypy `additional_dependencies` to: `[types-requests, types-PyYAML, types-colorama, types-aiofiles, pydantic, types-Markdown]`
+
+**Step 4: Copy `.flake8`** from `/home/myakove/git/pr-test-oracle/.flake8` verbatim.
+
+**Step 5: Create `tox.toml`**
+
+```toml
+skipsdist = true
+
+envlist = ["unused-code", "unittests"]
+
+[env.unused-code]
+deps = ["python-utility-scripts"]
+commands = [
+ [
+ "pyutils-unusedcode",
+ ],
+]
+
+[env.unittests]
+deps = ["uv"]
+commands = [
+ [
+ "uv",
+ "run",
+ "--extra",
+ "dev",
+ "pytest",
+ "-n",
+ "auto",
+ "tests",
+ ],
+]
+```
+
+**Step 6: Create `.gitleaks.toml`**
+
+```toml
+[extend]
+useDefault = true
+
+[allowlist]
+paths = [
+ '''tests/.*\.py''',
+]
+```
+
+**Step 7: Create `.env.example`**
+
+```bash
+# AI Configuration
+AI_PROVIDER=claude
+AI_MODEL=claude-opus-4-6[1m]
+AI_CLI_TIMEOUT=60
+
+# Claude - Option 1: API Key
+# ANTHROPIC_API_KEY=
+
+# Claude - Option 2: Vertex AI
+# CLAUDE_CODE_USE_VERTEX=1
+# CLOUD_ML_REGION=
+# ANTHROPIC_VERTEX_PROJECT_ID=
+
+# Gemini
+# GEMINI_API_KEY=
+
+# Cursor
+# CURSOR_API_KEY=
+
+# Logging
+LOG_LEVEL=INFO
+```
+
+**Step 8: Create `Dockerfile`**
+
+```dockerfile
+# --- Stage 1: build ---
+FROM python:3.12-slim AS builder
+
+COPY --from=ghcr.io/astral-sh/uv:0.5.14 /uv /usr/local/bin/uv
+
+WORKDIR /app
+COPY pyproject.toml uv.lock ./
+RUN uv sync --frozen --no-dev --no-install-project
+
+COPY src/ src/
+RUN uv sync --frozen --no-dev
+
+# --- Stage 2: runtime ---
+FROM python:3.12-slim
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ bash git curl ca-certificates gnupg && \
+ rm -rf /var/lib/apt/lists/*
+
+# Install Node.js 20.x (for Gemini CLI)
+RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && \
+ apt-get install -y --no-install-recommends nodejs && \
+ rm -rf /var/lib/apt/lists/*
+
+# Create non-root user (OpenShift compatible)
+RUN useradd --create-home --shell /bin/bash appuser && \
+ mkdir -p /data /home/appuser/.npm-global && \
+ chown -R appuser:0 /data /home/appuser && \
+ chmod -R g=u /data /home/appuser
+
+USER appuser
+WORKDIR /app
+
+ENV HOME=/home/appuser \
+ PATH="/home/appuser/.local/bin:/home/appuser/.npm-global/bin:${PATH}"
+
+# Install AI CLIs (unpinned for latest)
+RUN /bin/bash -o pipefail -c "curl -fsSL https://claude.ai/install.sh | bash"
+RUN /bin/bash -o pipefail -c "curl -fsSL https://cursor.com/install | bash"
+RUN npm config set prefix '/home/appuser/.npm-global' && \
+ npm install -g @google/gemini-cli
+
+COPY --from=ghcr.io/astral-sh/uv:0.5.14 /uv /usr/local/bin/uv
+COPY --from=builder --chown=appuser:0 /app /app
+
+EXPOSE 8000
+
+HEALTHCHECK --interval=30s --timeout=10s --retries=3 \
+ CMD curl -f http://localhost:8000/health || exit 1
+
+ENTRYPOINT ["uv", "run", "--no-sync", "uvicorn", "docsfy.main:app", "--host", "0.0.0.0", "--port", "8000"]
+```
+
+**Step 9: Create `docker-compose.yaml`**
+
+```yaml
+services:
+ docsfy:
+ build: .
+ ports:
+ - "8000:8000"
+ env_file: .env
+ volumes:
+ - ./data:/data
+ - ~/.config/gcloud:/home/appuser/.config/gcloud:ro
+ - ./cursor:/home/appuser/.config/cursor
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+```
+
+**Step 10: Commit**
+
+```bash
+git add pyproject.toml src/docsfy/__init__.py .pre-commit-config.yaml .flake8 tox.toml .gitleaks.toml .env.example Dockerfile docker-compose.yaml
+git commit -m "feat: project scaffolding with build config, linters, and container setup"
+```
+
+---
+
+## Task 2: Configuration Module
+
+**Files:**
+- Create: `src/docsfy/config.py`
+- Create: `tests/__init__.py`
+- Create: `tests/test_config.py`
+
+**Step 1: Write the failing test**
+
+Create `tests/__init__.py` (empty file).
+
+Create `tests/test_config.py`:
+
+```python
+from __future__ import annotations
+
+import os
+from unittest.mock import patch
+
+import pytest
+
+
+def test_default_settings() -> None:
+ from docsfy.config import Settings
+
+ with patch.dict(os.environ, {}, clear=True):
+        settings = Settings(_env_file=None)  # _env_file=None keeps a developer's local .env from affecting the defaults
+ assert settings.ai_provider == "claude"
+ assert settings.ai_model == "claude-opus-4-6[1m]"
+ assert settings.ai_cli_timeout == 60
+ assert settings.log_level == "INFO"
+ assert settings.data_dir == "/data"
+
+
+def test_custom_settings() -> None:
+ from docsfy.config import Settings
+
+ env = {
+ "AI_PROVIDER": "gemini",
+ "AI_MODEL": "gemini-2.5-pro",
+ "AI_CLI_TIMEOUT": "120",
+ "LOG_LEVEL": "DEBUG",
+ }
+ with patch.dict(os.environ, env, clear=True):
+ settings = Settings()
+ assert settings.ai_provider == "gemini"
+ assert settings.ai_model == "gemini-2.5-pro"
+ assert settings.ai_cli_timeout == 120
+ assert settings.log_level == "DEBUG"
+
+
+def test_invalid_timeout_rejected() -> None:
+ from docsfy.config import Settings
+
+ with patch.dict(os.environ, {"AI_CLI_TIMEOUT": "0"}, clear=True):
+ with pytest.raises(Exception):
+ Settings()
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `uv run --extra dev pytest tests/test_config.py -v`
+Expected: FAIL (module not found)
+
+**Step 3: Write implementation**
+
+Create `src/docsfy/config.py`:
+
+```python
+from __future__ import annotations
+
+from functools import lru_cache
+
+from pydantic import Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class Settings(BaseSettings):
+ model_config = SettingsConfigDict(
+ env_file=".env",
+ env_file_encoding="utf-8",
+ extra="ignore",
+ )
+
+ ai_provider: str = "claude"
+ ai_model: str = "claude-opus-4-6[1m]"
+ ai_cli_timeout: int = Field(default=60, gt=0)
+ log_level: str = "INFO"
+ data_dir: str = "/data"
+
+
+@lru_cache
+def get_settings() -> Settings:
+ return Settings()
+```
+
+**Step 4: Run test to verify it passes**
+
+Run: `uv run --extra dev pytest tests/test_config.py -v`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add src/docsfy/config.py tests/__init__.py tests/test_config.py
+git commit -m "feat: add configuration module with pydantic-settings"
+```
+
+---
+
+## Task 3: Pydantic Models
+
+**Files:**
+- Create: `src/docsfy/models.py`
+- Create: `tests/test_models.py`
+
+**Step 1: Write the failing test**
+
+Create `tests/test_models.py`:
+
+```python
+from __future__ import annotations
+
+import pytest
+
+
+def test_generate_request_valid_https() -> None:
+ from docsfy.models import GenerateRequest
+
+ req = GenerateRequest(repo_url="https://github.com/org/repo.git")
+ assert req.repo_url == "https://github.com/org/repo.git"
+
+
+def test_generate_request_valid_ssh() -> None:
+ from docsfy.models import GenerateRequest
+
+ req = GenerateRequest(repo_url="git@github.com:org/repo.git")
+ assert req.repo_url == "git@github.com:org/repo.git"
+
+
+def test_generate_request_extracts_project_name() -> None:
+ from docsfy.models import GenerateRequest
+
+ req = GenerateRequest(repo_url="https://github.com/org/my-repo.git")
+ assert req.project_name == "my-repo"
+
+ req2 = GenerateRequest(repo_url="https://github.com/org/my-repo")
+ assert req2.project_name == "my-repo"
+
+
+def test_generate_request_invalid_url() -> None:
+ from docsfy.models import GenerateRequest
+
+ with pytest.raises(Exception):
+ GenerateRequest(repo_url="not-a-url")
+
+
+def test_doc_page_model() -> None:
+ from docsfy.models import DocPage
+
+ page = DocPage(slug="intro", title="Introduction", description="Project overview")
+ assert page.slug == "intro"
+
+
+def test_doc_plan_model() -> None:
+ from docsfy.models import DocPage, DocPlan, NavGroup
+
+ plan = DocPlan(
+ project_name="my-repo",
+ tagline="A cool project",
+ navigation=[
+ NavGroup(
+ group="Getting Started",
+ pages=[DocPage(slug="intro", title="Introduction", description="Overview")],
+ )
+ ],
+ )
+ assert plan.project_name == "my-repo"
+ assert len(plan.navigation) == 1
+ assert len(plan.navigation[0].pages) == 1
+
+
+def test_project_status_model() -> None:
+ from docsfy.models import ProjectStatus
+
+ status = ProjectStatus(
+ name="my-repo",
+ repo_url="https://github.com/org/my-repo.git",
+ status="ready",
+ )
+ assert status.name == "my-repo"
+ assert status.status == "ready"
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `uv run --extra dev pytest tests/test_models.py -v`
+Expected: FAIL
+
+**Step 3: Write implementation**
+
+Create `src/docsfy/models.py`:
+
+```python
+from __future__ import annotations
+
+import re
+from typing import Literal
+
+from pydantic import BaseModel, Field, field_validator
+
+
+class GenerateRequest(BaseModel):
+ repo_url: str = Field(description="Git repository URL (HTTPS or SSH)")
+ ai_provider: Literal["claude", "gemini", "cursor"] | None = None
+ ai_model: str | None = None
+ ai_cli_timeout: int | None = Field(default=None, gt=0)
+
+ @field_validator("repo_url")
+ @classmethod
+ def validate_repo_url(cls, v: str) -> str:
+ https_pattern = r"^https?://[\w.\-]+/[\w.\-]+/[\w.\-]+(\.git)?$"
+ ssh_pattern = r"^git@[\w.\-]+:[\w.\-]+/[\w.\-]+(\.git)?$"
+ if not re.match(https_pattern, v) and not re.match(ssh_pattern, v):
+ msg = f"Invalid git repository URL: '{v}'"
+ raise ValueError(msg)
+ return v
+
+ @property
+ def project_name(self) -> str:
+ name = self.repo_url.rstrip("/").split("/")[-1]
+ if name.endswith(".git"):
+ name = name[:-4]
+ return name
+
+
+class DocPage(BaseModel):
+ slug: str
+ title: str
+ description: str = ""
+
+
+class NavGroup(BaseModel):
+ group: str
+ pages: list[DocPage]
+
+
+class DocPlan(BaseModel):
+ project_name: str
+ tagline: str = ""
+ navigation: list[NavGroup] = Field(default_factory=list)
+
+
+class ProjectStatus(BaseModel):
+ name: str
+ repo_url: str
+ status: Literal["generating", "ready", "error"] = "generating"
+ last_commit_sha: str | None = None
+ last_generated: str | None = None
+ error_message: str | None = None
+ page_count: int = 0
+```
+
+**Step 4: Run test to verify it passes**
+
+Run: `uv run --extra dev pytest tests/test_models.py -v`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add src/docsfy/models.py tests/test_models.py
+git commit -m "feat: add pydantic models for requests, doc plans, and project status"
+```
+
+---
+
+## Task 4: SQLite Storage Layer
+
+**Files:**
+- Create: `src/docsfy/storage.py`
+- Create: `tests/test_storage.py`
+
+**Step 1: Write the failing test**
+
+Create `tests/test_storage.py`:
+
+```python
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+
+@pytest.fixture
+async def db_path(tmp_path: Path) -> Path:
+ import docsfy.storage as storage
+
+ db = tmp_path / "test.db"
+ storage.DB_PATH = db
+ storage.DATA_DIR = tmp_path
+ storage.PROJECTS_DIR = tmp_path / "projects"
+ await storage.init_db()
+ return db
+
+
+async def test_init_db_creates_table(db_path: Path) -> None:
+ assert db_path.exists()
+
+
+async def test_save_and_get_project(db_path: Path) -> None:
+ from docsfy.storage import get_project, save_project
+
+ await save_project(
+ name="my-repo",
+ repo_url="https://github.com/org/my-repo.git",
+ status="generating",
+ )
+ project = await get_project("my-repo")
+ assert project is not None
+ assert project["name"] == "my-repo"
+ assert project["repo_url"] == "https://github.com/org/my-repo.git"
+ assert project["status"] == "generating"
+
+
+async def test_update_project_status(db_path: Path) -> None:
+ from docsfy.storage import get_project, save_project, update_project_status
+
+ await save_project(name="my-repo", repo_url="https://github.com/org/my-repo.git", status="generating")
+ await update_project_status("my-repo", status="ready", last_commit_sha="abc123", page_count=5)
+ project = await get_project("my-repo")
+ assert project is not None
+ assert project["status"] == "ready"
+ assert project["last_commit_sha"] == "abc123"
+ assert project["page_count"] == 5
+
+
+async def test_list_projects(db_path: Path) -> None:
+ from docsfy.storage import list_projects, save_project
+
+ await save_project(name="repo-a", repo_url="https://github.com/org/repo-a.git", status="ready")
+ await save_project(name="repo-b", repo_url="https://github.com/org/repo-b.git", status="generating")
+ projects = await list_projects()
+ assert len(projects) == 2
+
+
+async def test_delete_project(db_path: Path) -> None:
+ from docsfy.storage import delete_project, get_project, save_project
+
+ await save_project(name="my-repo", repo_url="https://github.com/org/my-repo.git", status="ready")
+ deleted = await delete_project("my-repo")
+ assert deleted is True
+ project = await get_project("my-repo")
+ assert project is None
+
+
+async def test_delete_nonexistent_project(db_path: Path) -> None:
+ from docsfy.storage import delete_project
+
+ deleted = await delete_project("no-such-repo")
+ assert deleted is False
+
+
+async def test_get_nonexistent_project(db_path: Path) -> None:
+ from docsfy.storage import get_project
+
+ project = await get_project("no-such-repo")
+ assert project is None
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `uv run --extra dev pytest tests/test_storage.py -v`
+Expected: FAIL
+
+**Step 3: Write implementation**
+
+Create `src/docsfy/storage.py`:
+
+```python
+from __future__ import annotations
+
+import os
+from pathlib import Path
+
+import aiosqlite
+
+DB_PATH = Path(os.getenv("DATA_DIR", "/data")) / "docsfy.db"
+DATA_DIR = Path(os.getenv("DATA_DIR", "/data"))
+PROJECTS_DIR = DATA_DIR / "projects"
+
+
+async def init_db() -> None:
+ DB_PATH.parent.mkdir(parents=True, exist_ok=True)
+ PROJECTS_DIR.mkdir(parents=True, exist_ok=True)
+
+ async with aiosqlite.connect(DB_PATH) as db:
+ await db.execute("""
+ CREATE TABLE IF NOT EXISTS projects (
+ name TEXT PRIMARY KEY,
+ repo_url TEXT NOT NULL,
+ status TEXT NOT NULL DEFAULT 'generating',
+ last_commit_sha TEXT,
+ last_generated TEXT,
+ page_count INTEGER DEFAULT 0,
+ error_message TEXT,
+ plan_json TEXT,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ )
+ """)
+ await db.commit()
+
+
+async def save_project(name: str, repo_url: str, status: str = "generating") -> None:
+ async with aiosqlite.connect(DB_PATH) as db:
+ await db.execute(
+ "INSERT OR REPLACE INTO projects (name, repo_url, status, updated_at) VALUES (?, ?, ?, CURRENT_TIMESTAMP)",
+ (name, repo_url, status),
+ )
+ await db.commit()
+
+
+async def update_project_status(
+ name: str,
+ status: str,
+ last_commit_sha: str | None = None,
+ page_count: int | None = None,
+ error_message: str | None = None,
+ plan_json: str | None = None,
+) -> None:
+ async with aiosqlite.connect(DB_PATH) as db:
+ fields = ["status = ?", "updated_at = CURRENT_TIMESTAMP"]
+ values: list[str | int | None] = [status]
+
+ if last_commit_sha is not None:
+ fields.append("last_commit_sha = ?")
+ values.append(last_commit_sha)
+ if page_count is not None:
+ fields.append("page_count = ?")
+ values.append(page_count)
+ if error_message is not None:
+ fields.append("error_message = ?")
+ values.append(error_message)
+ if plan_json is not None:
+ fields.append("plan_json = ?")
+ values.append(plan_json)
+ if status == "ready":
+ fields.append("last_generated = CURRENT_TIMESTAMP")
+
+ values.append(name)
+ await db.execute(f"UPDATE projects SET {', '.join(fields)} WHERE name = ?", values)
+ await db.commit()
+
+
+async def get_project(name: str) -> dict[str, object] | None:
+ async with aiosqlite.connect(DB_PATH) as db:
+ db.row_factory = aiosqlite.Row
+ cursor = await db.execute("SELECT * FROM projects WHERE name = ?", (name,))
+ row = await cursor.fetchone()
+ if row:
+ return dict(row)
+ return None
+
+
+async def list_projects() -> list[dict[str, object]]:
+ async with aiosqlite.connect(DB_PATH) as db:
+ db.row_factory = aiosqlite.Row
+ cursor = await db.execute(
+ "SELECT name, repo_url, status, last_commit_sha, last_generated, page_count FROM projects ORDER BY updated_at DESC"
+ )
+ rows = await cursor.fetchall()
+ return [dict(row) for row in rows]
+
+
+async def delete_project(name: str) -> bool:
+ async with aiosqlite.connect(DB_PATH) as db:
+ cursor = await db.execute("DELETE FROM projects WHERE name = ?", (name,))
+ await db.commit()
+ return cursor.rowcount > 0
+
+
+def get_project_dir(name: str) -> Path:
+ return PROJECTS_DIR / name
+
+
+def get_project_site_dir(name: str) -> Path:
+ return PROJECTS_DIR / name / "site"
+
+
+def get_project_cache_dir(name: str) -> Path:
+ return PROJECTS_DIR / name / "cache" / "pages"
+```
+
+**Step 4: Run test to verify it passes**
+
+Run: `uv run --extra dev pytest tests/test_storage.py -v`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add src/docsfy/storage.py tests/test_storage.py
+git commit -m "feat: add SQLite storage layer for project metadata"
+```
+
+---
+
+## Task 5: AI CLI Provider Module
+
+**Files:**
+- Create: `src/docsfy/ai_client.py`
+- Create: `tests/test_ai_client.py`
+
+**Step 1: Write the failing test**
+
+Create `tests/test_ai_client.py`:
+
+```python
+from __future__ import annotations
+
+from pathlib import Path
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+
+def test_provider_config_registry() -> None:
+ from docsfy.ai_client import PROVIDER_CONFIG, VALID_AI_PROVIDERS
+
+ assert "claude" in PROVIDER_CONFIG
+ assert "gemini" in PROVIDER_CONFIG
+ assert "cursor" in PROVIDER_CONFIG
+ assert VALID_AI_PROVIDERS == {"claude", "gemini", "cursor"}
+
+
+def test_build_claude_cmd() -> None:
+ from docsfy.ai_client import PROVIDER_CONFIG
+
+ config = PROVIDER_CONFIG["claude"]
+ cmd = config.build_cmd(config.binary, "claude-opus-4-6", None)
+ assert cmd == ["claude", "--model", "claude-opus-4-6", "--dangerously-skip-permissions", "-p"]
+ assert config.uses_own_cwd is False
+
+
+def test_build_gemini_cmd() -> None:
+ from docsfy.ai_client import PROVIDER_CONFIG
+
+ config = PROVIDER_CONFIG["gemini"]
+ cmd = config.build_cmd(config.binary, "gemini-2.5-pro", None)
+ assert cmd == ["gemini", "--model", "gemini-2.5-pro", "--yolo"]
+ assert config.uses_own_cwd is False
+
+
+def test_build_cursor_cmd() -> None:
+ from docsfy.ai_client import PROVIDER_CONFIG
+
+ config = PROVIDER_CONFIG["cursor"]
+ cmd = config.build_cmd(config.binary, "claude-sonnet", Path("/tmp/repo"))
+ assert cmd == ["agent", "--force", "--model", "claude-sonnet", "--print", "--workspace", "/tmp/repo"]
+ assert config.uses_own_cwd is True
+
+
+def test_build_cursor_cmd_no_cwd() -> None:
+ from docsfy.ai_client import PROVIDER_CONFIG
+
+ config = PROVIDER_CONFIG["cursor"]
+ cmd = config.build_cmd(config.binary, "claude-sonnet", None)
+ assert "--workspace" not in cmd
+
+
+async def test_call_ai_cli_unknown_provider() -> None:
+ from docsfy.ai_client import call_ai_cli
+
+ success, msg = await call_ai_cli("hello", ai_provider="unknown", ai_model="test")
+ assert success is False
+ assert "Unknown AI provider" in msg
+
+
+async def test_call_ai_cli_no_model() -> None:
+ from docsfy.ai_client import call_ai_cli
+
+ success, msg = await call_ai_cli("hello", ai_provider="claude", ai_model="")
+ assert success is False
+ assert "No AI model" in msg
+
+
+async def test_call_ai_cli_success() -> None:
+ from docsfy.ai_client import call_ai_cli
+
+ mock_result = MagicMock()
+ mock_result.returncode = 0
+ mock_result.stdout = "AI response here"
+ mock_result.stderr = ""
+
+ with patch("docsfy.ai_client.asyncio.to_thread", return_value=mock_result):
+ success, output = await call_ai_cli("test prompt", ai_provider="claude", ai_model="opus")
+ assert success is True
+ assert output == "AI response here"
+
+
+async def test_call_ai_cli_nonzero_exit() -> None:
+ from docsfy.ai_client import call_ai_cli
+
+ mock_result = MagicMock()
+ mock_result.returncode = 1
+ mock_result.stdout = ""
+ mock_result.stderr = "some error"
+
+ with patch("docsfy.ai_client.asyncio.to_thread", return_value=mock_result):
+ success, output = await call_ai_cli("test", ai_provider="claude", ai_model="opus")
+ assert success is False
+ assert "some error" in output
+
+
+async def test_call_ai_cli_timeout() -> None:
+ import subprocess
+
+ from docsfy.ai_client import call_ai_cli
+
+ with patch("docsfy.ai_client.asyncio.to_thread", side_effect=subprocess.TimeoutExpired("cmd", 60)):
+ success, output = await call_ai_cli("test", ai_provider="claude", ai_model="opus", ai_cli_timeout=1)
+ assert success is False
+ assert "timed out" in output
+
+
+async def test_call_ai_cli_binary_not_found() -> None:
+ from docsfy.ai_client import call_ai_cli
+
+ with patch("docsfy.ai_client.asyncio.to_thread", side_effect=FileNotFoundError()):
+ success, output = await call_ai_cli("test", ai_provider="claude", ai_model="opus")
+ assert success is False
+ assert "not found" in output
+
+
+async def test_check_ai_cli_available_success() -> None:
+ from docsfy.ai_client import check_ai_cli_available
+
+ mock_result = MagicMock()
+ mock_result.returncode = 0
+ mock_result.stdout = "Hello!"
+ mock_result.stderr = ""
+
+ with patch("docsfy.ai_client.asyncio.to_thread", return_value=mock_result):
+ success, msg = await check_ai_cli_available("claude", "opus")
+ assert success is True
+
+
+async def test_check_ai_cli_available_failure() -> None:
+ from docsfy.ai_client import check_ai_cli_available
+
+ mock_result = MagicMock()
+ mock_result.returncode = 1
+ mock_result.stdout = ""
+ mock_result.stderr = "auth error"
+
+ with patch("docsfy.ai_client.asyncio.to_thread", return_value=mock_result):
+ success, msg = await check_ai_cli_available("claude", "opus")
+ assert success is False
+ assert "sanity check failed" in msg
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `uv run --extra dev pytest tests/test_ai_client.py -v`
+Expected: FAIL
+
+**Step 3: Write implementation**
+
+Create `src/docsfy/ai_client.py`:
+
+```python
+from __future__ import annotations
+
+import asyncio
+import os
+import subprocess
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Callable
+
+from simple_logger.logger import get_logger
+
+logger = get_logger(name=__name__)
+
+
+@dataclass(frozen=True)
+class ProviderConfig:
+ binary: str
+ build_cmd: Callable[[str, str, Path | None], list[str]]
+ uses_own_cwd: bool = False
+
+
+def _build_claude_cmd(binary: str, model: str, _cwd: Path | None) -> list[str]:
+ return [binary, "--model", model, "--dangerously-skip-permissions", "-p"]
+
+
+def _build_gemini_cmd(binary: str, model: str, _cwd: Path | None) -> list[str]:
+ return [binary, "--model", model, "--yolo"]
+
+
+def _build_cursor_cmd(binary: str, model: str, cwd: Path | None) -> list[str]:
+ cmd = [binary, "--force", "--model", model, "--print"]
+ if cwd:
+ cmd.extend(["--workspace", str(cwd)])
+ return cmd
+
+
+PROVIDER_CONFIG: dict[str, ProviderConfig] = {
+ "claude": ProviderConfig(binary="claude", build_cmd=_build_claude_cmd),
+ "gemini": ProviderConfig(binary="gemini", build_cmd=_build_gemini_cmd),
+ "cursor": ProviderConfig(binary="agent", uses_own_cwd=True, build_cmd=_build_cursor_cmd),
+}
+VALID_AI_PROVIDERS = set(PROVIDER_CONFIG.keys())
+
+
+def _get_ai_cli_timeout() -> int:
+ raw = os.getenv("AI_CLI_TIMEOUT", "60")
+ try:
+ value = int(raw)
+ if value <= 0:
+ raise ValueError
+ return value
+ except (ValueError, TypeError):
+ logger.warning(f"Invalid AI_CLI_TIMEOUT={raw}; defaulting to 60")
+ return 60
+
+
+AI_CLI_TIMEOUT = _get_ai_cli_timeout()
+
+
+async def call_ai_cli(
+    prompt: str,
+    cwd: Path | None = None,
+    ai_provider: str = "",
+    ai_model: str = "",
+    ai_cli_timeout: int | None = None,
+) -> tuple[bool, str]:
+    """Run the configured AI provider CLI, feeding `prompt` on stdin.
+
+    Returns (success, output): on success, output is the CLI's stdout;
+    on failure, output is a human-readable error message.
+    """
+    config = PROVIDER_CONFIG.get(ai_provider)
+    if not config:
+        return (False, f"Unknown AI provider: '{ai_provider}'. Valid: {', '.join(sorted(VALID_AI_PROVIDERS))}")
+
+    if not ai_model:
+        return (False, "No AI model configured. Set AI_MODEL environment variable.")
+
+    provider_info = f"{ai_provider.upper()} ({ai_model})"
+    cmd = config.build_cmd(config.binary, ai_model, cwd)
+    # Some providers (cursor) receive the workspace via their own CLI flag,
+    # so the subprocess working directory must stay unset for them.
+    subprocess_cwd = None if config.uses_own_cwd else cwd
+    effective_timeout = ai_cli_timeout or AI_CLI_TIMEOUT
+    # Timeouts are expressed in minutes; subprocess.run expects seconds.
+    timeout = effective_timeout * 60
+
+    logger.info(f"Calling {provider_info} CLI")
+
+    try:
+        # Run the blocking subprocess call off the event loop via a thread.
+        result = await asyncio.to_thread(
+            subprocess.run,
+            cmd,
+            cwd=subprocess_cwd,
+            capture_output=True,
+            text=True,
+            timeout=timeout,
+            input=prompt,
+        )
+    except subprocess.TimeoutExpired:
+        return (False, f"{provider_info} CLI error: timed out after {effective_timeout} minutes")
+    except FileNotFoundError:
+        return (False, f"{provider_info} CLI error: '{config.binary}' not found in PATH")
+
+    if result.returncode != 0:
+        # Prefer stderr, fall back to stdout, then a fixed placeholder.
+        error_detail = result.stderr or result.stdout or "unknown error (no output)"
+        return False, f"{provider_info} CLI error: {error_detail}"
+
+    logger.debug(f"{provider_info} CLI response length: {len(result.stdout)} chars")
+    return True, result.stdout
+
+
+async def check_ai_cli_available(ai_provider: str, ai_model: str) -> tuple[bool, str]:
+ config = PROVIDER_CONFIG.get(ai_provider)
+ if not config:
+ return (False, f"Unknown AI provider: '{ai_provider}'")
+ if not ai_model:
+ return (False, "No AI model configured")
+
+ provider_info = f"{ai_provider.upper()} ({ai_model})"
+ sanity_cmd = config.build_cmd(config.binary, ai_model, None)
+
+ try:
+ sanity_result = await asyncio.to_thread(
+ subprocess.run, sanity_cmd, cwd=None, capture_output=True, text=True, timeout=60, input="Hi",
+ )
+ if sanity_result.returncode != 0:
+ error_detail = sanity_result.stderr or sanity_result.stdout or "unknown"
+ return False, f"{provider_info} sanity check failed: {error_detail}"
+ except subprocess.TimeoutExpired:
+ return False, f"{provider_info} sanity check timed out"
+ except FileNotFoundError:
+ return False, f"{provider_info}: '{config.binary}' not found in PATH"
+
+ return True, ""
+```
+
+**Step 4: Run test to verify it passes**
+
+Run: `uv run --extra dev pytest tests/test_ai_client.py -v`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add src/docsfy/ai_client.py tests/test_ai_client.py
+git commit -m "feat: add AI CLI provider module with claude, gemini, and cursor support"
+```
+
+---
+
+## Task 6: JSON Response Parser
+
+**Files:**
+- Create: `src/docsfy/json_parser.py`
+- Create: `tests/test_json_parser.py`
+
+**Step 1: Write the failing test**
+
+Create `tests/test_json_parser.py`:
+
+```python
+from __future__ import annotations
+
+import json
+
+
+def test_parse_direct_json() -> None:
+ from docsfy.json_parser import parse_json_response
+
+ data = {"project_name": "test", "navigation": []}
+ result = parse_json_response(json.dumps(data))
+ assert result == data
+
+
+def test_parse_json_with_surrounding_text() -> None:
+ from docsfy.json_parser import parse_json_response
+
+ raw = 'Here is the plan:\n{"project_name": "test", "navigation": []}\nDone!'
+ result = parse_json_response(raw)
+ assert result is not None
+ assert result["project_name"] == "test"
+
+
+def test_parse_json_from_code_block() -> None:
+ from docsfy.json_parser import parse_json_response
+
+ raw = '```json\n{"project_name": "test", "navigation": []}\n```'
+ result = parse_json_response(raw)
+ assert result is not None
+ assert result["project_name"] == "test"
+
+
+def test_parse_json_nested_braces() -> None:
+ from docsfy.json_parser import parse_json_response
+
+ data = {"project_name": "test", "meta": {"key": "value"}, "navigation": []}
+ raw = f"Some text before {json.dumps(data)} some text after"
+ result = parse_json_response(raw)
+ assert result is not None
+ assert result["meta"]["key"] == "value"
+
+
+def test_parse_json_returns_none_for_garbage() -> None:
+ from docsfy.json_parser import parse_json_response
+
+ result = parse_json_response("this is not json at all")
+ assert result is None
+
+
+def test_parse_json_empty_string() -> None:
+ from docsfy.json_parser import parse_json_response
+
+ result = parse_json_response("")
+ assert result is None
+
+
+def test_parse_json_with_escaped_quotes() -> None:
+ from docsfy.json_parser import parse_json_response
+
+ raw = '{"project_name": "test \\"quoted\\" name", "navigation": []}'
+ result = parse_json_response(raw)
+ assert result is not None
+ assert "quoted" in result["project_name"]
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `uv run --extra dev pytest tests/test_json_parser.py -v`
+Expected: FAIL
+
+**Step 3: Write implementation**
+
+Create `src/docsfy/json_parser.py`:
+
+```python
+from __future__ import annotations
+
+import json
+import re
+
+from simple_logger.logger import get_logger
+
+logger = get_logger(name=__name__)
+
+
+def parse_json_response(raw_text: str) -> dict | None:
+    """Parse a JSON object out of raw AI CLI output.
+
+    Tries three strategies in order: direct parse, balanced-brace
+    extraction, then markdown code-block extraction. Returns the first
+    successfully parsed object, or None if nothing parses.
+    """
+    text = raw_text.strip()
+    if not text:
+        return None
+
+    # Strategy 1: direct parse when the response starts as bare JSON.
+    if text.startswith("{"):
+        try:
+            return json.loads(text)
+        except (json.JSONDecodeError, ValueError):
+            pass
+
+    # Strategy 2: locate the first balanced {...} span anywhere in the text.
+    result = _extract_json_by_braces(text)
+    if result is not None:
+        return result
+
+    # Strategy 3: look inside ``` fenced markdown code blocks.
+    result = _extract_json_from_code_blocks(text)
+    if result is not None:
+        return result
+
+    logger.warning("Failed to parse AI response as JSON")
+    return None
+
+
+def _extract_json_by_braces(text: str) -> dict | None:
+    """Extract the first balanced {...} span from `text` and JSON-parse it.
+
+    Performs a character scan that tracks brace depth while ignoring braces
+    inside double-quoted strings (including escaped quotes). Returns the
+    parsed object, or None if no balanced span exists or it is not valid JSON.
+    """
+    first_brace = text.find("{")
+    if first_brace == -1:
+        return None
+
+    depth = 0          # current brace nesting depth
+    in_string = False  # inside a double-quoted JSON string
+    escape_next = False  # previous char was a backslash inside a string
+    end_pos = -1       # index of the closing brace of the outermost object
+
+    for i in range(first_brace, len(text)):
+        char = text[i]
+        if escape_next:
+            # This char is escaped; it can never open/close a string or brace.
+            escape_next = False
+            continue
+        if char == "\\":
+            # Backslash only escapes the next char when inside a string.
+            if in_string:
+                escape_next = True
+            continue
+        if char == '"':
+            in_string = not in_string
+            continue
+        if in_string:
+            # Braces inside string literals do not affect nesting depth.
+            continue
+        if char == "{":
+            depth += 1
+        elif char == "}":
+            depth -= 1
+            if depth == 0:
+                end_pos = i
+                break
+
+    if end_pos == -1:
+        return None
+
+    json_str = text[first_brace : end_pos + 1]
+    try:
+        return json.loads(json_str)
+    except (json.JSONDecodeError, ValueError):
+        return None
+
+
+def _extract_json_from_code_blocks(text: str) -> dict | None:
+    """Find JSON inside ``` fenced markdown code blocks.
+
+    Scans every fenced block (optionally tagged ``json``), attempting a
+    direct parse first, then falling back to balanced-brace extraction.
+    Returns the first parsed object, or None.
+    """
+    blocks = re.findall(r"```(?:json)?\s*\n?(.*?)```", text, re.DOTALL)
+    for block_content in blocks:
+        block_content = block_content.strip()
+        # Skip empty blocks and blocks that clearly contain no JSON object.
+        if not block_content or "{" not in block_content:
+            continue
+        try:
+            return json.loads(block_content)
+        except (json.JSONDecodeError, ValueError):
+            pass
+        # The block may contain prose around the JSON; try brace matching.
+        result = _extract_json_by_braces(block_content)
+        if result is not None:
+            return result
+    return None
+```
+
+**Step 4: Run test to verify it passes**
+
+Run: `uv run --extra dev pytest tests/test_json_parser.py -v`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add src/docsfy/json_parser.py tests/test_json_parser.py
+git commit -m "feat: add multi-strategy JSON response parser for AI CLI output"
+```
+
+---
+
+## Task 7: Repository Cloning
+
+**Files:**
+- Create: `src/docsfy/repository.py`
+- Create: `tests/test_repository.py`
+
+**Step 1: Write the failing test**
+
+Create `tests/test_repository.py`:
+
+```python
+from __future__ import annotations
+
+from pathlib import Path
+from unittest.mock import MagicMock, patch
+
+
+def test_extract_repo_name_https() -> None:
+ from docsfy.repository import extract_repo_name
+
+ assert extract_repo_name("https://github.com/org/my-repo.git") == "my-repo"
+ assert extract_repo_name("https://github.com/org/my-repo") == "my-repo"
+
+
+def test_extract_repo_name_ssh() -> None:
+ from docsfy.repository import extract_repo_name
+
+ assert extract_repo_name("git@github.com:org/my-repo.git") == "my-repo"
+
+
+def test_clone_repo_success(tmp_path: Path) -> None:
+ from docsfy.repository import clone_repo
+
+ with patch("docsfy.repository.subprocess.run") as mock_run:
+ mock_run.side_effect = [
+ MagicMock(returncode=0, stdout="", stderr=""),
+ MagicMock(returncode=0, stdout="abc123def\n", stderr=""),
+ ]
+ repo_path, sha = clone_repo("https://github.com/org/repo.git", tmp_path)
+
+ assert repo_path == tmp_path / "repo"
+ assert sha == "abc123def" # pragma: allowlist secret
+
+
+def test_clone_repo_failure(tmp_path: Path) -> None:
+ import pytest
+
+ from docsfy.repository import clone_repo
+
+ with patch("docsfy.repository.subprocess.run") as mock_run:
+ mock_run.return_value = MagicMock(returncode=128, stdout="", stderr="fatal: repo not found")
+ with pytest.raises(RuntimeError, match="Clone failed"):
+ clone_repo("https://github.com/org/bad-repo.git", tmp_path)
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `uv run --extra dev pytest tests/test_repository.py -v`
+Expected: FAIL
+
+**Step 3: Write implementation**
+
+Create `src/docsfy/repository.py`:
+
+```python
+from __future__ import annotations
+
+import subprocess
+from pathlib import Path
+
+from simple_logger.logger import get_logger
+
+logger = get_logger(name=__name__)
+
+
+def extract_repo_name(repo_url: str) -> str:
+ name = repo_url.rstrip("/").split("/")[-1]
+ if name.endswith(".git"):
+ name = name[:-4]
+ if ":" in name:
+ name = name.split(":")[-1].split("/")[-1]
+ return name
+
+
+def clone_repo(repo_url: str, base_dir: Path) -> tuple[Path, str]:
+ repo_name = extract_repo_name(repo_url)
+ repo_path = base_dir / repo_name
+
+ logger.info(f"Cloning {repo_url} to {repo_path}")
+
+ result = subprocess.run(
+ ["git", "clone", "--depth", "1", repo_url, str(repo_path)],
+ capture_output=True,
+ text=True,
+ )
+ if result.returncode != 0:
+ msg = f"Clone failed: {result.stderr or result.stdout}"
+ raise RuntimeError(msg)
+
+ sha_result = subprocess.run(
+ ["git", "rev-parse", "HEAD"],
+ cwd=repo_path,
+ capture_output=True,
+ text=True,
+ )
+ commit_sha = sha_result.stdout.strip()
+
+ logger.info(f"Cloned {repo_name} at commit {commit_sha[:8]}")
+ return repo_path, commit_sha
+```
+
+**Step 4: Run test to verify it passes**
+
+Run: `uv run --extra dev pytest tests/test_repository.py -v`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add src/docsfy/repository.py tests/test_repository.py
+git commit -m "feat: add repository cloning with shallow clone support"
+```
+
+---
+
+## Task 8: AI Prompt Templates
+
+**Files:**
+- Create: `src/docsfy/prompts.py`
+- Create: `tests/test_prompts.py`
+
+**Step 1: Write the failing test**
+
+Create `tests/test_prompts.py`:
+
+```python
+from __future__ import annotations
+
+
+def test_build_planner_prompt() -> None:
+ from docsfy.prompts import build_planner_prompt
+
+ prompt = build_planner_prompt("my-repo")
+ assert "my-repo" in prompt
+ assert "JSON" in prompt
+ assert "project_name" in prompt
+ assert "navigation" in prompt
+
+
+def test_build_page_prompt() -> None:
+ from docsfy.prompts import build_page_prompt
+
+ prompt = build_page_prompt(
+ project_name="my-repo",
+ page_title="Installation",
+ page_description="How to install the project",
+ )
+ assert "my-repo" in prompt
+ assert "Installation" in prompt
+ assert "markdown" in prompt.lower()
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `uv run --extra dev pytest tests/test_prompts.py -v`
+Expected: FAIL
+
+**Step 3: Write implementation**
+
+Create `src/docsfy/prompts.py`:
+
+```python
+from __future__ import annotations
+
+PLAN_SCHEMA = """{
+ "project_name": "string - project name",
+ "tagline": "string - one-line project description",
+ "navigation": [
+ {
+ "group": "string - section group name",
+ "pages": [
+ {
+ "slug": "string - URL-friendly page identifier",
+ "title": "string - human-readable page title",
+ "description": "string - brief description of what this page covers"
+ }
+ ]
+ }
+ ]
+}"""
+
+
+def build_planner_prompt(project_name: str) -> str:
+ return f"""You are a technical documentation planner. Explore this repository thoroughly.
+Read the README, source code, configuration files, tests, and any existing documentation.
+Understand what this project does, how it works, and who uses it.
+
+Then create a documentation plan as a JSON object. The plan should cover:
+- Introduction and overview
+- Installation / getting started
+- Configuration (if applicable)
+- Usage guides for key features
+- API reference (if the project has an API)
+- Any other sections that would help users understand and use this project
+
+Project name: {project_name}
+
+CRITICAL: Your response must be ONLY a valid JSON object. No text before or after. No markdown code blocks.
+
+Output format:
+{PLAN_SCHEMA}"""
+
+
+def build_page_prompt(project_name: str, page_title: str, page_description: str) -> str:
+ return f"""You are a technical documentation writer. Explore this repository to write
+the "{page_title}" page for the {project_name} documentation.
+
+Page description: {page_description}
+
+Explore the codebase as needed. Read source files, configs, tests, and existing docs
+to write comprehensive, accurate documentation.
+
+Write in markdown format. Include:
+- Clear explanations
+- Code examples from the actual codebase (not made up)
+- Configuration snippets where relevant
+
+Use these callout formats for special content:
+- Notes: > **Note:** text
+- Warnings: > **Warning:** text
+- Tips: > **Tip:** text
+
+Output ONLY the markdown content for this page. No wrapping, no explanation."""
+```
+
+**Step 4: Run test to verify it passes**
+
+Run: `uv run --extra dev pytest tests/test_prompts.py -v`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add src/docsfy/prompts.py tests/test_prompts.py
+git commit -m "feat: add AI prompt templates for planner and page generation"
+```
+
+---
+
+## Task 9: Documentation Generator (Orchestrator)
+
+**Files:**
+- Create: `src/docsfy/generator.py`
+- Create: `tests/test_generator.py`
+
+**Step 1: Write the failing test**
+
+Create `tests/test_generator.py`:
+
+```python
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from unittest.mock import patch
+
+import pytest
+
+
+@pytest.fixture
+def sample_plan() -> dict:
+ return {
+ "project_name": "test-repo",
+ "tagline": "A test project",
+ "navigation": [
+ {
+ "group": "Getting Started",
+ "pages": [
+ {"slug": "introduction", "title": "Introduction", "description": "Overview"},
+ {"slug": "quickstart", "title": "Quick Start", "description": "Get started fast"},
+ ],
+ }
+ ],
+ }
+
+
+async def test_run_planner(tmp_path: Path, sample_plan: dict) -> None:
+ from docsfy.generator import run_planner
+
+ with patch("docsfy.generator.call_ai_cli", return_value=(True, json.dumps(sample_plan))):
+ plan = await run_planner(repo_path=tmp_path, project_name="test-repo", ai_provider="claude", ai_model="opus")
+
+ assert plan is not None
+ assert plan["project_name"] == "test-repo"
+ assert len(plan["navigation"]) == 1
+
+
+async def test_run_planner_ai_failure(tmp_path: Path) -> None:
+ from docsfy.generator import run_planner
+
+ with patch("docsfy.generator.call_ai_cli", return_value=(False, "AI error")):
+ with pytest.raises(RuntimeError, match="AI error"):
+ await run_planner(repo_path=tmp_path, project_name="test-repo", ai_provider="claude", ai_model="opus")
+
+
+async def test_run_planner_bad_json(tmp_path: Path) -> None:
+ from docsfy.generator import run_planner
+
+ with patch("docsfy.generator.call_ai_cli", return_value=(True, "not json")):
+ with pytest.raises(RuntimeError, match="Failed to parse"):
+ await run_planner(repo_path=tmp_path, project_name="test-repo", ai_provider="claude", ai_model="opus")
+
+
+async def test_generate_page(tmp_path: Path) -> None:
+ from docsfy.generator import generate_page
+
+ cache_dir = tmp_path / "cache"
+ cache_dir.mkdir()
+
+ with patch("docsfy.generator.call_ai_cli", return_value=(True, "# Introduction\n\nWelcome!")):
+ md = await generate_page(
+ repo_path=tmp_path, slug="introduction", title="Introduction", description="Overview",
+ cache_dir=cache_dir, ai_provider="claude", ai_model="opus",
+ )
+
+ assert "# Introduction" in md
+ assert (cache_dir / "introduction.md").exists()
+
+
+async def test_generate_page_uses_cache(tmp_path: Path) -> None:
+ from docsfy.generator import generate_page
+
+ cache_dir = tmp_path / "cache"
+ cache_dir.mkdir()
+ cached = cache_dir / "introduction.md"
+ cached.write_text("# Cached content")
+
+ md = await generate_page(
+ repo_path=tmp_path, slug="introduction", title="Introduction", description="Overview",
+ cache_dir=cache_dir, ai_provider="claude", ai_model="opus", use_cache=True,
+ )
+
+ assert md == "# Cached content"
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `uv run --extra dev pytest tests/test_generator.py -v`
+Expected: FAIL
+
+**Step 3: Write implementation**
+
+Create `src/docsfy/generator.py`:
+
+```python
+from __future__ import annotations
+
+import asyncio
+from pathlib import Path
+
+from simple_logger.logger import get_logger
+
+from docsfy.ai_client import call_ai_cli
+from docsfy.json_parser import parse_json_response
+from docsfy.prompts import build_page_prompt, build_planner_prompt
+
+logger = get_logger(name=__name__)
+
+MAX_CONCURRENT_PAGES = 5
+
+
+async def run_planner(
+ repo_path: Path, project_name: str, ai_provider: str, ai_model: str, ai_cli_timeout: int | None = None,
+) -> dict:
+ prompt = build_planner_prompt(project_name)
+ success, output = await call_ai_cli(
+ prompt=prompt, cwd=repo_path, ai_provider=ai_provider, ai_model=ai_model, ai_cli_timeout=ai_cli_timeout,
+ )
+ if not success:
+ msg = f"Planner failed: {output}"
+ raise RuntimeError(msg)
+
+ plan = parse_json_response(output)
+ if plan is None:
+ msg = "Failed to parse planner output as JSON"
+ raise RuntimeError(msg)
+
+ logger.info(f"Plan generated: {len(plan.get('navigation', []))} groups")
+ return plan
+
+
+async def generate_page(
+ repo_path: Path, slug: str, title: str, description: str, cache_dir: Path,
+ ai_provider: str, ai_model: str, ai_cli_timeout: int | None = None, use_cache: bool = False,
+) -> str:
+ cache_file = cache_dir / f"{slug}.md"
+ if use_cache and cache_file.exists():
+ logger.debug(f"Using cached page: {slug}")
+ return cache_file.read_text()
+
+ prompt = build_page_prompt(project_name=repo_path.name, page_title=title, page_description=description)
+ success, output = await call_ai_cli(
+ prompt=prompt, cwd=repo_path, ai_provider=ai_provider, ai_model=ai_model, ai_cli_timeout=ai_cli_timeout,
+ )
+ if not success:
+ logger.warning(f"Failed to generate page '{slug}': {output}")
+ output = f"# {title}\n\n*Documentation generation failed. Please re-run.*"
+
+ cache_dir.mkdir(parents=True, exist_ok=True)
+ cache_file.write_text(output)
+ logger.info(f"Generated page: {slug} ({len(output)} chars)")
+ return output
+
+
+async def generate_all_pages(
+ repo_path: Path, plan: dict, cache_dir: Path, ai_provider: str, ai_model: str,
+ ai_cli_timeout: int | None = None, use_cache: bool = False,
+) -> dict[str, str]:
+ semaphore = asyncio.Semaphore(MAX_CONCURRENT_PAGES)
+ pages: dict[str, str] = {}
+
+ async def _gen(slug: str, title: str, description: str) -> tuple[str, str]:
+ async with semaphore:
+ md = await generate_page(
+ repo_path=repo_path, slug=slug, title=title, description=description,
+ cache_dir=cache_dir, ai_provider=ai_provider, ai_model=ai_model,
+ ai_cli_timeout=ai_cli_timeout, use_cache=use_cache,
+ )
+ return slug, md
+
+ tasks = []
+ for group in plan.get("navigation", []):
+ for page in group.get("pages", []):
+ tasks.append(_gen(page["slug"], page["title"], page.get("description", "")))
+
+ results = await asyncio.gather(*tasks)
+ for slug, md in results:
+ pages[slug] = md
+
+ logger.info(f"Generated {len(pages)} pages total")
+ return pages
+```
+
+**Step 4: Run test to verify it passes**
+
+Run: `uv run --extra dev pytest tests/test_generator.py -v`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add src/docsfy/generator.py tests/test_generator.py
+git commit -m "feat: add documentation generator with planner and concurrent page generation"
+```
+
+---
+
+## Task 10: HTML Renderer
+
+**Files:**
+- Create: `src/docsfy/renderer.py`
+- Create: `src/docsfy/templates/page.html`
+- Create: `src/docsfy/templates/index.html`
+- Create: `src/docsfy/static/style.css`
+- Create: `src/docsfy/static/theme.js`
+- Create: `src/docsfy/static/search.js`
+- Create: `tests/test_renderer.py`
+
+This is the largest task. The HTML template and CSS create the production-quality polish.
+
+**Step 1: Write the failing test**
+
+Create `tests/test_renderer.py`:
+
+```python
+from __future__ import annotations
+
+from pathlib import Path
+
+
+def test_render_page_to_html() -> None:
+ from docsfy.renderer import render_page
+
+ html = render_page(
+ markdown_content="# Hello\n\nThis is a test.",
+ page_title="Hello", project_name="test-repo", tagline="A test project",
+ navigation=[{"group": "Docs", "pages": [{"slug": "hello", "title": "Hello"}]}],
+ current_slug="hello",
+ )
+    assert "<h1" in html
+    assert "test-repo" in html
+
+
+def test_render_site(tmp_path: Path) -> None:
+ from docsfy.renderer import render_site
+
+ plan = {
+ "project_name": "test-repo", "tagline": "A test project",
+ "navigation": [
+ {"group": "Getting Started", "pages": [
+ {"slug": "introduction", "title": "Introduction", "description": "Overview"},
+ ]},
+ ],
+ }
+ pages = {"introduction": "# Introduction\n\nWelcome to test-repo."}
+ output_dir = tmp_path / "site"
+
+ render_site(plan=plan, pages=pages, output_dir=output_dir)
+
+ assert (output_dir / "index.html").exists()
+ assert (output_dir / "introduction.html").exists()
+ assert (output_dir / "assets" / "style.css").exists()
+ index_html = (output_dir / "index.html").read_text()
+ assert "test-repo" in index_html
+ page_html = (output_dir / "introduction.html").read_text()
+ assert "Welcome to test-repo" in page_html
+
+
+def test_search_index_generated(tmp_path: Path) -> None:
+ from docsfy.renderer import render_site
+
+ plan = {
+ "project_name": "test-repo", "tagline": "Test",
+ "navigation": [{"group": "Docs", "pages": [{"slug": "intro", "title": "Intro", "description": ""}]}],
+ }
+ pages = {"intro": "# Intro\n\nSome searchable content here."}
+ output_dir = tmp_path / "site"
+
+ render_site(plan=plan, pages=pages, output_dir=output_dir)
+ assert (output_dir / "search-index.json").exists()
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `uv run --extra dev pytest tests/test_renderer.py -v`
+Expected: FAIL
+
+**Step 3: Write implementation**
+
+Create the renderer module, Jinja2 HTML templates (page.html, index.html), and static assets (style.css, theme.js, search.js). The templates should include:
+- Sidebar navigation with group headers and page links
+- Dark/light theme toggle button
+- Search input with client-side filtering
+- Responsive layout (mobile-friendly)
+- Code syntax highlighting via Pygments CSS classes
+- Callout box styling for Note/Warning/Tip blockquotes
+- Card grid on index page for navigation groups
+- Active page highlighting in sidebar
+
+The `render_site()` function converts markdown to HTML via the `markdown` library with extensions (fenced_code, codehilite, tables, toc), renders each page into the template, copies static assets, and builds a search index JSON.
+
+**Step 4: Run test to verify it passes**
+
+Run: `uv run --extra dev pytest tests/test_renderer.py -v`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add src/docsfy/renderer.py src/docsfy/templates/ src/docsfy/static/ tests/test_renderer.py
+git commit -m "feat: add HTML renderer with Jinja2 templates, dark/light theme, and search"
+```
+
+---
+
+## Task 11: FastAPI Application
+
+**Files:**
+- Create: `src/docsfy/main.py`
+- Create: `tests/test_main.py`
+
+**Step 1: Write the failing test**
+
+Create `tests/test_main.py`:
+
+```python
+from __future__ import annotations
+
+from pathlib import Path
+from unittest.mock import patch
+
+import pytest
+from httpx import ASGITransport, AsyncClient
+
+
+@pytest.fixture
+async def client(tmp_path: Path):
+ import docsfy.storage as storage
+
+ storage.DB_PATH = tmp_path / "test.db"
+ storage.DATA_DIR = tmp_path
+ storage.PROJECTS_DIR = tmp_path / "projects"
+
+ from docsfy.main import app
+
+ await storage.init_db()
+ transport = ASGITransport(app=app)
+ async with AsyncClient(transport=transport, base_url="http://test") as ac:
+ yield ac
+
+
+async def test_health_endpoint(client: AsyncClient) -> None:
+ response = await client.get("/health")
+ assert response.status_code == 200
+ assert response.json()["status"] == "ok"
+
+
+async def test_status_endpoint_empty(client: AsyncClient) -> None:
+ response = await client.get("/api/status")
+ assert response.status_code == 200
+ assert response.json()["projects"] == []
+
+
+async def test_generate_endpoint_invalid_url(client: AsyncClient) -> None:
+ response = await client.post("/api/generate", json={"repo_url": "not-a-url"})
+ assert response.status_code == 422
+
+
+async def test_generate_endpoint_starts_generation(client: AsyncClient) -> None:
+ with patch("docsfy.main.asyncio.create_task"):
+ response = await client.post("/api/generate", json={"repo_url": "https://github.com/org/repo.git"})
+ assert response.status_code == 202
+ body = response.json()
+ assert body["project"] == "repo"
+ assert body["status"] == "generating"
+
+
+async def test_get_project_not_found(client: AsyncClient) -> None:
+ response = await client.get("/api/projects/nonexistent")
+ assert response.status_code == 404
+
+
+async def test_delete_project_not_found(client: AsyncClient) -> None:
+ response = await client.delete("/api/projects/nonexistent")
+ assert response.status_code == 404
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `uv run --extra dev pytest tests/test_main.py -v`
+Expected: FAIL
+
+**Step 3: Write implementation**
+
+Create `src/docsfy/main.py` with:
+- FastAPI app with lifespan (calls `init_db()` on startup)
+- `GET /health` - returns `{"status": "ok"}`
+- `GET /api/status` - lists all projects from DB
+- `POST /api/generate` (202) - validates `GenerateRequest`, saves project to DB, starts `_run_generation()` as background task via `asyncio.create_task()`
+- `GET /api/projects/{name}` - returns project details or 404
+- `DELETE /api/projects/{name}` - deletes project from DB and filesystem or 404
+- `GET /api/projects/{name}/download` - creates tar.gz of site dir and streams it
+- `GET /docs/{project}/{path:path}` - serves static files from project's site dir with path traversal protection
+- `run()` function for CLI entry point using `uvicorn.run()`
+
+The `_run_generation()` background task:
+1. Checks AI CLI availability
+2. Clones repo to tempdir
+3. Checks if already up-to-date (same commit SHA)
+4. Runs AI planner
+5. Generates all pages
+6. Renders HTML site
+7. Saves plan.json to project dir
+8. Updates DB status to "ready" (or "error" on exception)
+
+**Step 4: Run test to verify it passes**
+
+Run: `uv run --extra dev pytest tests/test_main.py -v`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add src/docsfy/main.py tests/test_main.py
+git commit -m "feat: add FastAPI application with all API endpoints"
+```
+
+---
+
+## Task 12: Integration Test
+
+**Files:**
+- Create: `tests/test_integration.py`
+
+**Step 1: Write the integration test**
+
+Create `tests/test_integration.py` that tests the full flow with mocked AI:
+1. Call `_run_generation()` with mocked `check_ai_cli_available`, `clone_repo`, `run_planner`, `generate_all_pages`
+2. Verify project status is "ready" via `GET /api/status`
+3. Verify project details via `GET /api/projects/{name}`
+4. Verify docs are served via `GET /docs/{project}/index.html`
+5. Verify download works via `GET /api/projects/{name}/download`
+6. Verify delete works via `DELETE /api/projects/{name}`
+
+**Step 2: Run all tests**
+
+Run: `uv run --extra dev pytest tests/ -v`
+Expected: ALL PASS
+
+**Step 3: Commit**
+
+```bash
+git add tests/test_integration.py
+git commit -m "test: add integration test for full generate-serve-download flow"
+```
+
+---
+
+## Task 13: Final Verification
+
+**Step 1: Run full test suite**
+
+```bash
+uv run --extra dev pytest tests/ -v --tb=short
+```
+Expected: ALL PASS
+
+**Step 2: Run pre-commit hooks**
+
+```bash
+pre-commit install
+pre-commit run --all-files
+```
+Expected: ALL PASS (fix any issues that arise)
+
+**Step 3: Verify project structure**
+
+```bash
+find src/docsfy -type f | sort
+```
+
+Expected:
+```
+src/docsfy/__init__.py
+src/docsfy/ai_client.py
+src/docsfy/config.py
+src/docsfy/generator.py
+src/docsfy/json_parser.py
+src/docsfy/main.py
+src/docsfy/models.py
+src/docsfy/prompts.py
+src/docsfy/renderer.py
+src/docsfy/repository.py
+src/docsfy/static/search.js
+src/docsfy/static/style.css
+src/docsfy/static/theme.js
+src/docsfy/storage.py
+src/docsfy/templates/index.html
+src/docsfy/templates/page.html
+```
+
+**Step 4: Final commit**
+
+```bash
+git add -A
+git commit -m "chore: final cleanup and verification"
+```
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..19be0e2
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,51 @@
+[project]
+name = "docsfy"
+version = "0.1.0"
+description = "AI-powered documentation generator - generates polished static HTML docs from GitHub repos"
+requires-python = ">=3.12"
+dependencies = [
+ "ai-cli-runner",
+ "fastapi",
+ "uvicorn",
+ "pydantic-settings",
+ "python-simple-logger",
+ "aiosqlite",
+ "jinja2",
+ "markdown",
+ "pygments",
+]
+
+[project.urls]
+Documentation = "https://myk-org.github.io/docsfy/"
+Repository = "https://github.com/myk-org/docsfy"
+
+[project.scripts]
+docsfy = "docsfy.main:run"
+
+[project.optional-dependencies]
+dev = ["pytest", "pytest-asyncio", "pytest-xdist", "httpx"]
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
+testpaths = ["tests"]
+pythonpath = ["src"]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/docsfy"]
+
+[tool.mypy]
+check_untyped_defs = true
+disallow_any_generics = true
+disallow_incomplete_defs = true
+disallow_untyped_defs = true
+no_implicit_optional = true
+show_error_codes = true
+warn_unused_ignores = true
+strict_equality = true
+extra_checks = true
+warn_unused_configs = true
+warn_redundant_casts = true
diff --git a/src/docsfy/__init__.py b/src/docsfy/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/docsfy/ai_client.py b/src/docsfy/ai_client.py
new file mode 100644
index 0000000..5884698
--- /dev/null
+++ b/src/docsfy/ai_client.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+from ai_cli_runner import (
+ PROVIDERS,
+ VALID_AI_PROVIDERS,
+ ProviderConfig,
+ call_ai_cli,
+ check_ai_cli_available,
+ get_ai_cli_timeout,
+ run_parallel_with_limit,
+)
+
+__all__ = [
+ "PROVIDERS",
+ "VALID_AI_PROVIDERS",
+ "ProviderConfig",
+ "call_ai_cli",
+ "check_ai_cli_available",
+ "get_ai_cli_timeout",
+ "run_parallel_with_limit",
+]
diff --git a/src/docsfy/config.py b/src/docsfy/config.py
new file mode 100644
index 0000000..f64c01d
--- /dev/null
+++ b/src/docsfy/config.py
@@ -0,0 +1,25 @@
+from __future__ import annotations
+
+from functools import lru_cache
+
+from pydantic import Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class Settings(BaseSettings):
+    """Application configuration, loaded from the environment and .env.
+
+    Unknown environment variables are ignored (extra="ignore") so the app
+    coexists with provider-specific vars like ANTHROPIC_API_KEY.
+    """
+
+    model_config = SettingsConfigDict(
+        env_file=".env",
+        env_file_encoding="utf-8",
+        extra="ignore",
+    )
+
+    # AI backend selection; must be one of the providers supported by ai_cli_runner.
+    ai_provider: str = "claude"
+    ai_model: str = "claude-opus-4-6[1m]"  # [1m] = 1 million token context window
+    # Per-call AI CLI timeout in seconds; must be positive.
+    ai_cli_timeout: int = Field(default=60, gt=0)
+    log_level: str = "INFO"
+    # Root directory for per-project caches, rendered sites and the DB.
+    data_dir: str = "/data"
+
+
+@lru_cache
+def get_settings() -> Settings:
+    """Return the process-wide Settings singleton (cached after first call)."""
+    return Settings()
diff --git a/src/docsfy/generator.py b/src/docsfy/generator.py
new file mode 100644
index 0000000..86e83a0
--- /dev/null
+++ b/src/docsfy/generator.py
@@ -0,0 +1,180 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Any
+
+from simple_logger.logger import get_logger
+
+from docsfy.ai_client import call_ai_cli, run_parallel_with_limit
+from docsfy.json_parser import parse_json_response
+from docsfy.prompts import build_page_prompt, build_planner_prompt
+
+logger = get_logger(name=__name__)
+
+MAX_CONCURRENT_PAGES = 5
+
+
+def _strip_ai_preamble(text: str) -> str:
+ """Strip AI thinking/planning text that appears before actual content."""
+ lines = text.split("\n")
+ for i, line in enumerate(lines):
+ if i > 10:
+ break
+ if line.startswith("#"):
+ return "\n".join(lines[i:])
+ return text
+
+
+async def run_planner(
+ repo_path: Path,
+ project_name: str,
+ ai_provider: str,
+ ai_model: str,
+ ai_cli_timeout: int | None = None,
+) -> dict[str, Any]:
+ logger.info(f"[{project_name}] Calling AI planner")
+ prompt = build_planner_prompt(project_name)
+ success, output = await call_ai_cli(
+ prompt=prompt,
+ cwd=repo_path,
+ ai_provider=ai_provider,
+ ai_model=ai_model,
+ ai_cli_timeout=ai_cli_timeout,
+ )
+ if not success:
+ msg = f"Planner failed: {output}"
+ raise RuntimeError(msg)
+
+ plan = parse_json_response(output)
+ if plan is None:
+ msg = "Failed to parse planner output as JSON"
+ raise RuntimeError(msg)
+
+ logger.info(
+ f"[{project_name}] Plan generated: {len(plan.get('navigation', []))} groups"
+ )
+ return plan
+
+
+async def generate_page(
+ repo_path: Path,
+ slug: str,
+ title: str,
+ description: str,
+ cache_dir: Path,
+ ai_provider: str,
+ ai_model: str,
+ ai_cli_timeout: int | None = None,
+ use_cache: bool = False,
+ project_name: str = "",
+) -> str:
+ _label = project_name or repo_path.name
+
+ # Validate slug to prevent path traversal
+ if "/" in slug or "\\" in slug or slug.startswith(".") or ".." in slug:
+ msg = f"Invalid page slug: '{slug}'"
+ raise ValueError(msg)
+
+ cache_file = cache_dir / f"{slug}.md"
+ if use_cache and cache_file.exists():
+ logger.debug(f"[{_label}] Using cached page: {slug}")
+ return cache_file.read_text(encoding="utf-8")
+
+ prompt = build_page_prompt(
+ project_name=repo_path.name, page_title=title, page_description=description
+ )
+ success, output = await call_ai_cli(
+ prompt=prompt,
+ cwd=repo_path,
+ ai_provider=ai_provider,
+ ai_model=ai_model,
+ ai_cli_timeout=ai_cli_timeout,
+ )
+ if not success:
+ logger.warning(f"[{_label}] Failed to generate page '{slug}': {output}")
+ output = f"# {title}\n\n*Documentation generation failed. Please re-run.*"
+
+ output = _strip_ai_preamble(output)
+ cache_dir.mkdir(parents=True, exist_ok=True)
+ cache_file.write_text(output, encoding="utf-8")
+ logger.info(f"[{_label}] Generated page: {slug} ({len(output)} chars)")
+
+ # Update page count in DB if project_name provided
+ if project_name:
+ from docsfy.storage import update_project_status
+
+ # Count cached pages to get current total
+ existing_pages = len(list(cache_dir.glob("*.md")))
+ await update_project_status(
+ project_name, status="generating", page_count=existing_pages
+ )
+
+ return output
+
+
+async def generate_all_pages(
+ repo_path: Path,
+ plan: dict[str, Any],
+ cache_dir: Path,
+ ai_provider: str,
+ ai_model: str,
+ ai_cli_timeout: int | None = None,
+ use_cache: bool = False,
+ project_name: str = "",
+) -> dict[str, str]:
+ _label = project_name or repo_path.name
+
+ all_pages: list[dict[str, str]] = []
+ for group in plan.get("navigation", []):
+ for page in group.get("pages", []):
+ slug = page.get("slug", "")
+ title = page.get("title", slug)
+ if not slug:
+ logger.warning(
+ f"[{_label}] Skipping page with no slug in group '{group.get('group', 'unknown')}'"
+ )
+ continue
+ if "/" in slug or "\\" in slug or slug.startswith(".") or ".." in slug:
+ logger.warning(f"[{_label}] Skipping path-unsafe slug: '{slug}'")
+ continue
+ all_pages.append(
+ {
+ "slug": slug,
+ "title": title,
+ "description": page.get("description", ""),
+ }
+ )
+
+ coroutines = [
+ generate_page(
+ repo_path=repo_path,
+ slug=p["slug"],
+ title=p["title"],
+ description=p["description"],
+ cache_dir=cache_dir,
+ ai_provider=ai_provider,
+ ai_model=ai_model,
+ ai_cli_timeout=ai_cli_timeout,
+ use_cache=use_cache,
+ project_name=project_name,
+ )
+ for p in all_pages
+ ]
+
+ results = await run_parallel_with_limit(
+ coroutines, max_concurrency=MAX_CONCURRENT_PAGES
+ )
+ pages: dict[str, str] = {}
+ for page_info, result in zip(all_pages, results):
+ if isinstance(result, Exception):
+ logger.warning(
+ f"[{_label}] Page generation failed for '{page_info['slug']}': {result}"
+ )
+ pages[page_info["slug"]] = (
+ f"# {page_info['title']}\n\n*Documentation generation failed.*"
+ )
+ else:
+ pages[page_info["slug"]] = result
+
+ logger.info(f"[{_label}] Generated {len(pages)} pages total")
+ return pages
diff --git a/src/docsfy/json_parser.py b/src/docsfy/json_parser.py
new file mode 100644
index 0000000..c475dab
--- /dev/null
+++ b/src/docsfy/json_parser.py
@@ -0,0 +1,82 @@
+from __future__ import annotations
+
+import json
+import re
+from typing import Any
+
+from simple_logger.logger import get_logger
+
+logger = get_logger(name=__name__)
+
+
+def parse_json_response(raw_text: str) -> dict[str, Any] | None:
+    """Parse an AI response into a JSON object, tolerating surrounding noise.
+
+    Tries, in order: the whole text as JSON, the first balanced {...} span,
+    then fenced code blocks. Returns None (after logging a warning) when
+    every strategy fails or the input is empty.
+    """
+    text = raw_text.strip()
+    if not text:
+        return None
+    # Fast path: the response is already a bare JSON object.
+    if text.startswith("{"):
+        try:
+            return json.loads(text)
+        except (json.JSONDecodeError, ValueError):
+            pass
+    result = _extract_json_by_braces(text)
+    if result is not None:
+        return result
+    result = _extract_json_from_code_blocks(text)
+    if result is not None:
+        return result
+    logger.warning("Failed to parse AI response as JSON")
+    return None
+
+
+def _extract_json_by_braces(text: str) -> dict[str, Any] | None:
+    """Extract and parse the first balanced {...} span in text.
+
+    Walks the text from the first "{" tracking brace depth, skipping over
+    string literals (and backslash escapes inside them) so braces embedded
+    in JSON strings don't affect the depth count. Returns the parsed object,
+    or None when no balanced span is found or it is not valid JSON.
+    """
+    first_brace = text.find("{")
+    if first_brace == -1:
+        return None
+    depth = 0
+    in_string = False
+    escape_next = False
+    end_pos = -1
+    for i in range(first_brace, len(text)):
+        char = text[i]
+        # Previous char was a backslash inside a string: ignore this char.
+        if escape_next:
+            escape_next = False
+            continue
+        if char == "\\":
+            if in_string:
+                escape_next = True
+            continue
+        # Unescaped quote toggles string mode.
+        if char == '"':
+            in_string = not in_string
+            continue
+        if in_string:
+            continue
+        if char == "{":
+            depth += 1
+        elif char == "}":
+            depth -= 1
+            if depth == 0:
+                end_pos = i
+                break
+    if end_pos == -1:
+        return None
+    json_str = text[first_brace : end_pos + 1]
+    try:
+        return json.loads(json_str)
+    except (json.JSONDecodeError, ValueError):
+        return None
+
+
+def _extract_json_from_code_blocks(text: str) -> dict[str, Any] | None:
+ blocks = re.findall(r"```(?:json)?\s*\n?(.*?)```", text, re.DOTALL)
+ for block_content in blocks:
+ block_content = block_content.strip()
+ if not block_content or "{" not in block_content:
+ continue
+ try:
+ return json.loads(block_content)
+ except (json.JSONDecodeError, ValueError):
+ pass
+ result = _extract_json_by_braces(block_content)
+ if result is not None:
+ return result
+ return None
diff --git a/src/docsfy/main.py b/src/docsfy/main.py
new file mode 100644
index 0000000..6ba961d
--- /dev/null
+++ b/src/docsfy/main.py
@@ -0,0 +1,331 @@
+from __future__ import annotations
+
+import asyncio
+import json
+import os
+import re as _re
+import shutil
+import tarfile
+import tempfile
+from collections.abc import AsyncIterator
+from contextlib import asynccontextmanager
+from pathlib import Path
+from typing import Any
+
+from fastapi import FastAPI, HTTPException
+from fastapi.responses import FileResponse, StreamingResponse
+from simple_logger.logger import get_logger
+
+from docsfy.ai_client import check_ai_cli_available
+from docsfy.config import get_settings
+from docsfy.generator import generate_all_pages, run_planner
+from docsfy.models import GenerateRequest
+from docsfy.repository import clone_repo, get_local_repo_info
+from docsfy.renderer import render_site
+from docsfy.storage import (
+ delete_project,
+ get_project,
+ get_project_cache_dir,
+ get_project_dir,
+ get_project_site_dir,
+ init_db,
+ list_projects,
+ save_project,
+ update_project_status,
+)
+
+logger = get_logger(name=__name__)
+
+_generating: set[str] = set()
+
+
+def _validate_project_name(name: str) -> str:
+ """Validate project name to prevent path traversal."""
+ if not _re.match(r"^[a-zA-Z0-9][a-zA-Z0-9._-]*$", name):
+ raise HTTPException(status_code=400, detail=f"Invalid project name: '{name}'")
+ return name
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI) -> AsyncIterator[None]:
+    """FastAPI lifespan hook: initialize the database before serving."""
+    await init_db()
+    yield
+
+
+app = FastAPI(
+ title="docsfy",
+ description="AI-powered documentation generator",
+ version="0.1.0",
+ lifespan=lifespan,
+)
+
+
+@app.get("/health")
+async def health() -> dict[str, str]:
+ return {"status": "ok"}
+
+
+@app.get("/api/status")
+async def status() -> dict[str, Any]:
+ projects = await list_projects()
+ return {"projects": projects}
+
+
+@app.post("/api/generate", status_code=202)
+async def generate(request: GenerateRequest) -> dict[str, str]:
+ settings = get_settings()
+ ai_provider = request.ai_provider or settings.ai_provider
+ ai_model = request.ai_model or settings.ai_model
+ project_name = request.project_name
+
+ if project_name in _generating:
+ raise HTTPException(
+ status_code=409,
+ detail=f"Project '{project_name}' is already being generated",
+ )
+
+ _generating.add(project_name)
+
+ await save_project(
+ name=project_name,
+ repo_url=request.repo_url or request.repo_path or "",
+ status="generating",
+ )
+
+ try:
+ asyncio.create_task(
+ _run_generation(
+ repo_url=request.repo_url,
+ repo_path=request.repo_path,
+ project_name=project_name,
+ ai_provider=ai_provider,
+ ai_model=ai_model,
+ ai_cli_timeout=request.ai_cli_timeout or settings.ai_cli_timeout,
+ force=request.force,
+ )
+ )
+ except Exception:
+ _generating.discard(project_name)
+ raise
+
+ return {"project": project_name, "status": "generating"}
+
+
+async def _run_generation(
+    repo_url: str | None,
+    repo_path: str | None,
+    project_name: str,
+    ai_provider: str,
+    ai_model: str,
+    ai_cli_timeout: int,
+    force: bool = False,
+) -> None:
+    """Background driver for one generation run.
+
+    Verifies the AI CLI is usable, resolves the repo (local path as-is,
+    remote URL cloned to a temp dir), then delegates to _generate_from_path.
+    Any failure or cancellation is recorded on the project row as status
+    "error"; the _generating guard is always released in the finally block.
+    """
+    try:
+        available, msg = await check_ai_cli_available(ai_provider, ai_model)
+        if not available:
+            await update_project_status(project_name, status="error", error_message=msg)
+            return
+
+        if repo_path:
+            # Local repository - use directly, no cloning needed
+            local_path, commit_sha = get_local_repo_info(Path(repo_path))
+            await _generate_from_path(
+                local_path,
+                commit_sha,
+                repo_url or repo_path,
+                project_name,
+                ai_provider,
+                ai_model,
+                ai_cli_timeout,
+                force,
+            )
+        else:
+            # Remote repository - clone to temp dir
+            if repo_url is None:
+                msg = "repo_url must be provided for remote repositories"
+                raise ValueError(msg)
+            # Clone runs in a worker thread so the event loop is not blocked;
+            # the temp dir (and clone) is removed when the with-block exits.
+            with tempfile.TemporaryDirectory() as tmp_dir:
+                repo_dir, commit_sha = await asyncio.to_thread(
+                    clone_repo, repo_url, Path(tmp_dir)
+                )
+                await _generate_from_path(
+                    repo_dir,
+                    commit_sha,
+                    repo_url or "",
+                    project_name,
+                    ai_provider,
+                    ai_model,
+                    ai_cli_timeout,
+                    force,
+                )
+
+    except asyncio.CancelledError:
+        # Re-raise after recording the error so cancellation still propagates.
+        logger.warning(f"Generation cancelled for {project_name}")
+        await update_project_status(
+            project_name, status="error", error_message="Generation was cancelled"
+        )
+        raise
+    except Exception as exc:
+        logger.error(f"Generation failed for {project_name}: {exc}")
+        await update_project_status(
+            project_name, status="error", error_message=str(exc)
+        )
+    finally:
+        _generating.discard(project_name)
+
+
+async def _generate_from_path(
+    repo_dir: Path,
+    commit_sha: str,
+    source_url: str,
+    project_name: str,
+    ai_provider: str,
+    ai_model: str,
+    ai_cli_timeout: int,
+    force: bool,
+) -> None:
+    """Plan, generate and render docs for an already-resolved repo checkout.
+
+    With force=True the page cache is cleared first; otherwise generation is
+    skipped entirely when the stored commit SHA matches and the project is
+    already "ready". Status transitions: generating -> ready (or the caller
+    records "error" on exception).
+    """
+    if force:
+        cache_dir = get_project_cache_dir(project_name)
+        if cache_dir.exists():
+            shutil.rmtree(cache_dir)
+            logger.info(f"[{project_name}] Cleared cache (force=True)")
+        # Reset page count so API shows 0 during regeneration
+        await update_project_status(project_name, status="generating", page_count=0)
+    else:
+        # Incremental path: nothing to do if we already generated this SHA.
+        existing = await get_project(project_name)
+        if (
+            existing
+            and existing.get("last_commit_sha") == commit_sha
+            and existing.get("status") == "ready"
+        ):
+            logger.info(f"[{project_name}] Project is up to date at {commit_sha[:8]}")
+            await update_project_status(project_name, status="ready")
+            return
+
+    plan = await run_planner(
+        repo_path=repo_dir,
+        project_name=project_name,
+        ai_provider=ai_provider,
+        ai_model=ai_model,
+        ai_cli_timeout=ai_cli_timeout,
+    )
+
+    plan["repo_url"] = source_url
+
+    # Store plan so API consumers can see doc structure while pages generate
+    await update_project_status(
+        project_name,
+        status="generating",
+        plan_json=json.dumps(plan),
+    )
+
+    cache_dir = get_project_cache_dir(project_name)
+    pages = await generate_all_pages(
+        repo_path=repo_dir,
+        plan=plan,
+        cache_dir=cache_dir,
+        ai_provider=ai_provider,
+        ai_model=ai_model,
+        ai_cli_timeout=ai_cli_timeout,
+        use_cache=not force,
+        project_name=project_name,
+    )
+
+    site_dir = get_project_site_dir(project_name)
+    render_site(plan=plan, pages=pages, output_dir=site_dir)
+
+    # Persist the plan next to the rendered site for debugging/inspection.
+    project_dir = get_project_dir(project_name)
+    (project_dir / "plan.json").write_text(json.dumps(plan, indent=2), encoding="utf-8")
+
+    page_count = len(pages)
+    await update_project_status(
+        project_name,
+        status="ready",
+        last_commit_sha=commit_sha,
+        page_count=page_count,
+        plan_json=json.dumps(plan),
+    )
+    logger.info(f"[{project_name}] Documentation ready ({page_count} pages)")
+
+
+@app.get("/api/projects/{name}")
+async def get_project_details(name: str) -> dict[str, str | int | None]:
+ name = _validate_project_name(name)
+ project = await get_project(name)
+ if not project:
+ raise HTTPException(status_code=404, detail=f"Project '{name}' not found")
+ return project
+
+
+@app.delete("/api/projects/{name}")
+async def delete_project_endpoint(name: str) -> dict[str, str]:
+ name = _validate_project_name(name)
+ deleted = await delete_project(name)
+ if not deleted:
+ raise HTTPException(status_code=404, detail=f"Project '{name}' not found")
+ project_dir = get_project_dir(name)
+ if project_dir.exists():
+ shutil.rmtree(project_dir)
+ return {"deleted": name}
+
+
+@app.get("/api/projects/{name}/download")
+async def download_project(name: str) -> StreamingResponse:
+ name = _validate_project_name(name)
+ project = await get_project(name)
+ if not project:
+ raise HTTPException(status_code=404, detail=f"Project '{name}' not found")
+ if project["status"] != "ready":
+ raise HTTPException(
+ status_code=400,
+ detail=f"Project '{name}' is not ready (status: {project['status']})",
+ )
+ site_dir = get_project_site_dir(name)
+ if not site_dir.exists():
+ raise HTTPException(
+ status_code=404, detail=f"Site directory not found for '{name}'"
+ )
+ tmp = tempfile.NamedTemporaryFile(suffix=".tar.gz", delete=False)
+ tar_path = Path(tmp.name)
+ tmp.close()
+ with tarfile.open(tar_path, mode="w:gz") as tar:
+ tar.add(str(site_dir), arcname=name)
+
+ async def _stream_and_cleanup() -> AsyncIterator[bytes]:
+ try:
+ with open(tar_path, "rb") as f:
+ while chunk := f.read(8192):
+ yield chunk
+ finally:
+ tar_path.unlink(missing_ok=True)
+
+ return StreamingResponse(
+ _stream_and_cleanup(),
+ media_type="application/gzip",
+ headers={"Content-Disposition": f"attachment; filename={name}-docs.tar.gz"},
+ )
+
+
+@app.get("/docs/{project}/{path:path}")
+async def serve_docs(project: str, path: str = "index.html") -> FileResponse:
+ project = _validate_project_name(project)
+ if not path or path == "/":
+ path = "index.html"
+ site_dir = get_project_site_dir(project)
+ file_path = site_dir / path
+ try:
+ file_path.resolve().relative_to(site_dir.resolve())
+ except ValueError:
+ raise HTTPException(status_code=403, detail="Access denied")
+ if not file_path.exists() or not file_path.is_file():
+ raise HTTPException(status_code=404, detail="File not found")
+ return FileResponse(file_path)
+
+
+def run() -> None:
+    """Console-script entry point: start uvicorn with env-driven settings.
+
+    DEBUG=true enables auto-reload; HOST/PORT override the bind address.
+    NOTE(review): the default binds 0.0.0.0 (all interfaces) — confirm this
+    is intended outside containerized deployments.
+    """
+    import uvicorn
+
+    reload = os.getenv("DEBUG", "").lower() == "true"
+    host = os.getenv("HOST", "0.0.0.0")
+    port = int(os.getenv("PORT", "8000"))
+    uvicorn.run("docsfy.main:app", host=host, port=port, reload=reload)
diff --git a/src/docsfy/models.py b/src/docsfy/models.py
new file mode 100644
index 0000000..23fe26b
--- /dev/null
+++ b/src/docsfy/models.py
@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+import re
+from pathlib import Path
+from typing import Literal
+
+from pydantic import BaseModel, Field, field_validator, model_validator
+
+
+class GenerateRequest(BaseModel):
+    """Request body for POST /api/generate.
+
+    Exactly one of repo_url (remote clone) or repo_path (local checkout)
+    must be supplied; AI provider/model/timeout fall back to server settings
+    when omitted.
+    """
+
+    repo_url: str | None = Field(
+        default=None, description="Git repository URL (HTTPS or SSH)"
+    )
+    repo_path: str | None = Field(default=None, description="Local git repository path")
+    ai_provider: Literal["claude", "gemini", "cursor"] | None = None
+    ai_model: str | None = None
+    ai_cli_timeout: int | None = Field(default=None, gt=0)
+    force: bool = Field(
+        default=False, description="Force full regeneration, ignoring cache"
+    )
+
+    @model_validator(mode="after")
+    def validate_source(self) -> GenerateRequest:
+        """Enforce that exactly one of repo_url / repo_path is present."""
+        if not self.repo_url and not self.repo_path:
+            msg = "Either 'repo_url' or 'repo_path' must be provided"
+            raise ValueError(msg)
+        if self.repo_url and self.repo_path:
+            msg = "Provide either 'repo_url' or 'repo_path', not both"
+            raise ValueError(msg)
+        return self
+
+    @field_validator("repo_url")
+    @classmethod
+    def validate_repo_url(cls, v: str | None) -> str | None:
+        """Accept HTTPS (host/owner/repo) or scp-style SSH (git@host:owner/repo) URLs.
+
+        NOTE(review): the patterns reject host:port and nested group paths
+        (e.g. GitLab subgroups) — confirm that restriction is intended.
+        """
+        if v is None:
+            return v
+        https_pattern = r"^https?://[\w.\-]+/[\w.\-]+/[\w.\-]+(\.git)?$"
+        ssh_pattern = r"^git@[\w.\-]+:[\w.\-]+/[\w.\-]+(\.git)?$"
+        if not re.match(https_pattern, v) and not re.match(ssh_pattern, v):
+            msg = f"Invalid git repository URL: '{v}'"
+            raise ValueError(msg)
+        return v
+
+    @field_validator("repo_path")
+    @classmethod
+    def validate_repo_path(cls, v: str | None) -> str | None:
+        """Require an existing directory containing a .git entry (touches the filesystem)."""
+        if v is None:
+            return v
+        path = Path(v)
+        if not path.exists():
+            msg = f"Repository path does not exist: '{v}'"
+            raise ValueError(msg)
+        if not (path / ".git").exists():
+            msg = f"Not a git repository (no .git directory): '{v}'"
+            raise ValueError(msg)
+        return v
+
+    @property
+    def project_name(self) -> str:
+        """Derive the project name from the URL tail or local directory name.
+
+        The "unknown" fallback is unreachable for validated instances, since
+        validate_source guarantees one source is set.
+        """
+        if self.repo_url:
+            name = self.repo_url.rstrip("/").split("/")[-1]
+            if name.endswith(".git"):
+                name = name[:-4]
+            return name
+        if self.repo_path:
+            return Path(self.repo_path).resolve().name
+        return "unknown"
+
+
+class DocPage(BaseModel):
+    """One documentation page in the plan: URL slug, title, optional description."""
+
+    slug: str
+    title: str
+    description: str = ""
+
+
+class NavGroup(BaseModel):
+    """A named navigation section grouping related pages."""
+
+    group: str
+    pages: list[DocPage]
+
+
+class DocPlan(BaseModel):
+    """The planner's output: project identity plus the navigation tree."""
+
+    project_name: str
+    tagline: str = ""
+    navigation: list[NavGroup] = Field(default_factory=list)
+
+
+class ProjectStatus(BaseModel):
+    """Persisted per-project generation state as exposed by the API."""
+
+    name: str
+    repo_url: str
+    status: Literal["generating", "ready", "error"] = "generating"
+    last_commit_sha: str | None = None
+    last_generated: str | None = None
+    error_message: str | None = None
+    page_count: int = 0
diff --git a/src/docsfy/prompts.py b/src/docsfy/prompts.py
new file mode 100644
index 0000000..60612c0
--- /dev/null
+++ b/src/docsfy/prompts.py
@@ -0,0 +1,61 @@
+from __future__ import annotations
+
+PLAN_SCHEMA = """{
+ "project_name": "string - project name",
+ "tagline": "string - one-line project description",
+ "navigation": [
+ {
+ "group": "string - section group name",
+ "pages": [
+ {
+ "slug": "string - URL-friendly page identifier",
+ "title": "string - human-readable page title",
+ "description": "string - brief description of what this page covers"
+ }
+ ]
+ }
+ ]
+}"""
+
+
+def build_planner_prompt(project_name: str) -> str:
+    """Build the planner prompt asking the AI for a JSON doc plan (PLAN_SCHEMA shape)."""
+    return f"""You are a technical documentation planner. Explore this repository thoroughly.
+Explore the source code, configuration files, tests, CI/CD pipelines, and project structure.
+Do NOT rely on the README — understand the project from its code and configuration.
+
+Then create a documentation plan as a JSON object. The plan should cover:
+- Introduction and overview
+- Installation / getting started
+- Configuration (if applicable)
+- Usage guides for key features
+- API reference (if the project has an API)
+- Any other sections that would help users understand and use this project
+
+Project name: {project_name}
+
+CRITICAL: Your response must be ONLY a valid JSON object. No text before or after. No markdown code blocks.
+
+Output format:
+{PLAN_SCHEMA}"""
+
+
+def build_page_prompt(project_name: str, page_title: str, page_description: str) -> str:
+    """Build the per-page prompt asking the AI for markdown-only page content."""
+    return f"""You are a technical documentation writer. Explore this repository to write
+the "{page_title}" page for the {project_name} documentation.
+
+Page description: {page_description}
+
+Explore the codebase as needed. Read source files, configs, tests, and CI/CD pipelines
+to write comprehensive, accurate documentation. Do NOT rely on the README.
+
+Write in markdown format. Include:
+- Clear explanations
+- Code examples from the actual codebase (not made up)
+- Configuration snippets where relevant
+
+Use these callout formats for special content:
+- Notes: > **Note:** text
+- Warnings: > **Warning:** text
+- Tips: > **Tip:** text
+
+Output ONLY the markdown content for this page. No wrapping, no explanation."""
diff --git a/src/docsfy/renderer.py b/src/docsfy/renderer.py
new file mode 100644
index 0000000..4b7cb34
--- /dev/null
+++ b/src/docsfy/renderer.py
@@ -0,0 +1,229 @@
+from __future__ import annotations
+
+import json
+import shutil
+from pathlib import Path
+from typing import Any
+
+import markdown
+from jinja2 import Environment, FileSystemLoader, select_autoescape
+from simple_logger.logger import get_logger
+
+logger = get_logger(name=__name__)
+
+TEMPLATES_DIR = Path(__file__).parent / "templates"
+STATIC_DIR = Path(__file__).parent / "static"
+
+
+_jinja_env: Environment | None = None
+
+
+def _get_jinja_env() -> Environment:
+    """Return the module-level Jinja environment, building it lazily once."""
+    global _jinja_env
+    if _jinja_env is None:
+        _jinja_env = Environment(
+            loader=FileSystemLoader(str(TEMPLATES_DIR)),
+            autoescape=select_autoescape(["html"]),
+        )
+    return _jinja_env
+
+
+def _md_to_html(md_text: str) -> tuple[str, str]:
+    """Convert markdown to HTML. Returns (content_html, toc_html)."""
+    # Fresh Markdown instance per call: the converter is stateful (toc, refs).
+    md = markdown.Markdown(
+        extensions=["fenced_code", "codehilite", "tables", "toc"],
+        extension_configs={
+            "codehilite": {"css_class": "highlight", "guess_lang": False},
+            "toc": {"toc_depth": "2-3"},
+        },
+    )
+    content_html = md.convert(md_text)
+    # The toc attribute is attached by the "toc" extension; default to "".
+    toc_html = getattr(md, "toc", "")
+    return content_html, toc_html
+
+
+def render_page(
+    markdown_content: str,
+    page_title: str,
+    project_name: str,
+    tagline: str,
+    navigation: list[dict[str, Any]],
+    current_slug: str,
+    prev_page: dict[str, str] | None = None,
+    next_page: dict[str, str] | None = None,
+    repo_url: str = "",
+) -> str:
+    """Render one documentation page to HTML via the page.html template.
+
+    Converts the markdown body (plus its table of contents) and passes the
+    navigation tree and optional prev/next links through to the template.
+    """
+    env = _get_jinja_env()
+    template = env.get_template("page.html")
+    content_html, toc_html = _md_to_html(markdown_content)
+    return template.render(
+        title=page_title,
+        project_name=project_name,
+        tagline=tagline,
+        content=content_html,
+        toc=toc_html,
+        navigation=navigation,
+        current_slug=current_slug,
+        prev_page=prev_page,
+        next_page=next_page,
+        repo_url=repo_url,
+    )
+
+
+def render_index(
+    project_name: str,
+    tagline: str,
+    navigation: list[dict[str, Any]],
+    repo_url: str = "",
+) -> str:
+    """Render the site landing page via the index.html template."""
+    env = _get_jinja_env()
+    template = env.get_template("index.html")
+    return template.render(
+        title=project_name,
+        project_name=project_name,
+        tagline=tagline,
+        navigation=navigation,
+        repo_url=repo_url,
+    )
+
+
+def _build_search_index(
+ pages: dict[str, str], plan: dict[str, Any]
+) -> list[dict[str, str]]:
+ index: list[dict[str, str]] = []
+ title_map: dict[str, str] = {}
+ for group in plan.get("navigation", []):
+ for page in group.get("pages", []):
+ title_map[page.get("slug", "")] = page.get("title", "")
+ for slug, content in pages.items():
+ index.append(
+ {
+ "slug": slug,
+ "title": title_map.get(slug, slug),
+ "content": content[:2000],
+ }
+ )
+ return index
+
+
+def _build_llms_txt(plan: dict[str, Any]) -> str:
+ """Build llms.txt index file."""
+ project_name = plan.get("project_name", "Documentation")
+ tagline = plan.get("tagline", "")
+ lines = [f"# {project_name}", ""]
+ if tagline:
+ lines.extend([f"> {tagline}", ""])
+ for group in plan.get("navigation", []):
+ lines.extend([f"## {group.get('group', '')}", ""])
+ for page in group.get("pages", []):
+ desc = page.get("description", "")
+ page_title = page.get("title", "")
+ page_slug = page.get("slug", "")
+ if desc:
+ lines.append(f"- [{page_title}]({page_slug}.md): {desc}")
+ else:
+ lines.append(f"- [{page_title}]({page_slug}.md)")
+ lines.append("")
+ return "\n".join(lines)
+
+
+def _build_llms_full_txt(plan: dict[str, Any], pages: dict[str, str]) -> str:
+ """Build llms-full.txt with all content concatenated."""
+ project_name = plan.get("project_name", "Documentation")
+ tagline = plan.get("tagline", "")
+ lines = [f"# {project_name}", ""]
+ if tagline:
+ lines.extend([f"> {tagline}", ""])
+ lines.extend(["---", ""])
+ for group in plan.get("navigation", []):
+ for page in group.get("pages", []):
+ slug = page.get("slug", "")
+ content = pages.get(slug, "")
+ lines.extend(
+ [
+ f"Source: {slug}.md",
+ "",
+ content,
+ "",
+ "---",
+ "",
+ ]
+ )
+ return "\n".join(lines)
+
+
+def render_site(plan: dict[str, Any], pages: dict[str, str], output_dir: Path) -> None:
+    """Render the full static site into output_dir (wiped and rebuilt).
+
+    Writes index.html, one .html and .md per valid page (path-unsafe slugs
+    are dropped with a warning), copied static assets, search-index.json,
+    and llms.txt / llms-full.txt companions.
+    """
+    # Rebuild from scratch so stale pages from a previous plan disappear.
+    if output_dir.exists():
+        shutil.rmtree(output_dir)
+    output_dir.mkdir(parents=True, exist_ok=True)
+    assets_dir = output_dir / "assets"
+    assets_dir.mkdir(exist_ok=True)
+
+    project_name: str = plan.get("project_name", "Documentation")
+    tagline: str = plan.get("tagline", "")
+    navigation: list[dict[str, Any]] = plan.get("navigation", [])
+    repo_url: str = plan.get("repo_url", "")
+
+    if STATIC_DIR.exists():
+        for static_file in STATIC_DIR.iterdir():
+            if static_file.is_file():
+                shutil.copy2(static_file, assets_dir / static_file.name)
+
+    # Filter out invalid slugs
+    valid_pages: dict[str, str] = {}
+    for slug, content in pages.items():
+        if "/" in slug or "\\" in slug or slug.startswith(".") or ".." in slug:
+            logger.warning(f"Skipping invalid slug: {slug}")
+        else:
+            valid_pages[slug] = content
+
+    index_html = render_index(project_name, tagline, navigation, repo_url=repo_url)
+    (output_dir / "index.html").write_text(index_html, encoding="utf-8")
+
+    # Build ordered list of valid slugs for prev/next navigation
+    valid_slug_order: list[dict[str, str]] = []
+    for group in navigation:
+        for page in group.get("pages", []):
+            slug = page.get("slug", "")
+            if slug in valid_pages:
+                valid_slug_order.append(
+                    {"slug": slug, "title": page.get("title", slug)}
+                )
+
+    for idx, slug_info in enumerate(valid_slug_order):
+        slug = slug_info["slug"]
+        md_content = valid_pages[slug]
+        title = slug_info["title"]
+
+        prev_page = valid_slug_order[idx - 1] if idx > 0 else None
+        next_page = (
+            valid_slug_order[idx + 1] if idx < len(valid_slug_order) - 1 else None
+        )
+
+        page_html = render_page(
+            markdown_content=md_content,
+            page_title=title,
+            project_name=project_name,
+            tagline=tagline,
+            navigation=navigation,
+            current_slug=slug,
+            prev_page=prev_page,
+            next_page=next_page,
+            repo_url=repo_url,
+        )
+        # Ship both rendered HTML and the raw markdown (llms.txt links to .md).
+        (output_dir / f"{slug}.html").write_text(page_html, encoding="utf-8")
+        (output_dir / f"{slug}.md").write_text(md_content, encoding="utf-8")
+
+    search_index = _build_search_index(valid_pages, plan)
+    (output_dir / "search-index.json").write_text(
+        json.dumps(search_index), encoding="utf-8"
+    )
+
+    # Generate llms.txt files
+    llms_txt = _build_llms_txt(plan)
+    (output_dir / "llms.txt").write_text(llms_txt, encoding="utf-8")
+
+    llms_full_txt = _build_llms_full_txt(plan, valid_pages)
+    (output_dir / "llms-full.txt").write_text(llms_full_txt, encoding="utf-8")
+
+    logger.info(f"Rendered site: {len(valid_pages)} pages to {output_dir}")
diff --git a/src/docsfy/repository.py b/src/docsfy/repository.py
new file mode 100644
index 0000000..16a7d4b
--- /dev/null
+++ b/src/docsfy/repository.py
@@ -0,0 +1,60 @@
+from __future__ import annotations
+
+import subprocess
+from pathlib import Path
+
+from simple_logger.logger import get_logger
+
+logger = get_logger(name=__name__)
+
+
+def extract_repo_name(repo_url: str) -> str:
+ name = repo_url.rstrip("/").split("/")[-1]
+ if name.endswith(".git"):
+ name = name[:-4]
+ if ":" in name:
+ name = name.split(":")[-1].split("/")[-1]
+ return name
+
+
+def clone_repo(repo_url: str, base_dir: Path) -> tuple[Path, str]:
+ repo_name = extract_repo_name(repo_url)
+ repo_path = base_dir / repo_name
+ logger.info(f"Cloning {repo_name} to {repo_path}")
+ result = subprocess.run(
+ ["git", "clone", "--depth", "1", "--", repo_url, str(repo_path)],
+ capture_output=True,
+ text=True,
+ timeout=300,
+ )
+ if result.returncode != 0:
+ msg = f"Clone failed: {result.stderr or result.stdout}"
+ raise RuntimeError(msg)
+ sha_result = subprocess.run(
+ ["git", "rev-parse", "HEAD"],
+ cwd=repo_path,
+ capture_output=True,
+ text=True,
+ )
+ if sha_result.returncode != 0:
+ msg = f"Failed to get commit SHA: {sha_result.stderr or sha_result.stdout}"
+ raise RuntimeError(msg)
+ commit_sha = sha_result.stdout.strip()
+ logger.info(f"Cloned {repo_name} at commit {commit_sha[:8]}")
+ return repo_path, commit_sha
+
+
+def get_local_repo_info(repo_path: Path) -> tuple[Path, str]:
+ """Get commit SHA from a local git repository."""
+ sha_result = subprocess.run(
+ ["git", "rev-parse", "HEAD"],
+ cwd=repo_path,
+ capture_output=True,
+ text=True,
+ )
+ if sha_result.returncode != 0:
+ msg = f"Failed to get commit SHA: {sha_result.stderr or sha_result.stdout}"
+ raise RuntimeError(msg)
+ commit_sha = sha_result.stdout.strip()
+ logger.info(f"Local repo {repo_path.name} at commit {commit_sha[:8]}")
+ return repo_path, commit_sha
diff --git a/src/docsfy/static/callouts.js b/src/docsfy/static/callouts.js
new file mode 100644
index 0000000..af82afc
--- /dev/null
+++ b/src/docsfy/static/callouts.js
@@ -0,0 +1,26 @@
+(function() {
+  // Upgrade markdown blockquotes of the form "> **Note:** ..." into styled
+  // callouts by adding callout/callout-<type> classes (styles in style.css).
+  var blockquotes = document.querySelectorAll('blockquote');
+  blockquotes.forEach(function(bq) {
+    // The convention puts the type marker in the first <strong> element.
+    var firstStrong = bq.querySelector('strong');
+    if (!firstStrong) return;
+
+    var text = firstStrong.textContent.toLowerCase().replace(':', '').trim();
+    var type = null;
+
+    if (text === 'note' || text === 'info') {
+      type = 'note';
+    } else if (text === 'warning' || text === 'caution') {
+      type = 'warning';
+    } else if (text === 'tip' || text === 'hint') {
+      type = 'tip';
+    } else if (text === 'danger' || text === 'error') {
+      type = 'danger';
+    } else if (text === 'important') {
+      type = 'important';
+    }
+
+    // Unrecognized markers leave the blockquote untouched.
+    if (type) {
+      bq.classList.add('callout', 'callout-' + type);
+    }
+  });
+})();
diff --git a/src/docsfy/static/codelabels.js b/src/docsfy/static/codelabels.js
new file mode 100644
index 0000000..34467b3
--- /dev/null
+++ b/src/docsfy/static/codelabels.js
@@ -0,0 +1,75 @@
+(function() {
+ var blocks = document.querySelectorAll('pre code');
+ blocks.forEach(function(code) {
+ var classes = code.className || '';
+ var match = classes.match(/language-(\w+)/);
+ if (!match) return;
+
+ var lang = match[1];
+ var labelMap = {
+ 'python': 'Python',
+ 'py': 'Python',
+ 'javascript': 'JavaScript',
+ 'js': 'JavaScript',
+ 'typescript': 'TypeScript',
+ 'ts': 'TypeScript',
+ 'bash': 'Bash',
+ 'sh': 'Shell',
+ 'shell': 'Shell',
+ 'json': 'JSON',
+ 'yaml': 'YAML',
+ 'yml': 'YAML',
+ 'html': 'HTML',
+ 'css': 'CSS',
+ 'go': 'Go',
+ 'rust': 'Rust',
+ 'java': 'Java',
+ 'ruby': 'Ruby',
+ 'rb': 'Ruby',
+ 'sql': 'SQL',
+ 'dockerfile': 'Dockerfile',
+ 'docker': 'Dockerfile',
+ 'toml': 'TOML',
+ 'xml': 'XML',
+ 'c': 'C',
+ 'cpp': 'C++',
+ 'csharp': 'C#',
+ 'cs': 'C#',
+ 'php': 'PHP',
+ 'swift': 'Swift',
+ 'kotlin': 'Kotlin',
+ 'scala': 'Scala',
+ 'r': 'R',
+ 'lua': 'Lua',
+ 'perl': 'Perl',
+ 'makefile': 'Makefile',
+ 'graphql': 'GraphQL',
+ 'markdown': 'Markdown',
+ 'md': 'Markdown',
+ 'plaintext': 'Text',
+ 'text': 'Text',
+ 'ini': 'INI',
+ 'env': '.env',
+ };
+
+ var displayName = labelMap[lang.toLowerCase()] || lang;
+
+ var pre = code.parentElement;
+ if (!pre || pre.tagName !== 'PRE') return;
+
+ var label = document.createElement('span');
+ label.className = 'code-label';
+ label.textContent = displayName;
+
+ var wrapper = pre.closest('.code-block-wrapper');
+ if (wrapper) {
+ wrapper.insertBefore(label, wrapper.firstChild);
+ } else {
+ pre.style.position = 'relative';
+ label.style.position = 'absolute';
+ label.style.top = '0.5rem';
+ label.style.right = '0.5rem';
+ pre.insertBefore(label, pre.firstChild);
+ }
+ });
+})();
diff --git a/src/docsfy/static/copy.js b/src/docsfy/static/copy.js
new file mode 100644
index 0000000..c7e2a7a
--- /dev/null
+++ b/src/docsfy/static/copy.js
@@ -0,0 +1,41 @@
+(function() {
+ document.querySelectorAll('pre').forEach(function(pre) {
+ var btn = document.createElement('button');
+ btn.className = 'copy-btn';
+ btn.textContent = 'Copy';
+ btn.addEventListener('click', function() {
+ var code = pre.querySelector('code');
+ var text = code ? code.textContent : pre.textContent;
+ if (navigator.clipboard && navigator.clipboard.writeText) {
+ navigator.clipboard.writeText(text).then(function() {
+ btn.textContent = 'Copied!';
+ setTimeout(function() { btn.textContent = 'Copy'; }, 2000);
+ }).catch(function() {
+ fallbackCopy(text, btn);
+ });
+ } else {
+ fallbackCopy(text, btn);
+ }
+ });
+ pre.style.position = 'relative';
+ pre.appendChild(btn);
+ });
+
+ function fallbackCopy(text, btn) {
+ var textarea = document.createElement('textarea');
+ textarea.value = text;
+ textarea.style.position = 'fixed';
+ textarea.style.opacity = '0';
+ document.body.appendChild(textarea);
+ textarea.select();
+ try {
+ document.execCommand('copy');
+ btn.textContent = 'Copied!';
+ setTimeout(function() { btn.textContent = 'Copy'; }, 2000);
+ } catch (e) {
+ btn.textContent = 'Failed';
+ setTimeout(function() { btn.textContent = 'Copy'; }, 2000);
+ }
+ document.body.removeChild(textarea);
+ }
+})();
diff --git a/src/docsfy/static/github.js b/src/docsfy/static/github.js
new file mode 100644
index 0000000..9e0a739
--- /dev/null
+++ b/src/docsfy/static/github.js
@@ -0,0 +1,38 @@
+(function() {
+ var link = document.getElementById('github-link');
+ if (!link) return;
+
+ var repoUrl = link.getAttribute('data-repo-url');
+ if (!repoUrl) return;
+
+ // Extract owner/repo from GitHub URL
+ var match = repoUrl.match(/github\.com[/:]([^/]+)\/([^/.]+)/);
+ if (!match) return;
+
+ var owner = match[1];
+ var repo = match[2];
+
+ var starsEl = document.getElementById('github-stars');
+ if (!starsEl) return;
+
+ fetch('https://api.github.com/repos/' + owner + '/' + repo)
+ .then(function(response) {
+ if (!response.ok) return null;
+ return response.json();
+ })
+ .then(function(data) {
+ if (!data || typeof data.stargazers_count === 'undefined') return;
+ var count = data.stargazers_count;
+ var display;
+ if (count >= 1000) {
+ display = (count / 1000).toFixed(1).replace(/\.0$/, '') + 'k';
+ } else {
+ display = count.toString();
+ }
+ starsEl.textContent = display;
+ starsEl.title = count.toLocaleString() + ' stars';
+ })
+ .catch(function() {
+ // Silently fail - star count is a nice-to-have
+ });
+})();
diff --git a/src/docsfy/static/scrollspy.js b/src/docsfy/static/scrollspy.js
new file mode 100644
index 0000000..6ab0887
--- /dev/null
+++ b/src/docsfy/static/scrollspy.js
@@ -0,0 +1,49 @@
+(function() {
+ var tocLinks = document.querySelectorAll('.toc-container a');
+ if (tocLinks.length === 0) return;
+
+ var headings = [];
+ tocLinks.forEach(function(link) {
+ var href = link.getAttribute('href');
+ if (href && href.startsWith('#')) {
+ var target = document.getElementById(href.substring(1));
+ if (target) {
+ headings.push({ element: target, link: link });
+ }
+ }
+ });
+
+ if (headings.length === 0) return;
+
+ function updateActive() {
+ var scrollPos = window.scrollY + 100;
+ var current = null;
+
+ for (var i = 0; i < headings.length; i++) {
+ if (headings[i].element.offsetTop <= scrollPos) {
+ current = headings[i];
+ }
+ }
+
+ tocLinks.forEach(function(link) {
+ link.classList.remove('active');
+ });
+
+ if (current) {
+ current.link.classList.add('active');
+ }
+ }
+
+ var ticking = false;
+ window.addEventListener('scroll', function() {
+ if (!ticking) {
+ window.requestAnimationFrame(function() {
+ updateActive();
+ ticking = false;
+ });
+ ticking = true;
+ }
+ });
+
+ updateActive();
+})();
diff --git a/src/docsfy/static/search.js b/src/docsfy/static/search.js
new file mode 100644
index 0000000..a8fc6de
--- /dev/null
+++ b/src/docsfy/static/search.js
@@ -0,0 +1,125 @@
+(function() {
+ // Create modal HTML
+ var overlay = document.createElement('div');
+ overlay.className = 'search-modal-overlay';
+ overlay.innerHTML = '
' +
+ '
' +
+ '
' +
+ '' +
+ '
';
+ document.body.appendChild(overlay);
+
+ var input = overlay.querySelector('.search-modal-input');
+ var results = overlay.querySelector('.search-modal-results');
+ var index = [];
+ var selectedIdx = -1;
+
+ // Load index
+ fetch('search-index.json').then(function(r) { return r.json(); })
+ .then(function(data) { index = data; }).catch(function() {});
+
+ // Open/close
+ function openModal() {
+ overlay.classList.add('active');
+ input.value = '';
+ results.innerHTML = '';
+ selectedIdx = -1;
+ setTimeout(function() { input.focus(); }, 50);
+ }
+
+ function closeModal() {
+ overlay.classList.remove('active');
+ }
+
+ // Keyboard shortcut
+ document.addEventListener('keydown', function(e) {
+ if ((e.metaKey || e.ctrlKey) && e.key === 'k') {
+ e.preventDefault();
+ openModal();
+ }
+ if (e.key === 'Escape') closeModal();
+ });
+
+ // Click overlay to close
+ overlay.addEventListener('click', function(e) {
+ if (e.target === overlay) closeModal();
+ });
+
+ // Search trigger button in sidebar
+ var sidebarSearch = document.getElementById('search-input');
+ if (sidebarSearch) {
+ sidebarSearch.addEventListener('focus', function(e) {
+ e.preventDefault();
+ this.blur();
+ openModal();
+ });
+ }
+
+ // Search trigger button in top bar
+ var topBarSearch = document.getElementById('search-trigger');
+ if (topBarSearch) {
+ topBarSearch.addEventListener('click', function(e) {
+ e.preventDefault();
+ openModal();
+ });
+ }
+
+ // Search logic
+ input.addEventListener('input', function() {
+ var q = this.value.toLowerCase().trim();
+ results.innerHTML = '';
+ selectedIdx = -1;
+ if (!q) return;
+
+ var matches = index.filter(function(item) {
+ return item.title.toLowerCase().includes(q) || item.content.toLowerCase().includes(q);
+ }).slice(0, 10);
+
+ matches.forEach(function(m, i) {
+ var div = document.createElement('a');
+ div.href = m.slug + '.html';
+ div.className = 'search-result-item';
+
+ var title = document.createElement('span');
+ title.className = 'search-result-title';
+ title.textContent = m.title;
+ div.appendChild(title);
+
+ // Content preview
+ var preview = document.createElement('span');
+ preview.className = 'search-result-preview';
+ var contentIdx = m.content.toLowerCase().indexOf(q);
+ if (contentIdx >= 0) {
+ var start = Math.max(0, contentIdx - 40);
+ var end = Math.min(m.content.length, contentIdx + q.length + 60);
+ var snippet = (start > 0 ? '...' : '') + m.content.substring(start, end) + (end < m.content.length ? '...' : '');
+ preview.textContent = snippet;
+ }
+ div.appendChild(preview);
+ results.appendChild(div);
+ });
+
+ if (matches.length === 0) {
+ var empty = document.createElement('div');
+ empty.className = 'search-no-results';
+ empty.textContent = 'No results found';
+ results.appendChild(empty);
+ }
+ });
+
+ // Keyboard navigation
+ input.addEventListener('keydown', function(e) {
+ var items = results.querySelectorAll('.search-result-item');
+ if (e.key === 'ArrowDown') {
+ e.preventDefault();
+ selectedIdx = Math.min(selectedIdx + 1, items.length - 1);
+ items.forEach(function(item, i) { item.classList.toggle('selected', i === selectedIdx); });
+ } else if (e.key === 'ArrowUp') {
+ e.preventDefault();
+ selectedIdx = Math.max(selectedIdx - 1, 0);
+ items.forEach(function(item, i) { item.classList.toggle('selected', i === selectedIdx); });
+ } else if (e.key === 'Enter' && selectedIdx >= 0 && items[selectedIdx]) {
+ window.location.href = items[selectedIdx].href;
+ }
+ });
+})();
diff --git a/src/docsfy/static/style.css b/src/docsfy/static/style.css
new file mode 100644
index 0000000..980a5bc
--- /dev/null
+++ b/src/docsfy/static/style.css
@@ -0,0 +1,1344 @@
+/* ==========================================================================
+ Docsfy - Documentation Theme
+ ========================================================================== */
+
+/* --------------------------------------------------------------------------
+ CSS Custom Properties (Light Theme - Default)
+ -------------------------------------------------------------------------- */
+:root {
+ /* Backgrounds */
+ --bg-primary: #ffffff;
+ --bg-secondary: #f9fafb;
+ --bg-tertiary: #f3f4f6;
+ --bg-sidebar: #f9fafb;
+ --bg-code: #f5f5f5;
+ --bg-code-block: #1e1e2e;
+ --bg-card: #ffffff;
+ --bg-hero: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
+ --bg-search-result: #ffffff;
+
+ /* Text */
+ --text-primary: #111827;
+ --text-secondary: #4b5563;
+ --text-tertiary: #6b7280;
+ --text-inverse: #ffffff;
+ --text-link: #4f46e5;
+ --text-link-hover: #4338ca;
+ --text-code: #e11d48;
+ --text-code-block: #d4d4d8;
+
+ /* Borders */
+ --border-primary: #e5e7eb;
+ --border-secondary: #f3f4f6;
+ --border-focus: #4f46e5;
+
+ /* Accent */
+ --accent: #4f46e5;
+ --accent-light: #eef2ff;
+ --accent-hover: #4338ca;
+
+ /* Callouts */
+ --callout-note-bg: #eff6ff;
+ --callout-note-border: #3b82f6;
+ --callout-note-text: #1e40af;
+ --callout-warning-bg: #fffbeb;
+ --callout-warning-border: #f59e0b;
+ --callout-warning-text: #92400e;
+ --callout-tip-bg: #ecfdf5;
+ --callout-tip-border: #10b981;
+ --callout-tip-text: #065f46;
+
+ /* Shadows */
+ --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.05);
+ --shadow-md: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -2px rgba(0, 0, 0, 0.1);
+ --shadow-lg: 0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -4px rgba(0, 0, 0, 0.1);
+
+ /* Layout */
+ --sidebar-width: 280px;
+ --content-max-width: 720px;
+ --header-height: 56px;
+
+ /* Typography */
+ --font-sans: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji";
+ --font-mono: "SF Mono", SFMono-Regular, ui-monospace, "DejaVu Sans Mono", Menlo, Consolas, monospace;
+
+ /* Transitions */
+ --transition-fast: 150ms ease;
+ --transition-normal: 250ms ease;
+
+ /* Table */
+ --table-row-alt: #f9fafb;
+ --table-border: #e5e7eb;
+}
+
+/* --------------------------------------------------------------------------
+ Dark Theme
+ -------------------------------------------------------------------------- */
+[data-theme="dark"] {
+ --bg-primary: #0f1117;
+ --bg-secondary: #161822;
+ --bg-tertiary: #1e2030;
+ --bg-sidebar: #161822;
+ --bg-code: #1e2030;
+ --bg-code-block: #1e2030;
+ --bg-card: #161822;
+ --bg-hero: linear-gradient(135deg, #3730a3 0%, #6d28d9 100%);
+ --bg-search-result: #1e2030;
+
+ --text-primary: #e5e7eb;
+ --text-secondary: #9ca3af;
+ --text-tertiary: #6b7280;
+ --text-inverse: #ffffff;
+ --text-link: #818cf8;
+ --text-link-hover: #a5b4fc;
+ --text-code: #fb7185;
+ --text-code-block: #d4d4d8;
+
+ --border-primary: #2e3248;
+ --border-secondary: #252838;
+ --border-focus: #818cf8;
+
+ --accent: #818cf8;
+ --accent-light: #1e1b4b;
+ --accent-hover: #a5b4fc;
+
+ --callout-note-bg: #172554;
+ --callout-note-border: #3b82f6;
+ --callout-note-text: #93c5fd;
+ --callout-warning-bg: #451a03;
+ --callout-warning-border: #f59e0b;
+ --callout-warning-text: #fcd34d;
+ --callout-tip-bg: #052e16;
+ --callout-tip-border: #10b981;
+ --callout-tip-text: #6ee7b7;
+
+ --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.3);
+ --shadow-md: 0 4px 6px -1px rgba(0, 0, 0, 0.4), 0 2px 4px -2px rgba(0, 0, 0, 0.3);
+ --shadow-lg: 0 10px 15px -3px rgba(0, 0, 0, 0.4), 0 4px 6px -4px rgba(0, 0, 0, 0.3);
+
+ --table-row-alt: #1a1c2e;
+ --table-border: #2e3248;
+}
+
+/* --------------------------------------------------------------------------
+ Reset & Base
+ -------------------------------------------------------------------------- */
+*,
+*::before,
+*::after {
+ box-sizing: border-box;
+ margin: 0;
+ padding: 0;
+}
+
+html {
+ font-size: 16px;
+ scroll-behavior: smooth;
+ scroll-padding-top: calc(var(--header-height, 60px) + 20px);
+ -webkit-text-size-adjust: 100%;
+}
+
+body {
+ font-family: var(--font-sans);
+ color: var(--text-primary);
+ background-color: var(--bg-primary);
+ line-height: 1.7;
+ transition: background-color var(--transition-normal), color var(--transition-normal);
+ display: flex;
+ min-height: 100vh;
+}
+
+a {
+ color: var(--text-link);
+ text-decoration: none;
+ transition: color var(--transition-fast);
+}
+
+a:hover {
+ color: var(--text-link-hover);
+ text-decoration: underline;
+}
+
+/* --------------------------------------------------------------------------
+ Sidebar
+ -------------------------------------------------------------------------- */
+.sidebar {
+ position: fixed;
+ top: 0;
+ left: 0;
+ bottom: 0;
+ width: var(--sidebar-width);
+ background: var(--bg-sidebar);
+ border-right: 1px solid var(--border-primary);
+ overflow-y: auto;
+ z-index: 100;
+ display: flex;
+ flex-direction: column;
+ transition: background-color var(--transition-normal), border-color var(--transition-normal);
+}
+
+.sidebar-header {
+ padding: 24px 20px 16px;
+ border-bottom: 1px solid var(--border-secondary);
+}
+
+.sidebar-logo {
+ display: block;
+ font-size: 1.125rem;
+ font-weight: 700;
+ color: var(--text-primary);
+ text-decoration: none;
+ letter-spacing: -0.01em;
+}
+
+.sidebar-logo:hover {
+ color: var(--accent);
+ text-decoration: none;
+}
+
+.sidebar-tagline {
+ font-size: 0.8125rem;
+ color: var(--text-tertiary);
+ margin-top: 4px;
+ line-height: 1.4;
+}
+
+/* Sidebar Search */
+.sidebar-search {
+ padding: 12px 20px;
+ position: relative;
+}
+
+#search-input {
+ width: 100%;
+ padding: 8px 12px;
+ font-size: 0.875rem;
+ font-family: var(--font-sans);
+ border: 1px solid var(--border-primary);
+ border-radius: 8px;
+ background: var(--bg-primary);
+ color: var(--text-primary);
+ outline: none;
+ transition: border-color var(--transition-fast), box-shadow var(--transition-fast),
+ background-color var(--transition-normal);
+}
+
+#search-input::placeholder {
+ color: var(--text-tertiary);
+}
+
+#search-input:focus {
+ border-color: var(--border-focus);
+ box-shadow: 0 0 0 3px rgba(79, 70, 229, 0.15);
+}
+
+.search-results {
+ display: none;
+ position: absolute;
+ top: 100%;
+ left: 20px;
+ right: 20px;
+ background: var(--bg-search-result);
+ border: 1px solid var(--border-primary);
+ border-radius: 8px;
+ box-shadow: var(--shadow-lg);
+ z-index: 200;
+ max-height: 300px;
+ overflow-y: auto;
+}
+
+.search-result-item {
+ display: block;
+ padding: 10px 14px;
+ font-size: 0.875rem;
+ color: var(--text-primary);
+ border-bottom: 1px solid var(--border-secondary);
+ text-decoration: none;
+ transition: background-color var(--transition-fast);
+}
+
+.search-result-item:last-child {
+ border-bottom: none;
+}
+
+.search-result-item:hover {
+ background: var(--accent-light);
+ color: var(--accent);
+ text-decoration: none;
+}
+
+/* Sidebar Navigation */
+.sidebar-nav {
+ flex: 1;
+ padding: 8px 0 24px;
+ overflow-y: auto;
+}
+
+.nav-group {
+ padding: 0 12px;
+ margin-bottom: 8px;
+}
+
+.nav-group-title {
+ font-size: 0.75rem;
+ font-weight: 600;
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+ color: var(--text-tertiary);
+ padding: 12px 8px 6px;
+}
+
+.nav-group-pages {
+ list-style: none;
+}
+
+.nav-link {
+ display: block;
+ padding: 6px 12px;
+ font-size: 0.875rem;
+ color: var(--text-secondary);
+ border-radius: 6px;
+ text-decoration: none;
+ transition: color var(--transition-fast), background-color var(--transition-fast);
+ line-height: 1.5;
+}
+
+.nav-link:hover {
+ color: var(--text-primary);
+ background: var(--bg-tertiary);
+ text-decoration: none;
+}
+
+.nav-link.active {
+ color: var(--accent);
+ background: var(--accent-light);
+ font-weight: 500;
+}
+
+/* --------------------------------------------------------------------------
+ Main Wrapper
+ -------------------------------------------------------------------------- */
+.main-wrapper {
+ margin-left: var(--sidebar-width);
+ flex: 1;
+ display: flex;
+ flex-direction: column;
+ min-height: 100vh;
+ transition: margin-left var(--transition-normal);
+}
+
+/* --------------------------------------------------------------------------
+ Top Bar
+ -------------------------------------------------------------------------- */
+.top-bar {
+ position: sticky;
+ top: 0;
+ height: var(--header-height);
+ display: flex;
+ align-items: center;
+ padding: 0 24px;
+ background: var(--bg-primary);
+ border-bottom: 1px solid var(--border-primary);
+ z-index: 50;
+ transition: background-color var(--transition-normal), border-color var(--transition-normal);
+}
+
+.top-bar-spacer {
+ flex: 1;
+}
+
+.sidebar-toggle {
+ display: none;
+ background: none;
+ border: none;
+ color: var(--text-secondary);
+ cursor: pointer;
+ padding: 6px;
+ border-radius: 6px;
+ transition: color var(--transition-fast), background-color var(--transition-fast);
+}
+
+.sidebar-toggle:hover {
+ color: var(--text-primary);
+ background: var(--bg-tertiary);
+}
+
+.theme-toggle {
+ background: none;
+ border: 1px solid var(--border-primary);
+ color: var(--text-secondary);
+ cursor: pointer;
+ padding: 7px;
+ border-radius: 8px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ transition: color var(--transition-fast), background-color var(--transition-fast),
+ border-color var(--transition-fast);
+}
+
+.theme-toggle:hover {
+ color: var(--text-primary);
+ background: var(--bg-tertiary);
+ border-color: var(--border-primary);
+}
+
+/* Show sun in dark mode, moon in light mode */
+.icon-sun {
+ display: none;
+}
+
+.icon-moon {
+ display: block;
+}
+
+[data-theme="dark"] .icon-sun {
+ display: block;
+}
+
+[data-theme="dark"] .icon-moon {
+ display: none;
+}
+
+/* --------------------------------------------------------------------------
+ Content
+ -------------------------------------------------------------------------- */
+.content {
+ flex: 1;
+ padding: calc(var(--header-height, 60px) + 20px) 48px 40px;
+ max-width: calc(var(--content-max-width) + 96px);
+}
+
+.article {
+ max-width: var(--content-max-width);
+}
+
+.article-title {
+ font-size: 2rem;
+ font-weight: 700;
+ letter-spacing: -0.025em;
+ color: var(--text-primary);
+ margin-bottom: 24px;
+ line-height: 1.25;
+}
+
+.article-body {
+ font-size: 1rem;
+ line-height: 1.75;
+ color: var(--text-secondary);
+}
+
+/* Typography in article body */
+.article-body h1 {
+ font-size: 1.875rem;
+ font-weight: 700;
+ margin-top: 48px;
+ margin-bottom: 16px;
+ color: var(--text-primary);
+ letter-spacing: -0.02em;
+ line-height: 1.3;
+}
+
+.article-body h2 {
+ font-size: 1.5rem;
+ font-weight: 600;
+ margin-top: 40px;
+ margin-bottom: 12px;
+ color: var(--text-primary);
+ letter-spacing: -0.015em;
+ line-height: 1.35;
+ padding-bottom: 8px;
+ border-bottom: 1px solid var(--border-secondary);
+}
+
+.article-body h3 {
+ font-size: 1.25rem;
+ font-weight: 600;
+ margin-top: 32px;
+ margin-bottom: 8px;
+ color: var(--text-primary);
+ line-height: 1.4;
+}
+
+.article-body h4 {
+ font-size: 1.0625rem;
+ font-weight: 600;
+ margin-top: 24px;
+ margin-bottom: 8px;
+ color: var(--text-primary);
+}
+
+.article-body p {
+ margin-bottom: 16px;
+}
+
+.article-body ul,
+.article-body ol {
+ margin-bottom: 16px;
+ padding-left: 24px;
+}
+
+.article-body li {
+ margin-bottom: 6px;
+}
+
+.article-body li > ul,
+.article-body li > ol {
+ margin-top: 6px;
+ margin-bottom: 0;
+}
+
+/* Inline code */
+.article-body code {
+ font-family: var(--font-mono);
+ font-size: 0.875em;
+ background: var(--bg-code);
+ color: var(--text-code);
+ padding: 2px 6px;
+ border-radius: 4px;
+}
+
+/* Code blocks (Pygments / codehilite) */
+.article-body pre {
+ margin-bottom: 20px;
+ border-radius: 8px;
+ overflow-x: auto;
+ font-size: 0.875rem;
+ line-height: 1.6;
+}
+
+.article-body .highlight {
+ background: var(--bg-code-block);
+ border-radius: 8px;
+ padding: 16px 20px;
+ overflow-x: auto;
+ margin-bottom: 20px;
+}
+
+.article-body .highlight pre {
+ margin: 0;
+ background: transparent;
+ padding: 0;
+ color: var(--text-code-block);
+}
+
+.article-body .highlight code {
+ background: transparent;
+ color: inherit;
+ padding: 0;
+ font-size: 0.875rem;
+}
+
+/* Pygments syntax highlighting tokens */
+.highlight .hll { background-color: #3d405b; }
+.highlight .c { color: #6a737d; font-style: italic; } /* Comment */
+.highlight .k { color: #c678dd; font-weight: bold; } /* Keyword */
+.highlight .o { color: #c678dd; } /* Operator */
+.highlight .cm { color: #6a737d; font-style: italic; } /* Comment.Multiline */
+.highlight .cp { color: #6a737d; font-weight: bold; } /* Comment.Preproc */
+.highlight .c1 { color: #6a737d; font-style: italic; } /* Comment.Single */
+.highlight .cs { color: #6a737d; font-style: italic; } /* Comment.Special */
+.highlight .gd { color: #e06c75; } /* Generic.Deleted */
+.highlight .gi { color: #98c379; } /* Generic.Inserted */
+.highlight .ge { font-style: italic; } /* Generic.Emph */
+.highlight .gs { font-weight: bold; } /* Generic.Strong */
+.highlight .gu { color: #56b6c2; font-weight: bold; } /* Generic.Subheading */
+.highlight .kc { color: #c678dd; font-weight: bold; } /* Keyword.Constant */
+.highlight .kd { color: #c678dd; font-weight: bold; } /* Keyword.Declaration */
+.highlight .kn { color: #c678dd; } /* Keyword.Namespace */
+.highlight .kp { color: #c678dd; } /* Keyword.Pseudo */
+.highlight .kr { color: #c678dd; font-weight: bold; } /* Keyword.Reserved */
+.highlight .kt { color: #e5c07b; } /* Keyword.Type */
+.highlight .m { color: #d19a66; } /* Literal.Number */
+.highlight .s { color: #98c379; } /* Literal.String */
+.highlight .na { color: #e06c75; } /* Name.Attribute */
+.highlight .nb { color: #56b6c2; } /* Name.Builtin */
+.highlight .nc { color: #e5c07b; font-weight: bold; } /* Name.Class */
+.highlight .no { color: #e06c75; } /* Name.Constant */
+.highlight .nd { color: #61afef; } /* Name.Decorator */
+.highlight .nf { color: #61afef; } /* Name.Function */
+.highlight .nn { color: #e5c07b; } /* Name.Namespace */
+.highlight .nt { color: #e06c75; } /* Name.Tag */
+.highlight .nv { color: #e06c75; } /* Name.Variable */
+.highlight .ow { color: #c678dd; font-weight: bold; } /* Operator.Word */
+.highlight .w { color: #abb2bf; } /* Text.Whitespace */
+.highlight .mb { color: #d19a66; } /* Literal.Number.Bin */
+.highlight .mf { color: #d19a66; } /* Literal.Number.Float */
+.highlight .mh { color: #d19a66; } /* Literal.Number.Hex */
+.highlight .mi { color: #d19a66; } /* Literal.Number.Integer */
+.highlight .mo { color: #d19a66; } /* Literal.Number.Oct */
+.highlight .sa { color: #98c379; } /* Literal.String.Affix */
+.highlight .sb { color: #98c379; } /* Literal.String.Backtick */
+.highlight .sc { color: #98c379; } /* Literal.String.Char */
+.highlight .dl { color: #98c379; } /* Literal.String.Delimiter */
+.highlight .sd { color: #98c379; font-style: italic; } /* Literal.String.Doc */
+.highlight .s2 { color: #98c379; } /* Literal.String.Double */
+.highlight .se { color: #d19a66; } /* Literal.String.Escape */
+.highlight .sh { color: #98c379; } /* Literal.String.Heredoc */
+.highlight .si { color: #98c379; } /* Literal.String.Interpol */
+.highlight .sx { color: #98c379; } /* Literal.String.Other */
+.highlight .sr { color: #56b6c2; } /* Literal.String.Regex */
+.highlight .s1 { color: #98c379; } /* Literal.String.Single */
+.highlight .ss { color: #98c379; } /* Literal.String.Symbol */
+.highlight .bp { color: #56b6c2; } /* Name.Builtin.Pseudo */
+.highlight .fm { color: #61afef; } /* Name.Function.Magic */
+.highlight .vc { color: #e06c75; } /* Name.Variable.Class */
+.highlight .vg { color: #e06c75; } /* Name.Variable.Global */
+.highlight .vi { color: #e06c75; } /* Name.Variable.Instance */
+.highlight .vm { color: #e06c75; } /* Name.Variable.Magic */
+.highlight .il { color: #d19a66; } /* Literal.Number.Integer.Long */
+
+/* --------------------------------------------------------------------------
+ Blockquotes / Callouts
+ -------------------------------------------------------------------------- */
+.article-body blockquote p {
+ margin-bottom: 8px;
+}
+
+.article-body blockquote p:last-child {
+ margin-bottom: 0;
+}
+
+/* Callout boxes */
+blockquote.callout-note {
+  border-left: 4px solid #3b82f6;
+  background: rgba(59, 130, 246, 0.08);
+  padding: 1rem 1.25rem;
+  border-radius: 0 8px 8px 0;
+  margin: 1.5rem 0;
+}
+
+blockquote.callout-warning {
+  border-left: 4px solid #f59e0b;
+  background: rgba(245, 158, 11, 0.08);
+  padding: 1rem 1.25rem;
+  border-radius: 0 8px 8px 0;
+  margin: 1.5rem 0;
+}
+
+blockquote.callout-tip {
+  border-left: 4px solid #10b981;
+  background: rgba(16, 185, 129, 0.08);
+  padding: 1rem 1.25rem;
+  border-radius: 0 8px 8px 0;
+  margin: 1.5rem 0;
+}
+
+/* callouts.js also assigns "danger" and "important" types; style them so
+   they don't fall through to the plain-blockquote rule below. */
+blockquote.callout-danger {
+  border-left: 4px solid #ef4444;
+  background: rgba(239, 68, 68, 0.08);
+  padding: 1rem 1.25rem;
+  border-radius: 0 8px 8px 0;
+  margin: 1.5rem 0;
+}
+
+blockquote.callout-important {
+  border-left: 4px solid #8b5cf6;
+  background: rgba(139, 92, 246, 0.08);
+  padding: 1rem 1.25rem;
+  border-radius: 0 8px 8px 0;
+  margin: 1.5rem 0;
+}
+
+/* Chained :not() for broader browser compatibility */
+/* Default blockquote styling (not a callout) */
+blockquote:not(.callout-note):not(.callout-warning):not(.callout-tip):not(.callout-danger):not(.callout-important) {
+  border-left: 4px solid var(--border-primary);
+  padding: 1rem 1.25rem;
+  margin: 1.5rem 0;
+  color: var(--text-secondary);
+}
+
+/* --------------------------------------------------------------------------
+ Tables
+ -------------------------------------------------------------------------- */
+.article-body table {
+ width: 100%;
+ border-collapse: collapse;
+ margin-bottom: 20px;
+ font-size: 0.9375rem;
+ border: 1px solid var(--table-border);
+ border-radius: 8px;
+ overflow: hidden;
+}
+
+.article-body thead {
+ background: var(--bg-tertiary);
+}
+
+.article-body th {
+ padding: 10px 16px;
+ text-align: left;
+ font-weight: 600;
+ color: var(--text-primary);
+ border-bottom: 2px solid var(--table-border);
+}
+
+.article-body td {
+ padding: 10px 16px;
+ border-bottom: 1px solid var(--table-border);
+ color: var(--text-secondary);
+}
+
+.article-body tbody tr:nth-child(even) {
+ background: var(--table-row-alt);
+}
+
+.article-body tbody tr:hover {
+ background: var(--accent-light);
+}
+
+/* --------------------------------------------------------------------------
+ Links in article
+ -------------------------------------------------------------------------- */
+.article-body a {
+ color: var(--text-link);
+ text-decoration: underline;
+ text-decoration-color: rgba(79, 70, 229, 0.3);
+ text-underline-offset: 2px;
+ transition: color var(--transition-fast), text-decoration-color var(--transition-fast);
+}
+
+.article-body a:hover {
+ color: var(--text-link-hover);
+ text-decoration-color: var(--text-link-hover);
+}
+
+/* --------------------------------------------------------------------------
+ Images
+ -------------------------------------------------------------------------- */
+.article-body img {
+ max-width: 100%;
+ height: auto;
+ border-radius: 8px;
+ margin: 16px 0;
+}
+
+/* --------------------------------------------------------------------------
+ Horizontal Rule
+ -------------------------------------------------------------------------- */
+.article-body hr {
+ border: none;
+ border-top: 1px solid var(--border-primary);
+ margin: 32px 0;
+}
+
+/* --------------------------------------------------------------------------
+ Hero Section (Index Page)
+ -------------------------------------------------------------------------- */
+.hero {
+ text-align: center;
+ padding: 80px 24px 60px;
+ background: var(--bg-hero);
+ border-radius: 16px;
+ margin-bottom: 48px;
+ color: var(--text-inverse);
+}
+
+.hero-title {
+ font-size: 2.75rem;
+ font-weight: 800;
+ letter-spacing: -0.03em;
+ margin-bottom: 12px;
+ line-height: 1.15;
+}
+
+.hero-tagline {
+ font-size: 1.25rem;
+ opacity: 0.9;
+ margin-bottom: 32px;
+ line-height: 1.5;
+ max-width: 560px;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+.hero-cta {
+ display: inline-block;
+ padding: 12px 32px;
+ background: rgba(255, 255, 255, 0.2);
+ color: var(--text-inverse);
+ border: 1px solid rgba(255, 255, 255, 0.3);
+ border-radius: 8px;
+ font-size: 1rem;
+ font-weight: 600;
+ text-decoration: none;
+ transition: background var(--transition-fast);
+ backdrop-filter: blur(4px);
+}
+
+.hero-cta:hover {
+ background: rgba(255, 255, 255, 0.3);
+ color: var(--text-inverse);
+ text-decoration: none;
+}
+
+/* --------------------------------------------------------------------------
+ Card Grid (Index Page)
+ -------------------------------------------------------------------------- */
+.card-grid {
+ display: grid;
+ grid-template-columns: repeat(auto-fill, minmax(280px, 1fr));
+ gap: 20px;
+ margin-bottom: 48px;
+}
+
+.card {
+ background: var(--bg-card);
+ border: 1px solid var(--border-primary);
+ border-radius: 12px;
+ padding: 24px;
+ transition: box-shadow var(--transition-normal), border-color var(--transition-fast),
+ background-color var(--transition-normal);
+}
+
+.card:hover {
+ box-shadow: var(--shadow-md);
+ border-color: var(--accent);
+}
+
+.card-title {
+ font-size: 1.0625rem;
+ font-weight: 600;
+ color: var(--text-primary);
+ margin-bottom: 12px;
+}
+
+.card-pages {
+ list-style: none;
+ margin-bottom: 16px;
+}
+
+.card-pages li {
+ margin-bottom: 8px;
+}
+
+.card-pages a {
+ font-size: 0.9375rem;
+ color: var(--text-link);
+ text-decoration: none;
+}
+
+.card-pages a:hover {
+ text-decoration: underline;
+}
+
+.card-page-desc {
+ font-size: 0.8125rem;
+ color: var(--text-tertiary);
+ margin-top: 2px;
+ line-height: 1.4;
+}
+
+.card-link {
+ display: inline-block;
+ font-size: 0.875rem;
+ font-weight: 500;
+ color: var(--accent);
+ text-decoration: none;
+ transition: color var(--transition-fast);
+}
+
+.card-link:hover {
+ color: var(--accent-hover);
+ text-decoration: none;
+}
+
+/* --------------------------------------------------------------------------
+ Table of Contents - Right Sidebar
+ -------------------------------------------------------------------------- */
+.toc-sidebar {
+ position: fixed;
+ top: var(--header-height);
+ right: 20px;
+ width: 200px;
+ max-height: calc(100vh - var(--header-height) - 20px);
+ overflow-y: auto;
+ font-size: 0.8125rem;
+ display: none;
+}
+
+.toc-container h3 {
+ font-size: 0.75rem;
+ font-weight: 600;
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+ color: var(--text-secondary);
+ margin-bottom: 0.75rem;
+ position: sticky;
+ top: 0;
+ background: var(--bg-primary);
+ padding: 12px 0 8px;
+ z-index: 1;
+}
+
+.toc-container ul {
+ list-style: none;
+ padding: 0;
+ margin: 0;
+}
+
+.toc-container li {
+ margin-bottom: 0.4rem;
+}
+
+.toc-container a {
+ color: var(--text-secondary);
+ text-decoration: none;
+ display: block;
+ padding: 0.2rem 0;
+ padding-left: 0.75rem;
+ border-left: 2px solid var(--border-primary);
+ transition: all 0.15s ease;
+}
+
+.toc-container a:hover {
+ color: var(--accent);
+ border-left-color: var(--accent);
+}
+
+/* Hide h3+ nested items to limit TOC depth to h2 only */
+.toc-container ul ul {
+ display: none;
+}
+
+/* Show TOC and adjust main content on wide screens only */
+@media (min-width: 1280px) {
+ .toc-sidebar {
+ display: block;
+ }
+
+ .content {
+ margin-right: 220px;
+ }
+}
+
+/* TOC is hidden by default (display: none above), no extra rule needed */
+
+/* --------------------------------------------------------------------------
+ Footer
+ -------------------------------------------------------------------------- */
+.footer {
+ padding: 24px 48px;
+ border-top: 1px solid var(--border-primary);
+ color: var(--text-tertiary);
+ font-size: 0.8125rem;
+ transition: border-color var(--transition-normal);
+}
+
+.footer a {
+ color: var(--text-link);
+ text-decoration: none;
+}
+
+.footer a:hover {
+ text-decoration: underline;
+}
+
+/* --------------------------------------------------------------------------
+ Sidebar Overlay (Mobile)
+ -------------------------------------------------------------------------- */
+.sidebar-overlay {
+ display: none;
+ position: fixed;
+ inset: 0;
+ background: rgba(0, 0, 0, 0.5);
+ z-index: 90;
+}
+
+.sidebar-overlay.open {
+ display: block;
+}
+
+/* --------------------------------------------------------------------------
+ Responsive Design
+ -------------------------------------------------------------------------- */
+@media (max-width: 768px) {
+ .sidebar {
+ transform: translateX(-100%);
+ transition: transform var(--transition-normal);
+ }
+
+ .sidebar.open {
+ transform: translateX(0);
+ }
+
+ .main-wrapper {
+ margin-left: 0;
+ }
+
+ .sidebar-toggle {
+ display: flex;
+ }
+
+ .content {
+ padding: 24px 20px;
+ }
+
+ .footer {
+ padding: 20px;
+ }
+
+ .hero {
+ padding: 48px 20px 40px;
+ }
+
+ .hero-title {
+ font-size: 2rem;
+ }
+
+ .hero-tagline {
+ font-size: 1.0625rem;
+ }
+
+ .article-title {
+ font-size: 1.625rem;
+ }
+
+ .card-grid {
+ grid-template-columns: 1fr;
+ }
+}
+
+@media (max-width: 480px) {
+ .hero-title {
+ font-size: 1.75rem;
+ }
+
+ .article-body h1 {
+ font-size: 1.5rem;
+ }
+
+ .article-body h2 {
+ font-size: 1.25rem;
+ }
+
+ .article-body h3 {
+ font-size: 1.125rem;
+ }
+}
+
+/* --------------------------------------------------------------------------
+ Scrollbar Styling
+ -------------------------------------------------------------------------- */
+.sidebar::-webkit-scrollbar {
+ width: 4px;
+}
+
+.sidebar::-webkit-scrollbar-track {
+ background: transparent;
+}
+
+.sidebar::-webkit-scrollbar-thumb {
+ background: var(--border-primary);
+ border-radius: 4px;
+}
+
+.sidebar::-webkit-scrollbar-thumb:hover {
+ background: var(--text-tertiary);
+}
+
+/* --------------------------------------------------------------------------
+ Print Styles
+ -------------------------------------------------------------------------- */
+@media print {
+ .sidebar,
+ .top-bar,
+ .sidebar-overlay {
+ display: none !important;
+ }
+
+ .main-wrapper {
+ margin-left: 0 !important;
+ }
+
+ .content {
+ padding: 0;
+ max-width: 100%;
+ }
+}
+
+/* --------------------------------------------------------------------------
+ Code Copy Button
+ -------------------------------------------------------------------------- */
+.copy-btn {
+ position: absolute;
+ top: 8px;
+ right: 8px;
+ padding: 4px 10px;
+ font-size: 0.75rem;
+ background: var(--bg-secondary);
+ color: var(--text-secondary);
+ border: 1px solid var(--border-primary);
+ border-radius: 4px;
+ cursor: pointer;
+ opacity: 0;
+ transition: opacity 0.15s ease;
+}
+
+pre:hover .copy-btn {
+ opacity: 1;
+}
+
+/* Make copy button always visible on touch devices */
+@media (hover: none) {
+ .copy-btn {
+ opacity: 0.7;
+ }
+}
+
+.copy-btn:hover {
+ background: var(--accent);
+ color: white;
+ border-color: var(--accent);
+}
+
+/* --------------------------------------------------------------------------
+ Prev/Next Page Navigation
+ -------------------------------------------------------------------------- */
+.page-nav {
+ display: flex;
+ justify-content: space-between;
+ gap: 1rem;
+ margin-top: 3rem;
+ padding-top: 2rem;
+ border-top: 1px solid var(--border-primary);
+}
+
+.page-nav-link {
+ display: flex;
+ flex-direction: column;
+ padding: 1rem 1.25rem;
+ border: 1px solid var(--border-primary);
+ border-radius: 8px;
+ text-decoration: none;
+ transition: all 0.15s ease;
+ max-width: 50%;
+}
+
/* Hover state mirrors .card:hover: accent border plus the theme-aware
   shadow token. The previous hard-coded rgba(79, 70, 229, 0.1) baked in
   the light-theme accent and would not track theme changes. */
.page-nav-link:hover {
  border-color: var(--accent);
  box-shadow: var(--shadow-md);
}
+
+.page-nav-next {
+ text-align: right;
+ margin-left: auto;
+}
+
+.page-nav-label {
+ font-size: 0.75rem;
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+ color: var(--text-secondary);
+ margin-bottom: 0.25rem;
+}
+
+.page-nav-title {
+ font-size: 0.95rem;
+ font-weight: 600;
+ color: var(--accent);
+}
+
+/* --------------------------------------------------------------------------
+ Search Modal (Cmd+K)
+ -------------------------------------------------------------------------- */
/* Full-viewport scrim behind the Cmd+K search modal.
   Hidden by default; shown via the .active class below. */
.search-modal-overlay {
  display: none;
  position: fixed;
  inset: 0; /* same shorthand the .sidebar-overlay scrim already uses */
  background: rgba(0, 0, 0, 0.5);
  z-index: 1000;
  justify-content: center;
  padding-top: 15vh;
}
+
+.search-modal-overlay.active {
+ display: flex;
+}
+
/* Search dialog panel. max-width keeps the fixed 560px panel on-screen
   on viewports narrower than 560px (the fixed width alone overflowed). */
.search-modal {
  background: var(--bg-primary);
  border: 1px solid var(--border-primary);
  border-radius: 12px;
  width: 560px;
  max-width: calc(100vw - 32px);
  max-height: 480px;
  display: flex;
  flex-direction: column;
  box-shadow: 0 16px 48px rgba(0, 0, 0, 0.2);
  overflow: hidden;
}
+
+.search-modal-input {
+ width: 100%;
+ padding: 16px 20px;
+ border: none;
+ border-bottom: 1px solid var(--border-primary);
+ background: transparent;
+ color: var(--text-primary);
+ font-size: 1rem;
+ outline: none;
+ box-sizing: border-box;
+}
+
+.search-modal-results {
+ overflow-y: auto;
+ flex: 1;
+}
+
+.search-modal .search-result-item {
+ display: flex;
+ flex-direction: column;
+ padding: 12px 20px;
+ text-decoration: none;
+ border-bottom: 1px solid var(--border-primary);
+ transition: background 0.1s ease;
+}
+
+.search-modal .search-result-item:hover,
+.search-modal .search-result-item.selected {
+ background: var(--bg-secondary);
+}
+
+.search-result-title {
+ font-weight: 600;
+ color: var(--text-primary);
+ font-size: 0.9rem;
+}
+
+.search-result-preview {
+ font-size: 0.8rem;
+ color: var(--text-secondary);
+ margin-top: 4px;
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+}
+
+.search-no-results {
+ padding: 20px;
+ text-align: center;
+ color: var(--text-secondary);
+}
+
+.search-modal-footer {
+ padding: 8px 20px;
+ font-size: 0.75rem;
+ color: var(--text-secondary);
+ border-top: 1px solid var(--border-primary);
+ text-align: center;
+}
+
+.search-modal-footer kbd {
+ display: inline-block;
+ padding: 2px 6px;
+ background: var(--bg-secondary);
+ border: 1px solid var(--border-primary);
+ border-radius: 4px;
+ font-family: inherit;
+ font-size: 0.7rem;
+}
+
+/* --------------------------------------------------------------------------
+ Scroll Spy - TOC Active State
+ -------------------------------------------------------------------------- */
+.toc-container a.toc-active {
+ color: var(--accent);
+ border-left-color: var(--accent);
+ font-weight: 500;
+}
+
+/* --------------------------------------------------------------------------
+ Top Bar - Desktop Visibility & Layout
+ -------------------------------------------------------------------------- */
+.top-bar-left {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+}
+
+.top-bar-title {
+ font-size: 1rem;
+ font-weight: 600;
+ color: var(--text-primary);
+ text-decoration: none;
+ letter-spacing: -0.01em;
+}
+
+.top-bar-title:hover {
+ color: var(--accent);
+ text-decoration: none;
+}
+
+.top-bar-right {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+}
+
+.top-bar-search-btn {
+ display: flex;
+ align-items: center;
+ gap: 6px;
+ padding: 6px 12px;
+ background: var(--bg-secondary);
+ border: 1px solid var(--border-primary);
+ border-radius: 8px;
+ color: var(--text-tertiary);
+ font-size: 0.8125rem;
+ cursor: pointer;
+ transition: color var(--transition-fast), background-color var(--transition-fast),
+ border-color var(--transition-fast);
+}
+
+.top-bar-search-btn:hover {
+ color: var(--text-primary);
+ background: var(--bg-tertiary);
+ border-color: var(--border-primary);
+}
+
+.top-bar-search-btn kbd {
+ display: inline-block;
+ padding: 1px 5px;
+ background: var(--bg-primary);
+ border: 1px solid var(--border-primary);
+ border-radius: 4px;
+ font-family: inherit;
+ font-size: 0.7rem;
+ color: var(--text-tertiary);
+}
+
+.top-bar-github {
+ display: flex;
+ align-items: center;
+ gap: 6px;
+ padding: 6px 12px;
+ border: 1px solid var(--border-primary);
+ border-radius: 6px;
+ color: var(--text-secondary);
+ text-decoration: none;
+ font-size: 0.85rem;
+ transition: color var(--transition-fast), background-color var(--transition-fast),
+ border-color var(--transition-fast);
+}
+
+.top-bar-github:hover {
+ color: var(--text-primary);
+ background: var(--bg-tertiary);
+ border-color: var(--accent);
+ text-decoration: none;
+}
+
+.top-bar-github svg {
+ flex-shrink: 0;
+}
+
+.github-stars {
+ font-weight: 600;
+ font-size: 0.8rem;
+}
+
+.github-stars:empty {
+ display: none;
+}
+
+/* --------------------------------------------------------------------------
+ Code Language Labels
+ -------------------------------------------------------------------------- */
+.code-label {
+ position: absolute;
+ top: 8px;
+ right: 60px;
+ font-size: 0.7rem;
+ color: var(--text-secondary);
+ text-transform: uppercase;
+ letter-spacing: 0.05em;
+ opacity: 0.6;
+}
diff --git a/src/docsfy/static/theme.js b/src/docsfy/static/theme.js
new file mode 100644
index 0000000..bebf5d0
--- /dev/null
+++ b/src/docsfy/static/theme.js
@@ -0,0 +1,20 @@
(function () {
  var root = document.documentElement;

  // localStorage can throw (private browsing, blocked storage) — treat as absent.
  function readStoredTheme() {
    try {
      return localStorage.getItem('theme');
    } catch (err) {
      return null;
    }
  }

  // Best-effort persistence; failures are silently ignored.
  function persistTheme(value) {
    try {
      localStorage.setItem('theme', value);
    } catch (err) {}
  }

  var toggleButton = document.getElementById('theme-toggle');

  // Apply the saved theme, otherwise honor the OS dark-mode preference.
  var saved = readStoredTheme();
  if (saved) {
    root.setAttribute('data-theme', saved);
  } else if (window.matchMedia('(prefers-color-scheme: dark)').matches) {
    root.setAttribute('data-theme', 'dark');
  }

  // Clicking the toggle flips between dark and light and persists the choice.
  if (toggleButton) {
    toggleButton.addEventListener('click', function () {
      var next = root.getAttribute('data-theme') === 'dark' ? 'light' : 'dark';
      root.setAttribute('data-theme', next);
      persistTheme(next);
    });
  }
})();
diff --git a/src/docsfy/storage.py b/src/docsfy/storage.py
new file mode 100644
index 0000000..db97af7
--- /dev/null
+++ b/src/docsfy/storage.py
@@ -0,0 +1,137 @@
+from __future__ import annotations
+
+import os
+import re
+from pathlib import Path
+
+import aiosqlite
+
# Statuses a project row may hold; writers validate against this set.
VALID_STATUSES = frozenset({"generating", "ready", "error"})

# Module-level paths are resolved once at import time from the DATA_DIR env var.
# Tests override these globals directly for isolation.
# DB_PATH is derived from DATA_DIR (previously it re-read the env var
# independently, leaving two sources of truth that could diverge).
DATA_DIR = Path(os.getenv("DATA_DIR", "/data"))
DB_PATH = DATA_DIR / "docsfy.db"
PROJECTS_DIR = DATA_DIR / "projects"
+
+
async def init_db() -> None:
    """Create the data directories and the ``projects`` table if absent.

    Safe to call repeatedly: directory creation uses ``exist_ok`` and the
    table is created with ``IF NOT EXISTS``.
    """
    DB_PATH.parent.mkdir(parents=True, exist_ok=True)
    PROJECTS_DIR.mkdir(parents=True, exist_ok=True)
    async with aiosqlite.connect(DB_PATH) as db:
        # Single-table schema: one row per documentation project, keyed by name.
        await db.execute("""
            CREATE TABLE IF NOT EXISTS projects (
                name TEXT PRIMARY KEY,
                repo_url TEXT NOT NULL,
                status TEXT NOT NULL DEFAULT 'generating',
                last_commit_sha TEXT,
                last_generated TEXT,
                page_count INTEGER DEFAULT 0,
                error_message TEXT,
                plan_json TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)
        await db.commit()
+
+
async def save_project(name: str, repo_url: str, status: str = "generating") -> None:
    """Insert a project row, or refresh it if ``name`` already exists.

    The upsert resets ``error_message`` to NULL so a re-run clears any stale
    failure left over from a previous generation attempt.

    Raises:
        ValueError: if ``status`` is not one of ``VALID_STATUSES``.
    """
    if status not in VALID_STATUSES:
        msg = f"Invalid project status: '{status}'. Valid: {', '.join(sorted(VALID_STATUSES))}"
        raise ValueError(msg)
    async with aiosqlite.connect(DB_PATH) as db:
        await db.execute(
            """INSERT INTO projects (name, repo_url, status, updated_at)
            VALUES (?, ?, ?, CURRENT_TIMESTAMP)
            ON CONFLICT(name) DO UPDATE SET
                repo_url = excluded.repo_url,
                status = excluded.status,
                error_message = NULL,
                updated_at = CURRENT_TIMESTAMP""",
            (name, repo_url, status),
        )
        await db.commit()
+
+
async def update_project_status(
    name: str,
    status: str,
    last_commit_sha: str | None = None,
    page_count: int | None = None,
    error_message: str | None = None,
    plan_json: str | None = None,
) -> None:
    """Update selected columns of an existing project row.

    Only keyword arguments that are not ``None`` are written; passing ``None``
    leaves the stored value untouched. When ``status`` is ``"ready"`` the
    ``last_generated`` timestamp is also refreshed.

    Raises:
        ValueError: if ``status`` is not one of ``VALID_STATUSES``.
    """
    if status not in VALID_STATUSES:
        msg = f"Invalid project status: '{status}'. Valid: {', '.join(sorted(VALID_STATUSES))}"
        raise ValueError(msg)
    async with aiosqlite.connect(DB_PATH) as db:
        # Build the SET clause incrementally; `values` mirrors `fields` order
        # so each '?' placeholder binds the matching appended value.
        fields = ["status = ?", "updated_at = CURRENT_TIMESTAMP"]
        values: list[str | int | None] = [status]
        if last_commit_sha is not None:
            fields.append("last_commit_sha = ?")
            values.append(last_commit_sha)
        if page_count is not None:
            fields.append("page_count = ?")
            values.append(page_count)
        if error_message is not None:
            fields.append("error_message = ?")
            values.append(error_message)
        if plan_json is not None:
            fields.append("plan_json = ?")
            values.append(plan_json)
        if status == "ready":
            fields.append("last_generated = CURRENT_TIMESTAMP")
        # Final positional parameter binds the WHERE clause, so it must be
        # appended unconditionally and last.
        values.append(name)
        # Fields list is built from hardcoded column names only (no user input)
        await db.execute(
            f"UPDATE projects SET {', '.join(fields)} WHERE name = ?", values
        )
        await db.commit()
+
+
async def get_project(name: str) -> dict[str, str | int | None] | None:
    """Fetch a single project row as a plain dict, or ``None`` if absent."""
    async with aiosqlite.connect(DB_PATH) as db:
        db.row_factory = aiosqlite.Row
        cursor = await db.execute("SELECT * FROM projects WHERE name = ?", (name,))
        record = await cursor.fetchone()
        return dict(record) if record else None
+
+
async def list_projects() -> list[dict[str, str | int | None]]:
    """Return summary dicts for every project, most recently updated first."""
    async with aiosqlite.connect(DB_PATH) as db:
        db.row_factory = aiosqlite.Row
        cursor = await db.execute(
            "SELECT name, repo_url, status, last_commit_sha, last_generated, page_count FROM projects ORDER BY updated_at DESC"
        )
        return [dict(record) for record in await cursor.fetchall()]
+
+
async def delete_project(name: str) -> bool:
    """Remove the named project row.

    Returns:
        ``True`` when a row existed and was deleted, ``False`` otherwise.
    """
    async with aiosqlite.connect(DB_PATH) as db:
        result = await db.execute("DELETE FROM projects WHERE name = ?", (name,))
        await db.commit()
        deleted = result.rowcount > 0
        return deleted
+
+
+def _validate_name(name: str) -> str:
+ """Validate project name to prevent path traversal."""
+ if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9._-]*$", name):
+ msg = f"Invalid project name: '{name}'"
+ raise ValueError(msg)
+ return name
+
+
def get_project_dir(name: str) -> Path:
    """Root directory for a project's on-disk data (name is validated first)."""
    return PROJECTS_DIR / _validate_name(name)


def get_project_site_dir(name: str) -> Path:
    """Per-project ``site`` subdirectory."""
    return PROJECTS_DIR / _validate_name(name) / "site"


def get_project_cache_dir(name: str) -> Path:
    """Per-project ``cache/pages`` subdirectory."""
    return PROJECTS_DIR / _validate_name(name) / "cache" / "pages"
diff --git a/src/docsfy/templates/index.html b/src/docsfy/templates/index.html
new file mode 100644
index 0000000..1cf60b7
--- /dev/null
+++ b/src/docsfy/templates/index.html
@@ -0,0 +1,163 @@
+
+
+
+
+
+ {{ project_name }}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% if repo_url %}
+
+
+
+
+ {% endif %}
+
+
+
+
+
+
+ {{ project_name }}
+ {{ tagline }}
+ {% if navigation and navigation|length > 0 and navigation[0].pages|length > 0 %}
+ Get Started
+ {% endif %}
+
+
+
+ {% for group in navigation %}
+
+
{{ group.group }}
+
+ {% for page in group.pages %}
+ -
+ {{ page.title }}
+ {% if page.description %}
+
{{ page.description }}
+ {% endif %}
+
+ {% endfor %}
+
+ {% if group.pages|length > 0 %}
+
Explore →
+ {% endif %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/docsfy/templates/page.html b/src/docsfy/templates/page.html
new file mode 100644
index 0000000..bfbc7e5
--- /dev/null
+++ b/src/docsfy/templates/page.html
@@ -0,0 +1,168 @@
+
+
+
+
+
+ {{ title }} - {{ project_name }}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% if repo_url %}
+
+
+
+
+ {% endif %}
+
+
+
+
+
+
+
+
+ {{ content | safe }}
+
+
+
+ {% if toc %}
+
+ {% endif %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/test_ai_client.py b/tests/test_ai_client.py
new file mode 100644
index 0000000..ee08d21
--- /dev/null
+++ b/tests/test_ai_client.py
@@ -0,0 +1,30 @@
from __future__ import annotations


def test_reexports_available() -> None:
    """The ai_client package re-exports its public API and known providers."""
    from docsfy.ai_client import (
        PROVIDERS,
        VALID_AI_PROVIDERS,
        call_ai_cli,
        check_ai_cli_available,
        get_ai_cli_timeout,
        run_parallel_with_limit,
    )

    # All three supported CLI providers must be registered.
    assert "claude" in PROVIDERS
    assert "gemini" in PROVIDERS
    assert "cursor" in PROVIDERS
    assert VALID_AI_PROVIDERS == frozenset({"claude", "gemini", "cursor"})
    # Smoke-check that the callables are exported (not accidentally shadowed).
    assert callable(call_ai_cli)
    assert callable(check_ai_cli_available)
    assert callable(get_ai_cli_timeout)
    assert callable(run_parallel_with_limit)


def test_provider_config_types() -> None:
    """Every registered provider entry is a ProviderConfig with expected fields."""
    from docsfy.ai_client import PROVIDERS, ProviderConfig

    for name, config in PROVIDERS.items():
        assert isinstance(config, ProviderConfig)
        assert isinstance(config.binary, str)
        assert callable(config.build_cmd)
diff --git a/tests/test_config.py b/tests/test_config.py
new file mode 100644
index 0000000..736bbb7
--- /dev/null
+++ b/tests/test_config.py
@@ -0,0 +1,43 @@
from __future__ import annotations

import os
from unittest.mock import patch

import pytest


def test_default_settings() -> None:
    """With an empty environment, Settings falls back to its documented defaults."""
    from docsfy.config import Settings

    # _env_file=None keeps a developer's local .env from leaking into the test.
    with patch.dict(os.environ, {}, clear=True):
        settings = Settings(_env_file=None)
        assert settings.ai_provider == "claude"
        assert settings.ai_model == "claude-opus-4-6[1m]"
        assert settings.ai_cli_timeout == 60
        assert settings.log_level == "INFO"
        assert settings.data_dir == "/data"


def test_custom_settings() -> None:
    """Environment variables override each default setting."""
    from docsfy.config import Settings

    env = {
        "AI_PROVIDER": "gemini",
        "AI_MODEL": "gemini-2.5-pro",
        "AI_CLI_TIMEOUT": "120",
        "LOG_LEVEL": "DEBUG",
    }
    with patch.dict(os.environ, env, clear=True):
        settings = Settings(_env_file=None)
        assert settings.ai_provider == "gemini"
        assert settings.ai_model == "gemini-2.5-pro"
        assert settings.ai_cli_timeout == 120  # coerced from the string "120"
        assert settings.log_level == "DEBUG"


def test_invalid_timeout_rejected() -> None:
    """A non-positive timeout fails validation at construction time."""
    from docsfy.config import Settings

    # NOTE(review): pytest.raises(Exception) is broad; narrowing to the
    # settings library's ValidationError would make failures more precise.
    with patch.dict(os.environ, {"AI_CLI_TIMEOUT": "0"}, clear=True):
        with pytest.raises(Exception):
            Settings(_env_file=None)
diff --git a/tests/test_generator.py b/tests/test_generator.py
new file mode 100644
index 0000000..1ae88bc
--- /dev/null
+++ b/tests/test_generator.py
@@ -0,0 +1,122 @@
from __future__ import annotations

import json
from pathlib import Path
from unittest.mock import patch

import pytest


@pytest.fixture
def sample_plan() -> dict:
    """Minimal documentation plan in the shape the planner is expected to emit."""
    return {
        "project_name": "test-repo",
        "tagline": "A test project",
        "navigation": [
            {
                "group": "Getting Started",
                "pages": [
                    {
                        "slug": "introduction",
                        "title": "Introduction",
                        "description": "Overview",
                    },
                    {
                        "slug": "quickstart",
                        "title": "Quick Start",
                        "description": "Get started fast",
                    },
                ],
            }
        ],
    }


# NOTE(review): bare `async def` tests assume pytest-asyncio in auto mode (or an
# equivalent plugin) — confirm in the project's pytest configuration.
async def test_run_planner(tmp_path: Path, sample_plan: dict) -> None:
    """run_planner returns the parsed plan when the AI call succeeds."""
    from docsfy.generator import run_planner

    with patch(
        "docsfy.generator.call_ai_cli", return_value=(True, json.dumps(sample_plan))
    ):
        plan = await run_planner(
            repo_path=tmp_path,
            project_name="test-repo",
            ai_provider="claude",
            ai_model="opus",
        )

    assert plan is not None
    assert plan["project_name"] == "test-repo"
    assert len(plan["navigation"]) == 1


async def test_run_planner_ai_failure(tmp_path: Path) -> None:
    """A failed AI call (ok=False) surfaces as RuntimeError with the CLI's message."""
    from docsfy.generator import run_planner

    with patch("docsfy.generator.call_ai_cli", return_value=(False, "AI error")):
        with pytest.raises(RuntimeError, match="AI error"):
            await run_planner(
                repo_path=tmp_path,
                project_name="test-repo",
                ai_provider="claude",
                ai_model="opus",
            )


async def test_run_planner_bad_json(tmp_path: Path) -> None:
    """Unparseable planner output raises RuntimeError rather than returning junk."""
    from docsfy.generator import run_planner

    with patch("docsfy.generator.call_ai_cli", return_value=(True, "not json")):
        with pytest.raises(RuntimeError, match="Failed to parse"):
            await run_planner(
                repo_path=tmp_path,
                project_name="test-repo",
                ai_provider="claude",
                ai_model="opus",
            )


async def test_generate_page(tmp_path: Path) -> None:
    """generate_page returns the AI's markdown and writes it to the cache dir."""
    from docsfy.generator import generate_page

    cache_dir = tmp_path / "cache"
    cache_dir.mkdir()

    with patch(
        "docsfy.generator.call_ai_cli",
        return_value=(True, "# Introduction\n\nWelcome!"),
    ):
        md = await generate_page(
            repo_path=tmp_path,
            slug="introduction",
            title="Introduction",
            description="Overview",
            cache_dir=cache_dir,
            ai_provider="claude",
            ai_model="opus",
        )

    assert "# Introduction" in md
    # Cache file is named after the slug.
    assert (cache_dir / "introduction.md").exists()


async def test_generate_page_uses_cache(tmp_path: Path) -> None:
    """With use_cache=True a pre-existing cache file short-circuits the AI call."""
    from docsfy.generator import generate_page

    cache_dir = tmp_path / "cache"
    cache_dir.mkdir()
    cached = cache_dir / "introduction.md"
    cached.write_text("# Cached content")

    # No call_ai_cli patch: hitting the real CLI would fail, so passing proves
    # the cached file was used.
    md = await generate_page(
        repo_path=tmp_path,
        slug="introduction",
        title="Introduction",
        description="Overview",
        cache_dir=cache_dir,
        ai_provider="claude",
        ai_model="opus",
        use_cache=True,
    )

    assert md == "# Cached content"
diff --git a/tests/test_integration.py b/tests/test_integration.py
new file mode 100644
index 0000000..5d39fb7
--- /dev/null
+++ b/tests/test_integration.py
@@ -0,0 +1,118 @@
from __future__ import annotations

from pathlib import Path
from unittest.mock import patch

import pytest
from httpx import ASGITransport, AsyncClient


# NOTE(review): this fixture is duplicated in tests/test_main.py — a shared
# conftest.py fixture would remove the copy-paste.
@pytest.fixture
async def client(tmp_path: Path):
    """HTTP client against the app with storage redirected to a temp directory."""
    import docsfy.storage as storage
    from docsfy.main import _generating

    # Save originals so module-level globals are restored after the test.
    orig_db = storage.DB_PATH
    orig_data = storage.DATA_DIR
    orig_projects = storage.PROJECTS_DIR

    storage.DB_PATH = tmp_path / "test.db"
    storage.DATA_DIR = tmp_path
    storage.PROJECTS_DIR = tmp_path / "projects"
    _generating.clear()

    # Imported after the globals are patched so the app sees the temp paths.
    from docsfy.main import app

    await storage.init_db()
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://test") as ac:
        yield ac

    # Restore originals
    storage.DB_PATH = orig_db
    storage.DATA_DIR = orig_data
    storage.PROJECTS_DIR = orig_projects
    _generating.clear()


async def test_full_flow_mock(client: AsyncClient, tmp_path: Path) -> None:
    """Test the full generate -> status -> download flow with mocked AI."""
    import docsfy.storage as storage

    sample_plan = {
        "project_name": "test-repo",
        "tagline": "A test project",
        "navigation": [
            {
                "group": "Getting Started",
                "pages": [
                    {
                        "slug": "introduction",
                        "title": "Introduction",
                        "description": "Overview",
                    },
                ],
            }
        ],
    }

    # Stub out everything that would touch the network or an AI CLI.
    with (
        patch("docsfy.main.check_ai_cli_available", return_value=(True, "")),
        patch("docsfy.main.clone_repo", return_value=(tmp_path / "repo", "abc123")),
        patch("docsfy.main.run_planner", return_value=sample_plan),
        patch(
            "docsfy.main.generate_all_pages",
            return_value={"introduction": "# Intro\n\nWelcome!"},
        ),
    ):
        from docsfy.main import _run_generation

        await storage.save_project(
            name="test-repo",
            repo_url="https://github.com/org/test-repo.git",
            status="generating",
        )

        # Run the background generation coroutine directly (no create_task).
        await _run_generation(
            repo_url="https://github.com/org/test-repo.git",
            repo_path=None,
            project_name="test-repo",
            ai_provider="claude",
            ai_model="opus",
            ai_cli_timeout=60,
        )

    # Check status
    response = await client.get("/api/status")
    assert response.status_code == 200
    projects = response.json()["projects"]
    assert len(projects) == 1
    assert projects[0]["name"] == "test-repo"
    assert projects[0]["status"] == "ready"

    # Check project details
    response = await client.get("/api/projects/test-repo")
    assert response.status_code == 200
    assert response.json()["last_commit_sha"] == "abc123"

    # Check docs are served
    response = await client.get("/docs/test-repo/index.html")
    assert response.status_code == 200
    assert "test-repo" in response.text

    response = await client.get("/docs/test-repo/introduction.html")
    assert response.status_code == 200
    assert "Welcome!" in response.text

    # Download
    response = await client.get("/api/projects/test-repo/download")
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/gzip"

    # Delete
    response = await client.delete("/api/projects/test-repo")
    assert response.status_code == 200

    response = await client.get("/api/projects/test-repo")
    assert response.status_code == 404
diff --git a/tests/test_json_parser.py b/tests/test_json_parser.py
new file mode 100644
index 0000000..2a9f7e3
--- /dev/null
+++ b/tests/test_json_parser.py
@@ -0,0 +1,62 @@
from __future__ import annotations

import json


def test_parse_direct_json() -> None:
    """A clean JSON document round-trips unchanged."""
    from docsfy.json_parser import parse_json_response

    data = {"project_name": "test", "navigation": []}
    result = parse_json_response(json.dumps(data))
    assert result == data


def test_parse_json_with_surrounding_text() -> None:
    """JSON embedded in chatty prose is still extracted."""
    from docsfy.json_parser import parse_json_response

    raw = 'Here is the plan:\n{"project_name": "test", "navigation": []}\nDone!'
    result = parse_json_response(raw)
    assert result is not None
    assert result["project_name"] == "test"


def test_parse_json_from_code_block() -> None:
    """JSON wrapped in a markdown ```json fence is extracted."""
    from docsfy.json_parser import parse_json_response

    raw = '```json\n{"project_name": "test", "navigation": []}\n```'
    result = parse_json_response(raw)
    assert result is not None
    assert result["project_name"] == "test"


def test_parse_json_nested_braces() -> None:
    """Nested objects don't confuse the brace-matching extraction."""
    from docsfy.json_parser import parse_json_response

    data = {"project_name": "test", "meta": {"key": "value"}, "navigation": []}
    raw = f"Some text before {json.dumps(data)} some text after"
    result = parse_json_response(raw)
    assert result is not None
    assert result["meta"]["key"] == "value"


def test_parse_json_returns_none_for_garbage() -> None:
    """Non-JSON input yields None rather than raising."""
    from docsfy.json_parser import parse_json_response

    result = parse_json_response("this is not json at all")
    assert result is None


def test_parse_json_empty_string() -> None:
    """Empty input yields None."""
    from docsfy.json_parser import parse_json_response

    result = parse_json_response("")
    assert result is None


def test_parse_json_with_escaped_quotes() -> None:
    """Escaped quotes inside string values survive extraction."""
    from docsfy.json_parser import parse_json_response

    raw = '{"project_name": "test \\"quoted\\" name", "navigation": []}'
    result = parse_json_response(raw)
    assert result is not None
    assert "quoted" in result["project_name"]
diff --git a/tests/test_main.py b/tests/test_main.py
new file mode 100644
index 0000000..75fdf87
--- /dev/null
+++ b/tests/test_main.py
@@ -0,0 +1,104 @@
from __future__ import annotations

from pathlib import Path
from unittest.mock import patch

import pytest
from httpx import ASGITransport, AsyncClient


# NOTE(review): this fixture is duplicated in tests/test_integration.py — a
# shared conftest.py fixture would remove the copy-paste.
@pytest.fixture
async def client(tmp_path: Path):
    """HTTP client against the app with storage redirected to a temp directory."""
    import docsfy.storage as storage
    from docsfy.main import _generating

    # Save originals so module-level globals are restored after the test.
    orig_db = storage.DB_PATH
    orig_data = storage.DATA_DIR
    orig_projects = storage.PROJECTS_DIR

    storage.DB_PATH = tmp_path / "test.db"
    storage.DATA_DIR = tmp_path
    storage.PROJECTS_DIR = tmp_path / "projects"
    _generating.clear()

    # Imported after the globals are patched so the app sees the temp paths.
    from docsfy.main import app

    await storage.init_db()
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://test") as ac:
        yield ac

    # Restore originals
    storage.DB_PATH = orig_db
    storage.DATA_DIR = orig_data
    storage.PROJECTS_DIR = orig_projects
    _generating.clear()


async def test_health_endpoint(client: AsyncClient) -> None:
    """/health reports ok."""
    response = await client.get("/health")
    assert response.status_code == 200
    assert response.json()["status"] == "ok"


async def test_status_endpoint_empty(client: AsyncClient) -> None:
    """/api/status on a fresh database lists no projects."""
    response = await client.get("/api/status")
    assert response.status_code == 200
    assert response.json()["projects"] == []


async def test_generate_endpoint_invalid_url(client: AsyncClient) -> None:
    """A malformed repo_url is rejected with a validation error (422)."""
    response = await client.post("/api/generate", json={"repo_url": "not-a-url"})
    assert response.status_code == 422


async def test_generate_endpoint_starts_generation(client: AsyncClient) -> None:
    """POST /api/generate accepts the job and reports the derived project name."""
    with patch("docsfy.main.asyncio.create_task") as mock_task:
        # Close the coroutine instead of scheduling it, so no background
        # generation actually runs during the test.
        mock_task.side_effect = lambda coro: coro.close()
        response = await client.post(
            "/api/generate",
            json={"repo_url": "https://github.com/org/repo.git"},
        )
    assert response.status_code == 202
    body = response.json()
    assert body["project"] == "repo"
    assert body["status"] == "generating"


async def test_get_project_not_found(client: AsyncClient) -> None:
    """Unknown project names return 404."""
    response = await client.get("/api/projects/nonexistent")
    assert response.status_code == 404


async def test_generate_endpoint_with_force(client: AsyncClient) -> None:
    """force=True is accepted and still yields a 202 with the project name."""
    with patch("docsfy.main.asyncio.create_task") as mock_task:
        mock_task.side_effect = lambda coro: coro.close()
        response = await client.post(
            "/api/generate",
            json={"repo_url": "https://github.com/org/repo.git", "force": True},
        )
    assert response.status_code == 202
    body = response.json()
    assert body["project"] == "repo"


async def test_generate_endpoint_local_path(
    client: AsyncClient, tmp_path: Path
) -> None:
    """A local repo_path (directory containing .git) is accepted as a source."""
    # Create a fake git repo
    (tmp_path / "myrepo" / ".git").mkdir(parents=True)
    with patch("docsfy.main.asyncio.create_task") as mock_task:
        mock_task.side_effect = lambda coro: coro.close()
        response = await client.post(
            "/api/generate",
            json={"repo_path": str(tmp_path / "myrepo")},
        )
    assert response.status_code == 202
    body = response.json()
    assert body["project"] == "myrepo"


async def test_delete_project_not_found(client: AsyncClient) -> None:
    """Deleting an unknown project returns 404."""
    response = await client.delete("/api/projects/nonexistent")
    assert response.status_code == 404
diff --git a/tests/test_models.py b/tests/test_models.py
new file mode 100644
index 0000000..117f951
--- /dev/null
+++ b/tests/test_models.py
@@ -0,0 +1,100 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+
+def test_generate_request_valid_https() -> None:
+ from docsfy.models import GenerateRequest
+
+ req = GenerateRequest(repo_url="https://github.com/org/repo.git")
+ assert req.repo_url == "https://github.com/org/repo.git"
+
+
+def test_generate_request_valid_ssh() -> None:
+ from docsfy.models import GenerateRequest
+
+ req = GenerateRequest(repo_url="git@github.com:org/repo.git")
+ assert req.repo_url == "git@github.com:org/repo.git"
+
+
+def test_generate_request_extracts_project_name() -> None:
+ from docsfy.models import GenerateRequest
+
+ req = GenerateRequest(repo_url="https://github.com/org/my-repo.git")
+ assert req.project_name == "my-repo"
+
+ req2 = GenerateRequest(repo_url="https://github.com/org/my-repo")
+ assert req2.project_name == "my-repo"
+
+
+def test_generate_request_invalid_url() -> None:
+ from docsfy.models import GenerateRequest
+
+ with pytest.raises(Exception):
+ GenerateRequest(repo_url="not-a-url")
+
+
+def test_doc_page_model() -> None:
+ from docsfy.models import DocPage
+
+ page = DocPage(slug="intro", title="Introduction", description="Project overview")
+ assert page.slug == "intro"
+
+
+def test_doc_plan_model() -> None:
+ from docsfy.models import DocPage, DocPlan, NavGroup
+
+ plan = DocPlan(
+ project_name="my-repo",
+ tagline="A cool project",
+ navigation=[
+ NavGroup(
+ group="Getting Started",
+ pages=[
+ DocPage(slug="intro", title="Introduction", description="Overview")
+ ],
+ )
+ ],
+ )
+ assert plan.project_name == "my-repo"
+ assert len(plan.navigation) == 1
+ assert len(plan.navigation[0].pages) == 1
+
+
+def test_generate_request_local_path(tmp_path: Path) -> None:
+ from docsfy.models import GenerateRequest
+
+ # Create a fake git repo
+ (tmp_path / ".git").mkdir()
+ req = GenerateRequest(repo_path=str(tmp_path))
+ assert req.project_name == tmp_path.name
+
+
+def test_generate_request_requires_source() -> None:
+ from docsfy.models import GenerateRequest
+
+ with pytest.raises(Exception):
+ GenerateRequest()
+
+
+def test_generate_request_rejects_both() -> None:
+ from docsfy.models import GenerateRequest
+
+ with pytest.raises(Exception):
+ GenerateRequest(
+ repo_url="https://github.com/org/repo.git", repo_path="/some/path"
+ )
+
+
+def test_project_status_model() -> None:
+ from docsfy.models import ProjectStatus
+
+ status = ProjectStatus(
+ name="my-repo",
+ repo_url="https://github.com/org/my-repo.git",
+ status="ready",
+ )
+ assert status.name == "my-repo"
+ assert status.status == "ready"
diff --git a/tests/test_prompts.py b/tests/test_prompts.py
new file mode 100644
index 0000000..81aa582
--- /dev/null
+++ b/tests/test_prompts.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+
+def test_build_planner_prompt() -> None:
+ from docsfy.prompts import build_planner_prompt
+
+ prompt = build_planner_prompt("my-repo")
+ assert "my-repo" in prompt
+ assert "JSON" in prompt
+ assert "project_name" in prompt
+ assert "navigation" in prompt
+
+
+def test_build_page_prompt() -> None:
+ from docsfy.prompts import build_page_prompt
+
+ prompt = build_page_prompt(
+ project_name="my-repo",
+ page_title="Installation",
+ page_description="How to install the project",
+ )
+ assert "my-repo" in prompt
+ assert "Installation" in prompt
+ assert "markdown" in prompt.lower()
diff --git a/tests/test_renderer.py b/tests/test_renderer.py
new file mode 100644
index 0000000..1aec6d3
--- /dev/null
+++ b/tests/test_renderer.py
@@ -0,0 +1,81 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+
+def test_render_page_to_html() -> None:
+ from docsfy.renderer import render_page
+
+ html = render_page(
+ markdown_content="# Hello\n\nThis is a test.",
+ page_title="Hello",
+ project_name="test-repo",
+ tagline="A test project",
+ navigation=[{"group": "Docs", "pages": [{"slug": "hello", "title": "Hello"}]}],
+ current_slug="hello",
+ )
+    assert "<h1" in html
+
+
+def test_render_site(tmp_path: Path) -> None:
+ from docsfy.renderer import render_site
+
+ plan = {
+ "project_name": "test-repo",
+ "tagline": "A test project",
+ "navigation": [
+ {
+ "group": "Getting Started",
+ "pages": [
+ {
+ "slug": "introduction",
+ "title": "Introduction",
+ "description": "Overview",
+ },
+ ],
+ },
+ ],
+ }
+ pages = {"introduction": "# Introduction\n\nWelcome to test-repo."}
+ output_dir = tmp_path / "site"
+
+ render_site(plan=plan, pages=pages, output_dir=output_dir)
+
+ assert (output_dir / "index.html").exists()
+ assert (output_dir / "introduction.html").exists()
+ assert (output_dir / "assets" / "style.css").exists()
+ index_html = (output_dir / "index.html").read_text()
+ assert "test-repo" in index_html
+ page_html = (output_dir / "introduction.html").read_text()
+ assert "Welcome to test-repo" in page_html
+
+
+def test_search_index_generated(tmp_path: Path) -> None:
+ from docsfy.renderer import render_site
+
+ plan = {
+ "project_name": "test-repo",
+ "tagline": "Test",
+ "navigation": [
+ {
+ "group": "Docs",
+ "pages": [{"slug": "intro", "title": "Intro", "description": ""}],
+ }
+ ],
+ }
+ pages = {"intro": "# Intro\n\nSome searchable content here."}
+ output_dir = tmp_path / "site"
+
+ render_site(plan=plan, pages=pages, output_dir=output_dir)
+ assert (output_dir / "search-index.json").exists()
+
+ import json
+
+ index = json.loads((output_dir / "search-index.json").read_text())
+ assert len(index) == 1
+ assert index[0]["slug"] == "intro"
+ assert index[0]["title"] == "Intro"
+ assert "searchable content" in index[0]["content"]
diff --git a/tests/test_repository.py b/tests/test_repository.py
new file mode 100644
index 0000000..023b5bb
--- /dev/null
+++ b/tests/test_repository.py
@@ -0,0 +1,82 @@
+from __future__ import annotations
+
+from pathlib import Path
+from unittest.mock import MagicMock, patch
+
+
+def test_extract_repo_name_https() -> None:
+ from docsfy.repository import extract_repo_name
+
+ assert extract_repo_name("https://github.com/org/my-repo.git") == "my-repo"
+ assert extract_repo_name("https://github.com/org/my-repo") == "my-repo"
+
+
+def test_extract_repo_name_ssh() -> None:
+ from docsfy.repository import extract_repo_name
+
+ assert extract_repo_name("git@github.com:org/my-repo.git") == "my-repo"
+
+
+def test_clone_repo_success(tmp_path: Path) -> None:
+ from docsfy.repository import clone_repo
+
+ with patch("docsfy.repository.subprocess.run") as mock_run:
+ mock_run.side_effect = [
+ MagicMock(returncode=0, stdout="", stderr=""),
+ MagicMock(returncode=0, stdout="abc123def\n", stderr=""),
+ ]
+ repo_path, sha = clone_repo("https://github.com/org/repo.git", tmp_path)
+
+ assert repo_path == tmp_path / "repo"
+ assert sha == "abc123def" # pragma: allowlist secret
+ assert mock_run.call_count == 2
+
+ # Verify clone command
+ clone_call = mock_run.call_args_list[0]
+ clone_cmd = clone_call.args[0]
+ assert "clone" in clone_cmd
+ assert "--depth" in clone_cmd
+ assert "1" in clone_cmd
+ assert "--" in clone_cmd
+ assert "https://github.com/org/repo.git" in clone_cmd
+ assert str(tmp_path / "repo") in clone_cmd
+
+ # Verify rev-parse command
+ revparse_call = mock_run.call_args_list[1]
+ assert "rev-parse" in revparse_call.args[0]
+ assert revparse_call.kwargs.get("cwd") == tmp_path / "repo"
+
+
+def test_clone_repo_failure(tmp_path: Path) -> None:
+ import pytest
+ from docsfy.repository import clone_repo
+
+ with patch("docsfy.repository.subprocess.run") as mock_run:
+ mock_run.return_value = MagicMock(
+ returncode=128, stdout="", stderr="fatal: repo not found"
+ )
+ with pytest.raises(RuntimeError, match="Clone failed"):
+ clone_repo("https://github.com/org/bad-repo.git", tmp_path)
+
+
+def test_get_local_repo_info(tmp_path: Path) -> None:
+ from docsfy.repository import get_local_repo_info
+
+ with patch("docsfy.repository.subprocess.run") as mock_run:
+ mock_run.return_value = MagicMock(returncode=0, stdout="def456\n", stderr="")
+ path, sha = get_local_repo_info(tmp_path)
+
+ assert path == tmp_path
+ assert sha == "def456"
+
+
+def test_get_local_repo_info_failure(tmp_path: Path) -> None:
+ import pytest
+ from docsfy.repository import get_local_repo_info
+
+ with patch("docsfy.repository.subprocess.run") as mock_run:
+ mock_run.return_value = MagicMock(
+ returncode=1, stdout="", stderr="fatal: not a git repository"
+ )
+ with pytest.raises(RuntimeError, match="Failed to get commit SHA"):
+ get_local_repo_info(tmp_path)
diff --git a/tests/test_storage.py b/tests/test_storage.py
new file mode 100644
index 0000000..ab7075e
--- /dev/null
+++ b/tests/test_storage.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+
+@pytest.fixture
+async def db_path(tmp_path: Path) -> Path:
+ import docsfy.storage as storage
+
+ db = tmp_path / "test.db"
+ storage.DB_PATH = db
+ storage.DATA_DIR = tmp_path
+ storage.PROJECTS_DIR = tmp_path / "projects"
+ await storage.init_db()
+ return db
+
+
+async def test_init_db_creates_table(db_path: Path) -> None:
+ assert db_path.exists()
+
+
+async def test_save_and_get_project(db_path: Path) -> None:
+ from docsfy.storage import get_project, save_project
+
+ await save_project(
+ name="my-repo",
+ repo_url="https://github.com/org/my-repo.git",
+ status="generating",
+ )
+ project = await get_project("my-repo")
+ assert project is not None
+ assert project["name"] == "my-repo"
+ assert project["repo_url"] == "https://github.com/org/my-repo.git"
+ assert project["status"] == "generating"
+
+
+async def test_update_project_status(db_path: Path) -> None:
+ from docsfy.storage import get_project, save_project, update_project_status
+
+ await save_project(
+ name="my-repo",
+ repo_url="https://github.com/org/my-repo.git",
+ status="generating",
+ )
+ await update_project_status(
+ "my-repo", status="ready", last_commit_sha="abc123", page_count=5
+ )
+ project = await get_project("my-repo")
+ assert project is not None
+ assert project["status"] == "ready"
+ assert project["last_commit_sha"] == "abc123"
+ assert project["page_count"] == 5
+
+
+async def test_list_projects(db_path: Path) -> None:
+ from docsfy.storage import list_projects, save_project
+
+ await save_project(
+ name="repo-a", repo_url="https://github.com/org/repo-a.git", status="ready"
+ )
+ await save_project(
+ name="repo-b", repo_url="https://github.com/org/repo-b.git", status="generating"
+ )
+ projects = await list_projects()
+ assert len(projects) == 2
+
+
+async def test_delete_project(db_path: Path) -> None:
+ from docsfy.storage import delete_project, get_project, save_project
+
+ await save_project(
+ name="my-repo", repo_url="https://github.com/org/my-repo.git", status="ready"
+ )
+ deleted = await delete_project("my-repo")
+ assert deleted is True
+ project = await get_project("my-repo")
+ assert project is None
+
+
+async def test_delete_nonexistent_project(db_path: Path) -> None:
+ from docsfy.storage import delete_project
+
+ deleted = await delete_project("no-such-repo")
+ assert deleted is False
+
+
+async def test_get_nonexistent_project(db_path: Path) -> None:
+ from docsfy.storage import get_project
+
+ project = await get_project("no-such-repo")
+ assert project is None
diff --git a/tox.toml b/tox.toml
new file mode 100644
index 0000000..2bb848e
--- /dev/null
+++ b/tox.toml
@@ -0,0 +1,26 @@
+skipsdist = true
+
+envlist = ["unused-code", "unittests"]
+
+[env.unused-code]
+deps = ["python-utility-scripts"]
+commands = [
+ [
+ "pyutils-unusedcode",
+ ],
+]
+
+[env.unittests]
+deps = ["uv"]
+commands = [
+ [
+ "uv",
+ "run",
+ "--extra",
+ "dev",
+ "pytest",
+ "-n",
+ "auto",
+ "tests",
+ ],
+]
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000..66f9dd4
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,534 @@
+version = 1
+revision = 3
+requires-python = ">=3.12"
+
+[[package]]
+name = "ai-cli-runner"
+version = "0.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "python-simple-logger" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/98/bb/7ae7870b6e07bae5a322c29b27f06c62efb0323543cdc4be6e8e6f9106e0/ai_cli_runner-0.1.0.tar.gz", hash = "sha256:c40b1eb5ec14976d7ee23cdb015b1cce79aad39a205057c018986c361d5afafd", size = 20564, upload-time = "2026-03-04T11:51:39.335Z" }
+
+[[package]]
+name = "aiosqlite"
+version = "0.22.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" },
+]
+
+[[package]]
+name = "annotated-doc"
+version = "0.0.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" },
+]
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
+]
+
+[[package]]
+name = "anyio"
+version = "4.12.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "idna" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
+]
+
+[[package]]
+name = "certifi"
+version = "2026.2.25"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" },
+]
+
+[[package]]
+name = "click"
+version = "8.3.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
+]
+
+[[package]]
+name = "colorlog"
+version = "6.10.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a2/61/f083b5ac52e505dfc1c624eafbf8c7589a0d7f32daa398d2e7590efa5fda/colorlog-6.10.1.tar.gz", hash = "sha256:eb4ae5cb65fe7fec7773c2306061a8e63e02efc2c72eba9d27b0fa23c94f1321", size = 17162, upload-time = "2025-10-16T16:14:11.978Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6d/c1/e419ef3723a074172b68aaa89c9f3de486ed4c2399e2dbd8113a4fdcaf9e/colorlog-6.10.1-py3-none-any.whl", hash = "sha256:2d7e8348291948af66122cff006c9f8da6255d224e7cf8e37d8de2df3bad8c9c", size = 11743, upload-time = "2025-10-16T16:14:10.512Z" },
+]
+
+[[package]]
+name = "docsfy"
+version = "0.1.0"
+source = { editable = "." }
+dependencies = [
+ { name = "ai-cli-runner" },
+ { name = "aiosqlite" },
+ { name = "fastapi" },
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "pydantic-settings" },
+ { name = "pygments" },
+ { name = "python-simple-logger" },
+ { name = "uvicorn" },
+]
+
+[package.optional-dependencies]
+dev = [
+ { name = "httpx" },
+ { name = "pytest" },
+ { name = "pytest-asyncio" },
+ { name = "pytest-xdist" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "ai-cli-runner" },
+ { name = "aiosqlite" },
+ { name = "fastapi" },
+ { name = "httpx", marker = "extra == 'dev'" },
+ { name = "jinja2" },
+ { name = "markdown" },
+ { name = "pydantic-settings" },
+ { name = "pygments" },
+ { name = "pytest", marker = "extra == 'dev'" },
+ { name = "pytest-asyncio", marker = "extra == 'dev'" },
+ { name = "pytest-xdist", marker = "extra == 'dev'" },
+ { name = "python-simple-logger" },
+ { name = "uvicorn" },
+]
+provides-extras = ["dev"]
+
+[[package]]
+name = "execnet"
+version = "2.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" },
+]
+
+[[package]]
+name = "fastapi"
+version = "0.135.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "annotated-doc" },
+ { name = "pydantic" },
+ { name = "starlette" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e7/7b/f8e0211e9380f7195ba3f3d40c292594fd81ba8ec4629e3854c353aaca45/fastapi-0.135.1.tar.gz", hash = "sha256:d04115b508d936d254cea545b7312ecaa58a7b3a0f84952535b4c9afae7668cd", size = 394962, upload-time = "2026-03-01T18:18:29.369Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl", hash = "sha256:46e2fc5745924b7c840f71ddd277382af29ce1cdb7d5eab5bf697e3fb9999c9e", size = 116999, upload-time = "2026-03-01T18:18:30.831Z" },
+]
+
+[[package]]
+name = "h11"
+version = "0.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
+]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "certifi" },
+ { name = "httpcore" },
+ { name = "idna" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
+]
+
+[[package]]
+name = "idna"
+version = "3.11"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
+]
+
+[[package]]
+name = "markdown"
+version = "3.10.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2b/f4/69fa6ed85ae003c2378ffa8f6d2e3234662abd02c10d216c0ba96081a238/markdown-3.10.2.tar.gz", hash = "sha256:994d51325d25ad8aa7ce4ebaec003febcce822c3f8c911e3b17c52f7f589f950", size = 368805, upload-time = "2026-02-09T14:57:26.942Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/de/1f/77fa3081e4f66ca3576c896ae5d31c3002ac6607f9747d2e3aa49227e464/markdown-3.10.2-py3-none-any.whl", hash = "sha256:e91464b71ae3ee7afd3017d9f358ef0baf158fd9a298db92f1d4761133824c36", size = 108180, upload-time = "2026-02-09T14:57:25.787Z" },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" },
+ { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" },
+ { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" },
+ { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" },
+ { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" },
+ { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" },
+ { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" },
+ { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" },
+ { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" },
+ { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" },
+ { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" },
+ { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" },
+ { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" },
+ { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" },
+ { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" },
+ { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" },
+ { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" },
+ { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" },
+ { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" },
+ { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" },
+ { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" },
+ { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" },
+ { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" },
+]
+
+[[package]]
+name = "packaging"
+version = "26.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" },
+]
+
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
+]
+
+[[package]]
+name = "pydantic"
+version = "2.12.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "annotated-types" },
+ { name = "pydantic-core" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
+]
+
+[[package]]
+name = "pydantic-core"
+version = "2.41.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
+ { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
+ { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
+ { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
+ { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
+ { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
+ { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
+ { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
+ { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
+ { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
+ { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
+ { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
+ { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
+ { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
+ { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
+ { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
+ { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
+ { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
+ { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
+ { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
+ { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
+ { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
+ { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
+ { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
+ { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
+ { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
+]
+
+[[package]]
+name = "pydantic-settings"
+version = "2.13.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "python-dotenv" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/52/6d/fffca34caecc4a3f97bda81b2098da5e8ab7efc9a66e819074a11955d87e/pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025", size = 223826, upload-time = "2026-02-19T13:45:08.055Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/00/4b/ccc026168948fec4f7555b9164c724cf4125eac006e176541483d2c959be/pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237", size = 58929, upload-time = "2026-02-19T13:45:06.034Z" },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+]
+
+[[package]]
+name = "pytest"
+version = "9.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "iniconfig" },
+ { name = "packaging" },
+ { name = "pluggy" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
+]
+
+[[package]]
+name = "pytest-asyncio"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pytest" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
+]
+
+[[package]]
+name = "pytest-xdist"
+version = "3.8.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "execnet" },
+ { name = "pytest" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" },
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.2.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
+]
+
+[[package]]
+name = "python-simple-logger"
+version = "2.0.19"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorlog" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f1/5b/e97b2209a7de9763c50ef073620630aff8ffc546206e519f1d4330293f9c/python_simple_logger-2.0.19.tar.gz", hash = "sha256:1a26cf71da4bea84ac3093a90a2476555a8dde5cce69c8271143eea7675ca9e1", size = 9953, upload-time = "2026-02-26T09:36:04.764Z" }
+
+[[package]]
+name = "starlette"
+version = "0.52.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
+]
+
+[[package]]
+name = "typing-inspection"
+version = "0.4.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
+]
+
+[[package]]
+name = "uvicorn"
+version = "0.41.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/32/ce/eeb58ae4ac36fe09e3842eb02e0eb676bf2c53ae062b98f1b2531673efdd/uvicorn-0.41.0.tar.gz", hash = "sha256:09d11cf7008da33113824ee5a1c6422d89fbc2ff476540d69a34c87fab8b571a", size = 82633, upload-time = "2026-02-16T23:07:24.1Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/83/e4/d04a086285c20886c0daad0e026f250869201013d18f81d9ff5eada73a88/uvicorn-0.41.0-py3-none-any.whl", hash = "sha256:29e35b1d2c36a04b9e180d4007ede3bcb32a85fbdfd6c6aeb3f26839de088187", size = 68783, upload-time = "2026-02-16T23:07:22.357Z" },
+]